
Merge pull request #1089 from pelias/master

Merge master into staging
pull/1138/head
Julian Simioni authored 7 years ago, committed by GitHub
commit eac88f8777
Changed files:

1. .npmrc (1)
2. .travis.yml (10)
3. Dockerfile (30)
4. LICENSE (21)
5. README.md (53)
6. bin/start (2)
7. controller/libpostal.js (105)
8. controller/markdownToHtml.js (1)
9. controller/place.js (2)
10. controller/search.js (2)
11. controller/structured_libpostal.js (75)
12. helper/debug.js (1)
13. helper/fieldValue.js (45)
14. helper/geojsonify.js (4)
15. helper/geojsonify_place_details.js (38)
16. helper/stackTraceLine.js (2)
17. middleware/404.js (1)
18. middleware/access_log.js (2)
19. middleware/assignLabels.js (2)
20. middleware/changeLanguage.js (1)
21. middleware/confidenceScore.js (11)
22. middleware/confidenceScoreFallback.js (2)
23. middleware/cors.js (1)
24. middleware/dedupe.js (21)
25. middleware/geocodeJSON.js (6)
26. middleware/headers.js (5)
27. middleware/jsonp.js (1)
28. middleware/localNamingConventions.js (7)
29. middleware/normalizeParentIds.js (2)
30. middleware/options.js (1)
31. middleware/renamePlacenames.js (2)
32. middleware/requestLanguage.js (1)
33. middleware/sendJSON.js (2)
34. middleware/sizeCalculator.js (26)
35. package.json (23)
36. public/attribution.md (4)
37. query/autocomplete.js (2)
38. query/autocomplete_defaults.js (1)
39. query/reverse.js (2)
40. query/reverse_defaults.js (1)
41. query/search.js (2)
42. query/search_defaults.js (1)
43. query/search_original.js (2)
44. query/text_parser_addressit.js (1)
45. query/view/boost_exact_matches.js (1)
46. query/view/focus_selected_layers.js (1)
47. query/view/ngrams_last_token_only.js (1)
48. query/view/ngrams_strict.js (1)
49. query/view/phrase_first_tokens_only.js (1)
50. query/view/pop_subquery.js (1)
51. routes/v1.js (27)
52. sanitizer/_city_name_standardizer.js (18)
53. sanitizer/_deprecate_quattroshapes.js (2)
54. sanitizer/_single_scalar_parameters.js (1)
55. sanitizer/_synthesize_analysis.js (44)
56. sanitizer/_tokenizer.js (1)
57. sanitizer/place.js (1)
58. sanitizer/reverse.js (1)
59. sanitizer/sanitizeAll.js (1)
60. sanitizer/wrap.js (1)
61. schema.js (6)
62. service/configurations/Interpolation.js (2)
63. service/configurations/Language.js (2)
64. service/configurations/Libpostal.js (31)
65. service/configurations/PlaceHolder.js (2)
66. service/configurations/PointInPolygon.js (2)
67. service/mget.js (1)
68. service/search.js (1)
69. test/ciao/autocomplete/layers_alias_coarse.coffee (5)
70. test/ciao/autocomplete/layers_invalid.coffee (2)
71. test/ciao/autocomplete/layers_mix_invalid_valid.coffee (2)
72. test/ciao/index.coffee (2)
73. test/ciao/reverse/boundary_circle_valid_radius_coarse.coffee (23)
74. test/ciao/reverse/layers_alias_coarse.coffee (5)
75. test/ciao/reverse/layers_invalid.coffee (2)
76. test/ciao/reverse/layers_mix_invalid_valid.coffee (2)
77. test/ciao/reverse/sources_deprecation_warning.coffee (2)
78. test/ciao/reverse/sources_multiple.coffee (4)
79. test/ciao/search/layers_alias_coarse.coffee (5)
80. test/ciao/search/layers_invalid.coffee (2)
81. test/ciao/search/layers_mix_invalid_valid.coffee (2)
82. test/ciao/search/sources_deprecation_warning.coffee (2)
83. test/ciao_test_data.js (1)
84. test/unit/app.js (2)
85. test/unit/controller/coarse_reverse.js (2)
86. test/unit/controller/index.js (1)
87. test/unit/controller/libpostal.js (345)
88. test/unit/controller/place.js (2)
89. test/unit/controller/placeholder.js (2)
90. test/unit/controller/predicates/has_parsed_text_properties.js (2)
91. test/unit/controller/predicates/has_request_errors.js (2)
92. test/unit/controller/predicates/has_request_parameter.js (2)
93. test/unit/controller/predicates/has_response_data.js (2)
94. test/unit/controller/predicates/has_results_at_layers.js (2)
95. test/unit/controller/predicates/is_addressit_parse.js (2)
96. test/unit/controller/predicates/is_admin_only_analysis.js (2)
97. test/unit/controller/predicates/is_coarse_reverse.js (2)
98. test/unit/controller/predicates/is_only_non_admin_layers.js (2)
99. test/unit/controller/predicates/is_request_sources_only_whosonfirst.js (2)
100. test/unit/controller/search.js (2)

Some files were not shown because too many files have changed in this diff.

1
.npmrc

@@ -0,0 +1 @@
+package-lock=false

10
.travis.yml

@@ -3,21 +3,17 @@ language: node_js
 notifications:
   email: false
 node_js:
-  - 4
   - 6
+  - 8
 matrix:
   fast_finish: true
-env:
-  global:
-    - BUILD_LEADER_ID=2
 script: npm run travis
 before_install:
-  - npm i -g npm@^3.0.0
+  - npm i -g npm
 before_script:
   - npm prune
 after_success:
-  - npm install -g npx
-  - npx -p node@8 npm run semantic-release
+  - npm run semantic-release
 branches:
   except:
     - /^v\d+\.\d+\.\d+$/

30
Dockerfile

@@ -1,29 +1,23 @@
 # base image
-FROM pelias/libpostal_baseimage
+FROM pelias/baseimage
+
+RUN useradd -ms /bin/bash pelias
+USER pelias
 
 # maintainer information
-LABEL maintainer="pelias@mapzen.com"
+LABEL maintainer="pelias.team@gmail.com"
-
-EXPOSE 3100
 
 # Where the app is built and run inside the docker fs
-ENV WORK=/opt/pelias
+ENV WORK=/home/pelias
+WORKDIR ${WORK}
 
-# Used indirectly for saving npm logs etc.
-ENV HOME=/opt/pelias
+# copy package.json first to prevent npm install being rerun when only code changes
+COPY ./package.json ${WORK}
+RUN npm install
 
-WORKDIR ${WORK}
 COPY . ${WORK}
 
-# Build and set permissions for arbitrary non-root user
-RUN npm install && \
-    npm test && \
-    chmod -R a+rwX .
-
-# Don't run as root, because there's no reason to (https://docs.docker.com/engine/articles/dockerfile_best-practices/#user).
-# This also reveals permission problems on local Docker.
-RUN chown -R 9999:9999 ${WORK}
-USER 9999
+# only allow containers to succeed if tests pass
+RUN npm test
 
 # start service
-CMD [ "npm", "start" ]
+CMD [ "./bin/start" ]

21
LICENSE

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Pelias Contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

53
README.md

@@ -1,11 +1,11 @@
 >This repository is part of the [Pelias](https://github.com/pelias/pelias)
->project. Pelias is an open-source, open-data geocoder built by
->[Mapzen](https://www.mapzen.com/) that also powers [Mapzen Search](https://mapzen.com/projects/search). Our
->official user documentation is [here](https://mapzen.com/documentation/search/).
+>project. Pelias is an open-source, open-data geocoder originally sponsored by
+>[Mapzen](https://www.mapzen.com/). Our official user documentation is
+>[here](https://github.com/pelias/documentation).
 
 # Pelias API Server
 
-This is the API server for the Pelias project. It's the service that runs to process user HTTP requests and return results as GeoJSON by querying Elasticsearch.
+This is the API server for the Pelias project. It's the service that runs to process user HTTP requests and return results as GeoJSON by querying Elasticsearch and the other Pelias services.
 
 [![NPM](https://nodei.co/npm/pelias-api.png?downloads=true&stars=true)](https://nodei.co/npm/pelias-api)
@@ -13,11 +13,11 @@ This is the API server for the Pelias project. It's the service that runs to pro
 ## Documentation
 
-See the [Mapzen Search documentation](https://mapzen.com/documentation/search/).
+Full documentation for the Pelias API lives in the [pelias/documentation](https://github.com/pelias/documentation) repository.
 
 ## Install Dependencies
 
-Note: Pelias requires Node.js v4 or newer
+Note: Pelias requires Node.js v6 or newer
 
 ```bash
 npm install
@@ -40,49 +40,36 @@ The API recognizes the following properties under the top-level `api` key in you
 |parameter|required|default|description|
 |---|---|---|---|
 |`host`|*yes*||specifies the url under which the http service is to run|
-|`textAnalyzer`|*no*|*addressit*|can be either `libpostal` or `addressit` however will soon be **deprecated** and only `libpostal` will be supported going forward|
 |`indexName`|*no*|*pelias*|name of the Elasticsearch index to be used when building queries|
 |`relativeScores`|*no*|true|if set to true, confidence scores will be normalized, realistically at this point setting this to false is not tested or desirable
 |`accessLog`|*no*||name of the format to use for access logs; may be any one of the [predefined values](https://github.com/expressjs/morgan#predefined-formats) in the `morgan` package. Defaults to `"common"`; if set to `false`, or an otherwise falsy value, disables access-logging entirely.|
-|`services`|*no*||service definitions for [point-in-polygon](https://github.com/pelias/pip-service), and [placeholder](https://github.com/pelias/placeholder), and [interpolation](https://github.com/pelias/interpolation) services. If missing (which is not recommended), the services will not be called.|
+|`services`|*no*||service definitions for [point-in-polygon](https://github.com/pelias/pip-service), [libpostal](https://github.com/whosonfirst/go-whosonfirst-libpostal), [placeholder](https://github.com/pelias/placeholder), and [interpolation](https://github.com/pelias/interpolation) services. If missing (which is not recommended), the services will not be called.|
 |`defaultParameters.focus.point.lon` <br> `defaultParameters.focus.point.lat`|no | |default coordinates for focus point
 
-Example configuration file would look something like this:
+A good starting configuration file includes this section (fill in the service and Elasticsearch hosts as needed):
 
 ```
 {
   "esclient": {
-    "keepAlive": true,
-    "requestTimeout": "1200000",
-    "hosts": [
-      {
-        "protocol": "http",
-        "host": "somesemachine.elb.amazonaws.com",
-        "port": 9200
-      }
-    ]
+    "hosts": [{
+      "host": "elasticsearch"
+    }]
   },
   "api": {
-    "host": "localhost:3100/v1/",
-    "indexName": "foobar",
-    "relativeScores": true,
-    "textAnalyzer": "libpostal",
     "services": {
-      "pip": {
-        "url": "http://mypipservice.com:3000"
-      },
       "placeholder": {
-        "url": "http://myplaceholderservice.com:5000"
+        "url": "http://placeholder:4100"
       },
+      "libpostal": {
+        "url": "http://libpostal:8080"
+      },
+      "pip": {
+        "url": "http://pip-service:4200"
+      },
       "interpolation": {
-        "url": "http://myinterpolationservice.com:3000",
-        "timeout": 2500
+        "url": "http://interpolation:4300"
       }
     }
-    "defaultParameters": {
-      "focus.point.lat": 12.121212,
-      "focus.point.lon": 21.212121
-    }
   },
   "logger": {
     "level": "debug"
@@ -136,7 +123,7 @@ $ curl localhost:9200/pelias/_count?pretty
 ### Continuous Integration
 
-Travis tests every release against Node.js versions `4` and `6`.
+Travis tests every release against all supported Node.js versions.
 
 [![Build Status](https://travis-ci.org/pelias/api.png?branch=master)](https://travis-ci.org/pelias/api)

2
bin/start

@@ -0,0 +1,2 @@
#!/bin/bash
exec node index.js

105
controller/libpostal.js

@@ -1,31 +1,108 @@
-const text_analyzer = require('pelias-text-analyzer');
 const _ = require('lodash');
 const iso3166 = require('iso3166-1');
 const Debug = require('../helper/debug');
 const debugLog = new Debug('controller:libpostal');
+const logger = require('pelias-logger').get('api');
 
+// mapping object from libpostal fields to pelias fields
+var field_mapping = {
+  island:         'island',
+  category:       'category',
+  house:          'query',
+  house_number:   'number',
+  road:           'street',
+  suburb:         'neighbourhood',
+  city_district:  'borough',
+  city:           'city',
+  state_district: 'county',
+  state:          'state',
+  postcode:       'postalcode',
+  country:        'country'
+};
+
+// This controller calls the hosted libpostal service and converts the response
+// to a generic format for later use.  The hosted service returns an array like:
+//
+// ```
+// [
+//   {
+//     label: 'house_number',
+//     value: '30'
+//   },
+//   {
+//     label: 'road',
+//     value: 'west 26th street'
+//   },
+//   {
+//     label: 'city',
+//     value: 'new york'
+//   },
+//   {
+//     label: 'state',
+//     value: 'ny'
+//   }
+// ]
+// ```
+//
+// where `label` can be any of (currently):
+// - house (generally interpreted as unknown, treated by pelias like a query term)
+// - category (like "restaurants")
+// - house_number
+// - road
+// - unit (apt or suite #)
+// - suburb (like a neighbourhood)
+// - city
+// - city_district (like an NYC borough)
+// - state_district (like a county)
+// - state
+// - postcode
+// - country
+//
+// The Pelias query module is not concerned with unit.
+//
-function setup(should_execute) {
+function setup(libpostalService, should_execute) {
   function controller( req, res, next ){
     // bail early if req/res don't pass conditions for execution
     if (!should_execute(req, res)) {
       return next();
     }
 
     const initialTime = debugLog.beginTimer(req);
-    // parse text with query parser
-    const parsed_text = text_analyzer.parse(req.clean.text);
 
-    if (parsed_text !== undefined) {
-      // if a known ISO2 country was parsed, convert it to ISO3
-      if (_.has(parsed_text, 'country') && iso3166.is2(_.toUpper(parsed_text.country))) {
-        parsed_text.country = iso3166.to3(_.toUpper(parsed_text.country));
-      }
-
-      req.clean.parser = 'libpostal';
-      req.clean.parsed_text = parsed_text;
-      debugLog.push(req, {parsed_text: req.clean.parsed_text});
-    }
-    debugLog.stopTimer(req, initialTime);
-    return next();
+    libpostalService(req, (err, response) => {
+      if (err) {
+        // push err.message or err onto req.errors
+        req.errors.push( _.get(err, 'message', err) );
+
+      } else if (_.some(_.countBy(response, o => o.label), count => count > 1)) {
+        logger.warn(`discarding libpostal parse of '${req.clean.text}' due to duplicate field assignments`);
+        return next();
+
+      } else if (_.isEmpty(response)) {
+        return next();
+
+      } else {
+        req.clean.parser = 'libpostal';
+        req.clean.parsed_text = response.reduce(function(o, f) {
+          if (field_mapping.hasOwnProperty(f.label)) {
+            o[field_mapping[f.label]] = f.value;
+          }
+          return o;
+        }, {});
+
+        if (_.has(req.clean.parsed_text, 'country') && iso3166.is2(_.toUpper(req.clean.parsed_text.country))) {
+          req.clean.parsed_text.country = iso3166.to3(_.toUpper(req.clean.parsed_text.country));
+        }
+
+        debugLog.push(req, {parsed_text: req.clean.parsed_text});
+      }
+
+      debugLog.stopTimer(req, initialTime);
+      return next();
+    });
   }
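
For illustration, a minimal standalone sketch (hypothetical helper names) of the mapping the new controller performs: fold the labelled array returned by the libpostal service into a `parsed_text` object via the field mapping; the sample input is taken from the comment block in the new file.

```js
// illustrative only: same field-mapping idea as the controller above
const field_mapping = {
  house: 'query', house_number: 'number', road: 'street',
  suburb: 'neighbourhood', city_district: 'borough', city: 'city',
  state_district: 'county', state: 'state', postcode: 'postalcode', country: 'country'
};

function toParsedText(response) {
  // keep only known labels, renaming them to pelias field names
  return response.reduce((o, f) => {
    if (field_mapping.hasOwnProperty(f.label)) {
      o[field_mapping[f.label]] = f.value;
    }
    return o;
  }, {});
}

console.log(toParsedText([
  { label: 'house_number', value: '30' },
  { label: 'road', value: 'west 26th street' },
  { label: 'city', value: 'new york' },
  { label: 'state', value: 'ny' }
]));
// => { number: '30', street: 'west 26th street', city: 'new york', state: 'ny' }
```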

1
controller/markdownToHtml.js

@@ -1,4 +1,3 @@
 var markdown = require('markdown').markdown;
 var fs = require('fs');

2
controller/place.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const retry = require('retry');

2
controller/search.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const searchService = require('../service/search');

75
controller/structured_libpostal.js

@@ -0,0 +1,75 @@
const _ = require('lodash');
const Debug = require('../helper/debug');
const debugLog = new Debug('controller:libpostal');
const logger = require('pelias-logger').get('api');
// if there's a house_number in the libpostal response, return it
// otherwise return the postcode field (which may be undefined)
function findHouseNumberField(response) {
const house_number_field = response.find(f => f.label === 'house_number');
if (house_number_field) {
return house_number_field;
}
return response.find(f => f.label === 'postcode');
}
function setup(libpostalService, should_execute) {
function controller( req, res, next ){
// bail early if req/res don't pass conditions for execution
if (!should_execute(req, res)) {
return next();
}
const initialTime = debugLog.beginTimer(req);
libpostalService(req, (err, response) => {
if (err) {
// push err.message or err onto req.errors
req.errors.push( _.get(err, 'message', err) );
} else {
// figure out which field contains the probable house number, prefer house_number
// libpostal parses some inputs, like `3370 cobbe ave`, as a postcode+street
// so because we're treating the entire field as a street address, it's safe
// to assume that an identified postcode is actually a house number.
const house_number_field = findHouseNumberField(response);
// if we're fairly certain that libpostal identified a house number
// (from either the house_number or postcode field), place it into the
// number field and remove the first instance of that value from address
// and assign to street
// eg - '1090 N Charlotte St' becomes number=1090 and street=N Charlotte St
if (house_number_field) {
req.clean.parsed_text.number = house_number_field.value;
// remove the first instance of the number and trim whitespace
req.clean.parsed_text.street = _.trim(_.replace(req.clean.parsed_text.address, req.clean.parsed_text.number, ''));
} else {
// otherwise no house number was identifiable, so treat the entire input
// as a street
req.clean.parsed_text.street = req.clean.parsed_text.address;
}
// the address field no longer means anything since it's been parsed, so remove it
delete req.clean.parsed_text.address;
debugLog.push(req, {parsed_text: response});
}
debugLog.stopTimer(req, initialTime);
return next();
});
}
return controller;
}
module.exports = setup;
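
For illustration, a minimal sketch of the house-number handling described in the new controller above, applied to a bare `parsed_text` object (native string methods stand in for the lodash helpers used in the file):

```js
// illustrative only: prefer house_number, fall back to postcode, then split address
function extractHouseNumber(parsed_text, response) {
  const house_number_field =
    response.find(f => f.label === 'house_number') ||
    response.find(f => f.label === 'postcode');

  if (house_number_field) {
    parsed_text.number = house_number_field.value;
    // remove the first occurrence of the number; the remainder becomes the street
    parsed_text.street = parsed_text.address.replace(parsed_text.number, '').trim();
  } else {
    // no identifiable house number, treat the whole input as a street
    parsed_text.street = parsed_text.address;
  }
  delete parsed_text.address;
  return parsed_text;
}

console.log(extractHouseNumber(
  { address: '1090 N Charlotte St' },
  [ { label: 'house_number', value: '1090' }, { label: 'road', value: 'n charlotte st' } ]
));
// => { number: '1090', street: 'N Charlotte St' }
```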

1
helper/debug.js

@@ -1,4 +1,3 @@
-'use strict';
 const _ = require('lodash');
 class Debug {

45
helper/fieldValue.js

@@ -0,0 +1,45 @@
const _ = require('lodash');
function getStringValue(property) {
// numeric value, cast to string
if (_.isNumber(property)) {
return _.toString(property);
}
// isEmpty check works for all types of values: strings, arrays, objects
if (_.isEmpty(property)) {
return '';
}
if (_.isString(property)) {
return property;
}
// array value, take first item in array (at this time only used for admin & name values)
if (_.isArray(property)) {
return property[0];
}
return _.toString(property);
}
function getArrayValue(property) {
// numeric value, cast to array
if (_.isNumber(property)) {
return [property];
}
// isEmpty check works for all types of values: strings, arrays, objects
if (_.isEmpty(property)) {
return [];
}
if (_.isArray(property)) {
return property;
}
return [property];
}
module.exports.getStringValue = getStringValue;
module.exports.getArrayValue = getArrayValue;
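
A few usage examples of the new helper, shown as a sketch (assuming it is loaded from its new location, `helper/fieldValue`):

```js
const field = require('./helper/fieldValue');

field.getStringValue(['Berlin', 'Berlin, DE']); // => 'Berlin'  (first array element)
field.getStringValue(90210);                    // => '90210'   (numbers cast to string)
field.getStringValue(undefined);                // => ''        (empty values become '')

field.getArrayValue('Berlin');                  // => ['Berlin']
field.getArrayValue(90210);                     // => [90210]
```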

4
helper/geojsonify.js

@@ -1,10 +1,10 @@
 const GeoJSON = require('geojson');
 const extent = require('@mapbox/geojson-extent');
 const logger = require('pelias-logger').get('geojsonify');
 const collectDetails = require('./geojsonify_place_details');
 const _ = require('lodash');
 const Document = require('pelias-model').Document;
+const field = require('./fieldValue');
 
 function geojsonifyPlaces( params, docs ){
@@ -53,7 +53,7 @@ function geojsonifyPlace(params, place) {
   // assign name, logging a warning if it doesn't exist
   if (_.has(place, 'name.default')) {
-    doc.name = place.name.default;
+    doc.name = field.getStringValue(place.name.default);
   } else {
     logger.warn(`doc ${doc.gid} does not contain name.default`);
   }

38
helper/geojsonify_place_details.js

@@ -1,6 +1,5 @@
-'use strict';
 const _ = require('lodash');
+const field = require('./fieldValue');
 
 // Properties to be copied
 // If a property is identified as a single string, assume it should be presented as a string in response
@@ -89,10 +88,10 @@ function collectProperties( params, source ) {
       switch (prop.type) {
         case 'string':
-          value = getStringValue(source[prop.name]);
+          value = field.getStringValue(source[prop.name]);
           break;
         case 'array':
-          value = getArrayValue(source[prop.name]);
+          value = field.getArrayValue(source[prop.name]);
           break;
         // default behavior is to copy property exactly as is
         default:
@@ -110,35 +109,4 @@ function collectProperties( params, source ) {
 }
 
-function getStringValue(property) {
-  // isEmpty check works for all types of values: strings, arrays, objects
-  if (_.isEmpty(property)) {
-    return '';
-  }
-
-  if (_.isString(property)) {
-    return property;
-  }
-
-  // array value, take first item in array (at this time only used for admin values)
-  if (_.isArray(property)) {
-    return property[0];
-  }
-
-  return _.toString(property);
-}
-
-function getArrayValue(property) {
-  // isEmpty check works for all types of values: strings, arrays, objects
-  if (_.isEmpty(property)) {
-    return '';
-  }
-
-  if (_.isArray(property)) {
-    return property;
-  }
-
-  return [property];
-}
-
 module.exports = collectProperties;

2
helper/stackTraceLine.js

@@ -1,5 +1,3 @@
-'use strict';
 module.exports = () => {
   const stack = new Error().stack.split('\n');
   let targetLine;

1
middleware/404.js

@@ -1,4 +1,3 @@
 // handle not found errors
 function middleware(req, res) {
   res.header('Cache-Control','public');

2
middleware/access_log.js

@@ -2,8 +2,6 @@
  * Create a middleware that prints access logs via pelias-logger.
  */
-'use strict';
 var url = require( 'url' );
 var _ = require( 'lodash' );

2
middleware/assignLabels.js

@@ -1,4 +1,4 @@
-var defaultLabelGenerator = require('pelias-labels');
+const defaultLabelGenerator = require('pelias-labels');
 
 function setup(labelGenerator) {
   function middleware(req, res, next) {

1
middleware/changeLanguage.js

@@ -1,4 +1,3 @@
 var logger = require( 'pelias-logger' ).get( 'api' );
 const _ = require('lodash');

11
middleware/confidenceScore.js

@@ -11,9 +11,10 @@
  * - detection (or specification) of query type. i.e. an address shouldn't match an admin address.
  */
-var stats = require('stats-lite');
-var logger = require('pelias-logger').get('api');
-var check = require('check-types');
+const stats = require('stats-lite');
+const logger = require('pelias-logger').get('api');
+const check = require('check-types');
+const field = require('../helper/fieldValue');
 
 var RELATIVE_SCORES = true;
@@ -131,12 +132,12 @@ function checkDistanceFromMean(score, mean, stdev) {
 function checkName(text, parsed_text, hit) {
   // parsed_text name should take precedence if available since it's the cleaner name property
   if (check.assigned(parsed_text) && check.assigned(parsed_text.name) &&
-      hit.name.default.toLowerCase() === parsed_text.name.toLowerCase()) {
+      field.getStringValue(hit.name.default).toLowerCase() === parsed_text.name.toLowerCase()) {
     return 1;
   }
 
   // if no parsed_text check the text value as provided against result's default name
-  if (hit.name.default.toLowerCase() === text.toLowerCase()) {
+  if (field.getStringValue(hit.name.default).toLowerCase() === text.toLowerCase()) {
     return 1;
   }

2
middleware/confidenceScoreFallback.js

@@ -1,5 +1,3 @@
-'use strict';
 /**
  *
  * Basic confidence score should be computed and returned for each item in the results.

1
middleware/cors.js

@@ -1,4 +1,3 @@
 function middleware(req, res, next){
   res.header('Access-Control-Allow-Origin', '*');
   res.header('Access-Control-Allow-Methods', 'GET, OPTIONS');

21
middleware/dedupe.js

@@ -1,6 +1,7 @@
-var logger = require('pelias-logger').get('api');
-var _ = require('lodash');
-var isDifferent = require('../helper/diffPlaces').isDifferent;
+const logger = require('pelias-logger').get('api');
+const _ = require('lodash');
+const isDifferent = require('../helper/diffPlaces').isDifferent;
+const field = require('../helper/fieldValue');
 
 function setup() {
   return dedupeResults;
@@ -38,7 +39,7 @@ function dedupeResults(req, res, next) {
         logger.info('[dupe][replacing]', {
           query: req.clean.text,
           previous: uniqueResults[dupeIndex].source,
-          hit: hit.name.default + ' ' + hit.source + ':' + hit._id
+          hit: field.getStringValue(hit.name.default) + ' ' + hit.source + ':' + hit._id
         });
         // replace previous dupe item with current hit
         uniqueResults[dupeIndex] = hit;
@@ -48,7 +49,7 @@ function dedupeResults(req, res, next) {
         logger.info('[dupe][skipping]', {
           query: req.clean.text,
           previous: uniqueResults[dupeIndex].source,
-          hit: hit.name.default + ' ' + hit.source + ':' + hit._id
+          hit: field.getStringValue(hit.name.default) + ' ' + hit.source + ':' + hit._id
         });
       }
     }
@@ -64,13 +65,13 @@ function dedupeResults(req, res, next) {
 function isPreferred(existing, candidateReplacement) {
   // NOTE: we are assuming here that the layer for both records is the same
-  var isOA = _.flow(_.property('source'), _.eq.bind(null, 'openaddresses'));
-  var hasZip = _.bind(_.has, null, _.bind.placeholder, 'address_parts.zip');
+  const hasZip = _.bind(_.has, null, _.bind.placeholder, 'address_parts.zip');
 
   // https://github.com/pelias/api/issues/872
-  if (isOA(existing) && isOA(candidateReplacement)) {
-    return hasZip(candidateReplacement) && !hasZip(existing);
+  const candidateHasZip = hasZip(candidateReplacement);
+  const existingHasZip = hasZip(existing);
+  if (candidateHasZip !== existingHasZip) {
+    return candidateHasZip;
   }
 
   //bind the trumps function to the data items to keep the rest of the function clean
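
A small sketch of the reworked preference check above, using made-up records: when exactly one of two duplicates carries `address_parts.zip`, that record wins.

```js
const _ = require('lodash');

// same partially-applied predicate as in the diff above
const hasZip = _.bind(_.has, null, _.bind.placeholder, 'address_parts.zip');

const existing  = { source: 'openstreetmap',  address_parts: { number: '30', street: 'W 26th St' } };
const candidate = { source: 'openaddresses', address_parts: { number: '30', street: 'W 26th St', zip: '10010' } };

// differing zip presence decides preference
if (hasZip(candidate) !== hasZip(existing)) {
  console.log(hasZip(candidate) ? 'prefer candidate' : 'keep existing'); // => 'prefer candidate'
}
```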

6
middleware/geocodeJSON.js

@@ -47,7 +47,11 @@ function convertToGeocodeJSON(req, res, next, opts) {
   // OPTIONAL. Default: null. The attribution of the data. In case of multiple sources,
   // and then multiple attributions, can be an object with one key by source.
   // Can be a URI on the server, which outlines attribution details.
-  res.body.geocoding.attribution = url.resolve(opts.config.host, opts.basePath + 'attribution');
+  res.body.geocoding.attribution = url.format({
+    protocol: req.protocol,
+    host: req.get('host'),
+    pathname: opts.basePath + 'attribution'
+  });
 
   // OPTIONAL. Default: null. The query that has been issued to trigger the
   // search.
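
For reference, a sketch of what the `url.format` call introduced above produces, using example request values:

```js
const url = require('url');

// with req.protocol === 'http', req.get('host') === 'localhost:3100'
// and opts.basePath === '/v1/' (example values)
url.format({ protocol: 'http', host: 'localhost:3100', pathname: '/v1/attribution' });
// => 'http://localhost:3100/v1/attribution'
```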

5
middleware/headers.js

@@ -1,12 +1,11 @@
 var pkg = require('../package');
 
 function middleware(req, res, next){
   res.header('Charset','utf8');
   res.header('Cache-Control','public');
   res.header('Server', 'Pelias/'+pkg.version);
-  res.header('X-Powered-By', 'mapzen');
+  res.header('X-Powered-By', 'pelias');
   next();
 }
 
 module.exports = middleware;

1
middleware/jsonp.js

@@ -1,4 +1,3 @@
 function middleware(req, res, next){
   // store old json function

7
middleware/localNamingConventions.js

@@ -1,5 +1,6 @@
-var check = require('check-types');
-var _ = require('lodash');
+const check = require('check-types');
+const _ = require('lodash');
+const field = require('../helper/fieldValue');
 
 var flipNumberAndStreetCountries = ['DEU', 'FIN', 'SWE', 'NOR', 'DNK', 'ISL', 'CZE'];
@@ -49,7 +50,7 @@ function flipNumberAndStreet(place) {
   flipped = ( place.address_parts.street + ' ' + place.address_parts.number );
 
   // flip street name and housenumber
-  if( place.name.default === standard ){
+  if( field.getStringValue(place.name.default) === standard ){
     place.name.default = flipped;
   }
 }

2
middleware/normalizeParentIds.js

@@ -1,5 +1,3 @@
-'use strict';
 const logger = require('pelias-logger').get('api');
 const Document = require('pelias-model').Document;
 const placeTypes = require('../helper/placeTypes');

1
middleware/options.js

@@ -1,4 +1,3 @@
 /**
   this functionality is required by CORS as the browser will send an
   HTTP OPTIONS request before performing the CORS request.

2
middleware/renamePlacenames.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const PARENT_PROPS = require('../helper/placeTypes');

1
middleware/requestLanguage.js

@@ -1,4 +1,3 @@
 const _ = require('lodash');
 const logger = require( 'pelias-logger' ).get( 'api' );

2
middleware/sendJSON.js

@@ -59,8 +59,8 @@ function sendJSONResponse(req, res, next) {
         statusCode = Math.max( statusCode, 500 );
         break; // break on first match
       }
-      logger.warn( 'unknown geocoding error string:', err );
     }
+    logger.warn( 'unknown geocoding error string:', err );
   } else {
     logger.warn( 'unknown geocoding error type:', typeof err, err );
   }

26
middleware/sizeCalculator.js

@@ -2,21 +2,25 @@ var _ = require('lodash');
 var SIZE_PADDING = 2;
-var MIN_QUERY_SIZE = 20;
+var DEFAULT_MIN_QUERY_SIZE = 20;
 
 /**
  * Utility for calculating query result size
  * incorporating padding for dedupe process
  */
-function setup() {
-  return function setQuerySize(req, res, next) {
-    if (_.isUndefined(req.clean) || _.isUndefined(req.clean.size)) {
-      return next();
-    }
-    req.clean.querySize = calculateSize(req.clean.size);
-    next();
-  };
+function setup(min_size) {
+  if (min_size === undefined) {
+    min_size = DEFAULT_MIN_QUERY_SIZE;
+  }
+
+  return function setQuerySize(req, res, next) {
+    if (_.isUndefined(req.clean) || _.isUndefined(req.clean.size)) {
+      return next();
+    }
+    req.clean.querySize = calculateSize(req.clean.size, min_size);
+    next();
+  };
 }
 
 /**
@@ -25,8 +29,8 @@ function setup() {
  * @param {number} cleanSize
  * @returns {number}
  */
-function calculateSize(cleanSize) {
-  return Math.max(MIN_QUERY_SIZE, cleanSize * SIZE_PADDING);
+function calculateSize(cleanSize, min_size) {
+  return Math.max(min_size, cleanSize * SIZE_PADDING);
 }
 
 module.exports = setup;
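
A sketch of how the new `min_size` parameter plays out (the reverse route later in this diff passes `middleware.calcSize(2)` instead of the default):

```js
const SIZE_PADDING = 2;

function calculateSize(cleanSize, min_size) {
  return Math.max(min_size, cleanSize * SIZE_PADDING);
}

calculateSize(1, 20); // => 20  (default minimum, e.g. search)
calculateSize(1, 2);  // => 2   (reverse with calcSize(2))
calculateSize(15, 2); // => 30  (padding still applies for larger sizes)
```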

23
package.json

@@ -1,7 +1,7 @@
 {
   "name": "pelias-api",
   "version": "0.0.0-semantic-release",
-  "author": "mapzen",
+  "author": "pelias",
   "description": "Pelias API",
   "homepage": "https://github.com/pelias/api",
   "license": "MIT",
@@ -12,7 +12,7 @@
     "coverage": "node_modules/.bin/istanbul cover test/unit/run.js",
     "docs": "./bin/generate-docs",
     "lint": "jshint .",
-    "start": "node index.js",
+    "start": "./bin/start",
     "test": "npm run unit",
     "travis": "npm run check-dependencies && npm test",
     "unit": "./bin/units",
@@ -35,13 +35,13 @@
     "url": "https://github.com/pelias/api/issues"
   },
   "engines": {
-    "node": ">=4.0.0"
+    "node": ">=6.0.0"
   },
   "dependencies": {
     "addressit": "1.5.0",
     "async": "^2.0.0",
     "check-types": "^7.0.0",
-    "elasticsearch": "^13.0.0",
+    "elasticsearch": "^14.2.1",
     "elasticsearch-exceptions": "0.0.4",
     "express": "^4.8.8",
     "geojson": "^0.5.0",
@@ -55,14 +55,13 @@
     "markdown": "0.5.0",
     "morgan": "^1.8.2",
     "pelias-categories": "1.2.0",
-    "pelias-config": "2.13.0",
-    "pelias-labels": "1.7.0",
-    "pelias-logger": "0.3.0",
+    "pelias-config": "2.14.0",
+    "pelias-labels": "1.8.0",
+    "pelias-logger": "0.3.1",
     "pelias-microservice-wrapper": "1.3.0",
-    "pelias-model": "5.2.0",
+    "pelias-model": "5.3.2",
     "pelias-query": "9.1.1",
     "pelias-sorting": "1.1.0",
-    "pelias-text-analyzer": "1.10.2",
     "predicates": "^2.0.0",
     "retry": "^0.10.1",
     "stats-lite": "^2.0.4",
@@ -77,9 +76,9 @@
     "nsp": "^3.0.0",
     "pelias-mock-logger": "1.2.0",
     "precommit-hook": "^3.0.0",
-    "proxyquire": "^1.7.10",
-    "semantic-release": "^8.0.0",
-    "source-map": "^0.6.0",
+    "proxyquire": "^2.0.0",
+    "semantic-release": "^15.1.4",
+    "source-map": "^0.7.0",
     "tap-dot": "1.0.5",
     "tape": "^4.5.1",
     "tmp": "0.0.33",

4
public/attribution.md

@@ -1,7 +1,7 @@
 ## Attribution
-* Geocoding by [Pelias](https://mapzen.com/pelias) from [Mapzen](https://mapzen.com)
+* Geocoding by [Pelias](https://pelias.io).
 * Data from
   * [OpenStreetMap](http://www.openstreetmap.org/copyright) © OpenStreetMap contributors under [ODbL](http://opendatacommons.org/licenses/odbl/)
   * [OpenAddresses](http://openaddresses.io) under a [Creative Commons Zero](https://github.com/openaddresses/openaddresses/blob/master/sources/LICENSE) public domain designation
   * [GeoNames](http://www.geonames.org/) under [CC-BY-3.0](https://creativecommons.org/licenses/by/2.0/)
-  * [WhosOnFirst](http://whosonfirst.mapzen.com) under [various licenses](https://github.com/whosonfirst/whosonfirst-data/blob/master/LICENSE.md)
+  * [WhosOnFirst](https://www.whosonfirst.org/) under [various licenses](https://github.com/whosonfirst/whosonfirst-data/blob/master/LICENSE.md)

2
query/autocomplete.js

@@ -1,5 +1,3 @@
-'use strict';
 const peliasQuery = require('pelias-query');
 const defaults = require('./autocomplete_defaults');
 const textParser = require('./text_parser_addressit');

1
query/autocomplete_defaults.js

@@ -1,4 +1,3 @@
 var peliasQuery = require('pelias-query');
 var _ = require('lodash');

2
query/reverse.js

@@ -1,5 +1,3 @@
-'use strict';
 const peliasQuery = require('pelias-query');
 const defaults = require('./reverse_defaults');
 const check = require('check-types');

1
query/reverse_defaults.js

@@ -1,4 +1,3 @@
 var peliasQuery = require('pelias-query');
 var _ = require('lodash');

2
query/search.js

@@ -1,5 +1,3 @@
-'use strict';
 const peliasQuery = require('pelias-query');
 const defaults = require('./search_defaults');
 const textParser = require('./text_parser');

1
query/search_defaults.js

@@ -1,4 +1,3 @@
 var peliasQuery = require('pelias-query');
 var _ = require('lodash');

2
query/search_original.js

@@ -1,5 +1,3 @@
-'use strict';
 const peliasQuery = require('pelias-query');
 const defaults = require('./search_defaults');
 const textParser = require('./text_parser_addressit');

1
query/text_parser_addressit.js

@@ -1,4 +1,3 @@
 var logger = require('pelias-logger').get('api');
 var placeTypes = require('../helper/placeTypes');

1
query/view/boost_exact_matches.js

@@ -1,4 +1,3 @@
 var peliasQuery = require('pelias-query'),
     searchDefaults = require('../search_defaults');

1
query/view/focus_selected_layers.js

@@ -1,4 +1,3 @@
 var peliasQuery = require('pelias-query');
 /**

1
query/view/ngrams_last_token_only.js

@@ -1,4 +1,3 @@
 var peliasQuery = require('pelias-query'),
     ngrams_strict = require('./ngrams_strict');

1
query/view/ngrams_strict.js

@@ -1,4 +1,3 @@
 var peliasQuery = require('pelias-query');
 /**

1
query/view/phrase_first_tokens_only.js

@@ -1,4 +1,3 @@
 var peliasQuery = require('pelias-query');
 /**

1
query/view/pop_subquery.js

@@ -1,4 +1,3 @@
 var peliasQuery = require('pelias-query'),
     check = require('check-types');

27
routes/v1.js

@@ -29,6 +29,7 @@ var controllers = {
   coarse_reverse: require('../controller/coarse_reverse'),
   mdToHTML: require('../controller/markdownToHtml'),
   libpostal: require('../controller/libpostal'),
+  structured_libpostal: require('../controller/structured_libpostal'),
   place: require('../controller/place'),
   placeholder: require('../controller/placeholder'),
   search: require('../controller/search'),
@@ -96,6 +97,7 @@ const PlaceHolder = require('../service/configurations/PlaceHolder');
 const PointInPolygon = require('../service/configurations/PointInPolygon');
 const Language = require('../service/configurations/Language');
 const Interpolation = require('../service/configurations/Interpolation');
+const Libpostal = require('../service/configurations/Libpostal');
 
 /**
  * Append routes to app
@@ -122,9 +124,21 @@ function addRoutes(app, peliasConfig) {
   const interpolationService = serviceWrapper(interpolationConfiguration);
   const isInterpolationEnabled = _.constant(interpolationConfiguration.isEnabled());
 
+  // standard libpostal should use req.clean.text for the `address` parameter
+  const libpostalConfiguration = new Libpostal(
+    _.defaultTo(peliasConfig.api.services.libpostal, {}),
+    _.property('clean.text'));
+  const libpostalService = serviceWrapper(libpostalConfiguration);
+
+  // structured libpostal should use req.clean.parsed_text.address for the `address` parameter
+  const structuredLibpostalConfiguration = new Libpostal(
+    _.defaultTo(peliasConfig.api.services.libpostal, {}),
+    _.property('clean.parsed_text.address'));
+  const structuredLibpostalService = serviceWrapper(structuredLibpostalConfiguration);
+
   // fallback to coarse reverse when regular reverse didn't return anything
   const coarseReverseShouldExecute = all(
-    isPipServiceEnabled, not(hasRequestErrors), not(hasResponseData)
+    isPipServiceEnabled, not(hasRequestErrors), not(hasResponseData), not(isOnlyNonAdminLayers)
   );
 
   const libpostalShouldExecute = all(
@@ -132,6 +146,12 @@ function addRoutes(app, peliasConfig) {
     not(isRequestSourcesOnlyWhosOnFirst)
   );
 
+  // for libpostal to execute for structured requests, req.clean.parsed_text.address must exist
+  const structuredLibpostalShouldExecute = all(
+    not(hasRequestErrors),
+    hasParsedTextProperties.all('address')
+  );
+
   // execute placeholder if libpostal only parsed as admin-only and needs to
   // be geodisambiguated
   const placeholderGeodisambiguationShouldExecute = all(
@@ -256,7 +276,7 @@ function addRoutes(app, peliasConfig) {
       sanitizers.search.middleware(peliasConfig.api),
       middleware.requestLanguage,
       middleware.calcSize(),
-      controllers.libpostal(libpostalShouldExecute),
+      controllers.libpostal(libpostalService, libpostalShouldExecute),
       controllers.placeholder(placeholderService, geometricFiltersApply, placeholderGeodisambiguationShouldExecute),
       controllers.placeholder(placeholderService, geometricFiltersDontApply, placeholderIdsLookupShouldExecute),
       controllers.search_with_ids(peliasConfig.api, esclient, queries.address_using_ids, searchWithIdsShouldExecute),
@@ -286,6 +306,7 @@ function addRoutes(app, peliasConfig) {
       sanitizers.structured_geocoding.middleware(peliasConfig.api),
      middleware.requestLanguage,
       middleware.calcSize(),
+      controllers.structured_libpostal(structuredLibpostalService, structuredLibpostalShouldExecute),
       controllers.search(peliasConfig.api, esclient, queries.structured_geocoding, not(hasResponseDataOrRequestErrors)),
       postProc.trimByGranularityStructured(),
       postProc.distances('focus.point.'),
@@ -323,7 +344,7 @@ function addRoutes(app, peliasConfig) {
     reverse: createRouter([
       sanitizers.reverse.middleware,
       middleware.requestLanguage,
-      middleware.calcSize(),
+      middleware.calcSize(2),
       controllers.search(peliasConfig.api, esclient, queries.reverse, nonCoarseReverseShouldExecute),
       controllers.coarse_reverse(pipService, coarseReverseShouldExecute),
       postProc.distances('point.'),

18
sanitizer/_city_name_standardizer.js

@@ -1,20 +1,18 @@
 const _ = require('lodash');
 
-// matches 'ft', 'mt', 'saint', and 'sainte' on word boundary
-const mountSaintFort = /\b([fm]t|sainte?)\b/g;
+// matches 'ft', 'mt' on word boundary
+const mountFort = /\b([fm]t)\b/g;
 
 const transliterations = {
   'mt': 'mount',
-  'ft': 'fort',
-  'saint': 'st',
-  'sainte': 'ste'
+  'ft': 'fort'
 };
 
 function transliterate(match) {
   return _.get(transliterations, match);
 }
 
-// transliterate ft/mt/saint/sainte to fort/mount/st/ste, respectively
+// transliterate ft/mt to fort/mount, respectively
 function _sanitize(raw, clean) {
   // error & warning messages
   // this function doesn't add any error or warning messages
@@ -22,16 +20,16 @@ function _sanitize(raw, clean) {
   // only try to transliterate if there is a city in parsed_text
   if (!_.isEmpty(_.get(clean, 'parsed_text.city'))) {
-    // eg input: Ft. Saint Louis
-    // after 1.  ft saint louis
+    // eg input: Ft. st Louis
+    // after 1.  ft st louis
     // after 2.  fort st louis
     // after 3.  fort st louis
 
     // 1.  remove '.' that could abbreviate ft and mt (makes transliteration regex easier)
     const periods_removed = _.toLower(clean.parsed_text.city).replace(/\b(mt|ft)\./g, '$1 ');
 
-    // 2.  transliterate 'saint'->'st', etc
-    const transliterated = periods_removed.replace(mountSaintFort, transliterate);
+    // 2.  transliterate 'ft'->'fort', etc
+    const transliterated = periods_removed.replace(mountFort, transliterate);
 
     // 3.  reduce whitespace sequences that can occur when removing periods down to a single space
     const whitespace_normalized = _.trimEnd(transliterated.replace(/\s+/, ' '));
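
A tiny sketch of the narrowed transliteration above: `saint`/`sainte` are no longer rewritten, only `ft`/`mt` are expanded.

```js
const mountFort = /\b([fm]t)\b/g;
const transliterations = { 'mt': 'mount', 'ft': 'fort' };

'ft. saint louis'
  .replace(/\b(mt|ft)\./g, '$1 ')               // 1. drop periods after ft/mt
  .replace(mountFort, m => transliterations[m]) // 2. expand ft/mt only
  .replace(/\s+/, ' ')                          // 3. collapse extra whitespace
  .trim();
// => 'fort saint louis'  ('saint' is left untouched)
```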

2
sanitizer/_deprecate_quattroshapes.js

@@ -26,7 +26,7 @@ function _sanitize( raw, clean, opts ) {
       'replaced by Who\'s on First, an actively maintained data project based on Quattroshapes' +
       'Your existing queries WILL CONTINUE TO WORK for the foreseeable future, but results will ' +
       'be coming from Who\'s on First and `sources=quattroshapes` will be interpreted as ' +
-      '`sources=whosonfirst`. If you have any questions, please email search@mapzen.com.');
+      '`sources=whosonfirst`. If you have any questions, please email pelias.team@gmail.com.');
 
     // user requested 'quattroshapes', we will give them 'whosonfirst' instead.
     sources = _.without(sources, 'quattroshapes', 'qs');

1
sanitizer/_single_scalar_parameters.js

@@ -1,4 +1,3 @@
 var _ = require('lodash'),
     check = require('check-types');

44
sanitizer/_synthesize_analysis.js

@@ -1,5 +1,4 @@
 const _ = require('lodash');
-const text_analyzer = require('pelias-text-analyzer');
 
 const fields = {
   'venue': 'query',
@@ -17,20 +16,6 @@ function normalizeWhitespaceToSingleSpace(val) {
   return _.replace(_.trim(val), /\s+/g, ' ');
 }
 
-function isPostalCodeOnly(parsed_text) {
-  return Object.keys(parsed_text).length === 1 &&
-          parsed_text.hasOwnProperty('postalcode');
-}
-
-// figure out which field contains the probable house number, prefer number
-// libpostal parses some inputs, like `3370 cobbe ave`, as a postcode+street
-// so because we're treating the entire field as a street address, it's safe
-// to assume that an identified postcode is actually a house number.
-function getHouseNumberField(analyzed_address) {
-  // return the first field available in the libpostal response, undefined if none
-  return _.find(['number', 'postalcode'], _.partial(_.has, analyzed_address));
-}
-
 function _sanitize( raw, clean ){
   // error & warning messages
@@ -51,35 +36,8 @@ function _sanitize( raw, clean ){
       `at least one of the following fields is required: ${Object.keys(fields).join(', ')}`);
   }
 
-  if (clean.parsed_text.hasOwnProperty('address')) {
-    const analyzed_address = text_analyzer.parse(clean.parsed_text.address);
-
-    const house_number_field = getHouseNumberField(analyzed_address);
-
-    // if we're fairly certain that libpostal identified a house number
-    // (from either the house_number or postcode field), place it into the
-    // number field and remove the first instance of that value from address
-    // and assign to street
-    // eg - '1090 N Charlotte St' becomes number=1090 and street=N Charlotte St
-    if (house_number_field) {
-      clean.parsed_text.number = analyzed_address[house_number_field];
-
-      // remove the first instance of the number and trim whitespace
-      clean.parsed_text.street = _.trim(_.replace(clean.parsed_text.address, clean.parsed_text.number, ''));
-    } else {
-      // otherwise no house number was identifiable, so treat the entire input
-      // as a street
-      clean.parsed_text.street = clean.parsed_text.address;
-    }
-
-    // the address field no longer means anything since it's been parsed, so remove it
-    delete clean.parsed_text.address;
-  }
-
   return messages;
 }
 
 function _expected() {

1
sanitizer/_tokenizer.js

@@ -1,4 +1,3 @@
 var check = require('check-types');
 /**

1
sanitizer/place.js

@@ -1,4 +1,3 @@
 var sanitizeAll = require('../sanitizer/sanitizeAll'),
     sanitizers = {
       singleScalarParameters: require('../sanitizer/_single_scalar_parameters')(),

1
sanitizer/reverse.js

@@ -1,4 +1,3 @@
 var type_mapping = require('../helper/type_mapping');
 var sanitizeAll = require('../sanitizer/sanitizeAll'),
     sanitizers = {

1
sanitizer/sanitizeAll.js

@@ -1,4 +1,3 @@
-'use strict';
 function sanitize( req, sanitizers ){
   // init an object to store clean (sanitized) input parameters if not initialized
   req.clean = req.clean || {};

1
sanitizer/wrap.js

@@ -1,4 +1,3 @@
 /**
   normalize co-ordinates that lie outside of the normal ranges.

6  schema.js

@@ -1,4 +1,3 @@
-'use strict';
 const Joi = require('joi');
@@ -41,6 +40,11 @@ module.exports = Joi.object().keys({
       url: Joi.string().uri({ scheme: /https?/ }),
       timeout: Joi.number().integer().optional().default(250).min(0),
       retries: Joi.number().integer().optional().default(3).min(0),
+    }).unknown(false).requiredKeys('url'),
+    libpostal: Joi.object().keys({
+      url: Joi.string().uri({ scheme: /https?/ }),
+      timeout: Joi.number().integer().optional().default(250).min(0),
+      retries: Joi.number().integer().optional().default(3).min(0),
     }).unknown(false).requiredKeys('url')
   }).unknown(false).default({}), // default api.services to an empty object
   defaultParameters: Joi.object().keys({
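
With the schema addition above, a pelias.json that enables the libpostal parsing service would plausibly look like the following (the host and port are placeholders; timeout and retries fall back to their defaults of 250 and 3 when omitted):

{
  "api": {
    "services": {
      "libpostal": {
        "url": "http://localhost:4400",
        "timeout": 250,
        "retries": 3
      }
    }
  }
}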

2  service/configurations/Interpolation.js

@@ -1,5 +1,3 @@
-'use strict';
 const url = require('url');
 const _ = require('lodash');

2  service/configurations/Language.js

@@ -1,5 +1,3 @@
-'use strict';
 const url = require('url');
 const _ = require('lodash');

31  service/configurations/Libpostal.js

@@ -0,0 +1,31 @@
+const url = require('url');
+
+const ServiceConfiguration = require('pelias-microservice-wrapper').ServiceConfiguration;
+
+class Libpostal extends ServiceConfiguration {
+  constructor(o, propertyExtractor) {
+    super('libpostal', o);
+
+    // save off the propertyExtractor function
+    // this is used to extract a single property from req. eg:
+    // * _.property('clean.text')
+    // * _.property('clean.parsed_text.address')
+    // will return those properties from req
+    this.propertyExtractor = propertyExtractor;
+  }
+
+  getParameters(req) {
+    return {
+      address: this.propertyExtractor(req)
+    };
+  }
+
+  getUrl(req) {
+    return url.resolve(this.baseUrl, 'parse');
+  }
+}
+
+module.exports = Libpostal;
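
A minimal sketch of how this configuration is typically wired up, assuming the service factory exported by pelias-microservice-wrapper and a lodash property extractor (the variable names and config lookup here are illustrative):

const _ = require('lodash');
const peliasConfig = require('pelias-config').generate();
const serviceWrapper = require('pelias-microservice-wrapper').service;
const Libpostal = require('./service/configurations/Libpostal');

// configuration that sends the free-form query text (req.clean.text) to /parse
const libpostalConfiguration = new Libpostal(
  _.defaultTo(peliasConfig.api.services.libpostal, {}),
  _.property('clean.text')
);

// libpostalService is a function(req, callback) that calls the configured
// libpostal /parse endpoint and yields its response
const libpostalService = serviceWrapper(libpostalConfiguration);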

2  service/configurations/PlaceHolder.js

@@ -1,5 +1,3 @@
-'use strict';
 const url = require('url');
 const _ = require('lodash');

2  service/configurations/PointInPolygon.js

@@ -1,5 +1,3 @@
-'use strict';
 const url = require('url');
 const _ = require('lodash');

1  service/mget.js

@@ -1,4 +1,3 @@
 /**
   query must be an array of hashes, structured like so:

1  service/search.js

@@ -1,4 +1,3 @@
 /**
   cmd can be any valid ES query command

5  test/ciao/autocomplete/layers_alias_coarse.coffee

@@ -32,6 +32,7 @@ should.not.exist json.geocoding.warnings
 json.geocoding.query['text'].should.eql 'a'
 json.geocoding.query['size'].should.eql 10
 json.geocoding.query.layers.should.eql [ "continent",
+  "empire",
   "country",
   "dependency",
   "macroregion",
@@ -45,5 +46,7 @@ json.geocoding.query.layers.should.eql [ "continent",
   "neighbourhood",
   "microhood",
   "disputed",
-  "postalcode"
+  "postalcode",
+  "ocean",
+  "marinearea"
 ]

2  test/ciao/autocomplete/layers_invalid.coffee

@@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
 #? expected errors
 should.exist json.geocoding.errors
-json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
+json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,empire,dependency,macrocounty,macrohood,microhood,disputed,postalcode,ocean,marinearea' ]
 #? expected warnings
 should.not.exist json.geocoding.warnings

2  test/ciao/autocomplete/layers_mix_invalid_valid.coffee

@@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
 #? expected errors
 should.exist json.geocoding.errors
-json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
+json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,empire,dependency,macrocounty,macrohood,microhood,disputed,postalcode,ocean,marinearea' ]
 #? expected warnings
 should.not.exist json.geocoding.warnings

2  test/ciao/index.coffee

@@ -19,4 +19,4 @@ response.should.have.header 'Server'
 response.headers.server.should.match /Pelias\/\d{1,2}\.\d{1,2}\.\d{1,2}/
 #? vanity header correctly set
-response.should.have.header 'X-Powered-By','mapzen'
+response.should.have.header 'X-Powered-By','pelias'

23  test/ciao/reverse/boundary_circle_valid_radius_coarse.coffee

@@ -26,14 +26,31 @@ json.features.should.be.instanceof Array
 should.not.exist json.geocoding.errors
 #? expected warnings
-should.exist json.geocoding.warnings
-json.geocoding.warnings.should.eql [ 'boundary.circle.radius is not applicable for coarse reverse' ]
+should.not.exist json.geocoding.warnings
 #? inputs
 json.geocoding.query['size'].should.eql 10
-json.geocoding.query['layers'].should.eql 'coarse'
 json.geocoding.query['point.lat'].should.eql 40.744243
 json.geocoding.query['point.lon'].should.eql -73.990342
 json.geocoding.query['boundary.circle.lat'].should.eql 40.744243
 json.geocoding.query['boundary.circle.lon'].should.eql -73.990342
 json.geocoding.query['boundary.circle.radius'].should.eql 999.9
+json.geocoding.query['layers'].should.eql [ "continent",
+  "empire",
+  "country",
+  "dependency",
+  "macroregion",
+  "region",
+  "locality",
+  "localadmin",
+  "macrocounty",
+  "county",
+  "macrohood",
+  "borough",
+  "neighbourhood",
+  "microhood",
+  "disputed",
+  "postalcode",
+  "ocean",
+  "marinearea"
+]
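
The layer fixtures above and below all encode the same change: the coarse alias now expands to the eighteen layers listed here, including the new empire, ocean and marinearea layers. A hypothetical sketch of that kind of alias expansion (illustrative only; the project's own helper/type_mapping may differ):

// the layers that the 'coarse' alias expands to, per the expectations above
const COARSE_LAYERS = [
  'continent', 'empire', 'country', 'dependency', 'macroregion', 'region',
  'locality', 'localadmin', 'macrocounty', 'county', 'macrohood', 'borough',
  'neighbourhood', 'microhood', 'disputed', 'postalcode', 'ocean', 'marinearea'
];

// expand alias entries found in a ?layers= list (hypothetical helper)
function expandLayerAliases(layers, aliases = { coarse: COARSE_LAYERS }) {
  return layers.reduce((acc, layer) => acc.concat(aliases[layer] || [layer]), []);
}

// expandLayerAliases([ 'coarse' ]) returns the full list asserted in these fixtures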

5  test/ciao/reverse/layers_alias_coarse.coffee

@@ -31,6 +31,7 @@ should.not.exist json.geocoding.warnings
 #? inputs
 json.geocoding.query['size'].should.eql 10
 json.geocoding.query.layers.should.eql [ "continent",
+  "empire",
   "country",
   "dependency",
   "macroregion",
@@ -44,5 +45,7 @@ json.geocoding.query.layers.should.eql [ "continent",
   "neighbourhood",
   "microhood",
   "disputed",
-  "postalcode"
+  "postalcode",
+  "ocean",
+  "marinearea"
 ]

2  test/ciao/reverse/layers_invalid.coffee

@@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
 #? expected errors
 should.exist json.geocoding.errors
-json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
+json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,empire,dependency,macrocounty,macrohood,microhood,disputed,postalcode,ocean,marinearea' ]
 #? expected warnings
 should.not.exist json.geocoding.warnings

2  test/ciao/reverse/layers_mix_invalid_valid.coffee

@@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
 #? expected errors
 should.exist json.geocoding.errors
-json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
+json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,empire,dependency,macrocounty,macrohood,microhood,disputed,postalcode,ocean,marinearea' ]
 #? expected warnings
 should.not.exist json.geocoding.warnings

2  test/ciao/reverse/sources_deprecation_warning.coffee

@@ -27,7 +27,7 @@ should.not.exist json.geocoding.errors
 #? expected warnings
 should.exist json.geocoding.warnings
-json.geocoding.warnings.should.eql ['You are using Quattroshapes as a data source in this query. Quattroshapes has been disabled as a data source for Mapzen Search, and has beenreplaced by Who\'s on First, an actively maintained data project based on QuattroshapesYour existing queries WILL CONTINUE TO WORK for the foreseeable future, but results will be coming from Who\'s on First and `sources=quattroshapes` will be interpreted as `sources=whosonfirst`. If you have any questions, please email search@mapzen.com.' ]
+json.geocoding.warnings.should.eql ['You are using Quattroshapes as a data source in this query. Quattroshapes has been disabled as a data source for Mapzen Search, and has beenreplaced by Who\'s on First, an actively maintained data project based on QuattroshapesYour existing queries WILL CONTINUE TO WORK for the foreseeable future, but results will be coming from Who\'s on First and `sources=quattroshapes` will be interpreted as `sources=whosonfirst`. If you have any questions, please email pelias.team@gmail.com.' ]
 #? inputs
 json.geocoding.query['size'].should.eql 10

4  test/ciao/reverse/sources_multiple.coffee

@@ -1,6 +1,6 @@
 #> sources filter
-path: '/v1/reverse?point.lat=1&point.lon=2&sources=openstreetmap,geonames'
+path: '/v1/reverse?point.lat=1&point.lon=2&sources=openstreetmap,openaddresses'
 #? 200 ok
 response.statusCode.should.be.equal 200
@@ -30,4 +30,4 @@ should.not.exist json.geocoding.warnings
 #? inputs
 json.geocoding.query['size'].should.eql 10
-json.geocoding.query.sources.should.eql ["openstreetmap", "geonames"]
+json.geocoding.query.sources.should.eql ["openstreetmap", "openaddresses"]

5  test/ciao/search/layers_alias_coarse.coffee

@@ -32,6 +32,7 @@ should.not.exist json.geocoding.warnings
 json.geocoding.query['text'].should.eql 'a'
 json.geocoding.query['size'].should.eql 10
 json.geocoding.query.layers.should.eql [ "continent",
+  "empire",
   "country",
   "dependency",
   "macroregion",
@@ -45,5 +46,7 @@ json.geocoding.query.layers.should.eql [ "continent",
   "neighbourhood",
   "microhood",
   "disputed",
-  "postalcode"
+  "postalcode",
+  "ocean",
+  "marinearea"
 ]

2  test/ciao/search/layers_invalid.coffee

@@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
 #? expected errors
 should.exist json.geocoding.errors
-json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
+json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,empire,dependency,macrocounty,macrohood,microhood,disputed,postalcode,ocean,marinearea' ]
 #? expected warnings
 should.not.exist json.geocoding.warnings

2  test/ciao/search/layers_mix_invalid_valid.coffee

@@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
 #? expected errors
 should.exist json.geocoding.errors
-json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
+json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,empire,dependency,macrocounty,macrohood,microhood,disputed,postalcode,ocean,marinearea' ]
 #? expected warnings
 should.not.exist json.geocoding.warnings

2  test/ciao/search/sources_deprecation_warning.coffee

@@ -27,7 +27,7 @@ should.not.exist json.geocoding.errors
 #? expected warnings
 should.exist json.geocoding.warnings
-json.geocoding.warnings.should.eql ['You are using Quattroshapes as a data source in this query. Quattroshapes has been disabled as a data source for Mapzen Search, and has beenreplaced by Who\'s on First, an actively maintained data project based on QuattroshapesYour existing queries WILL CONTINUE TO WORK for the foreseeable future, but results will be coming from Who\'s on First and `sources=quattroshapes` will be interpreted as `sources=whosonfirst`. If you have any questions, please email search@mapzen.com.' ]
+json.geocoding.warnings.should.eql ['You are using Quattroshapes as a data source in this query. Quattroshapes has been disabled as a data source for Mapzen Search, and has beenreplaced by Who\'s on First, an actively maintained data project based on QuattroshapesYour existing queries WILL CONTINUE TO WORK for the foreseeable future, but results will be coming from Who\'s on First and `sources=quattroshapes` will be interpreted as `sources=whosonfirst`. If you have any questions, please email pelias.team@gmail.com.' ]
 #? inputs
 json.geocoding.query['size'].should.eql 10

1  test/ciao_test_data.js

@@ -1,4 +1,3 @@
 /**
   Test data required by the ciao test suite.

2  test/unit/app.js

@@ -1,5 +1,3 @@
-'use strict';
 const proxyquire = require('proxyquire').noCallThru();
 module.exports.tests = {};

2  test/unit/controller/coarse_reverse.js

@@ -1,5 +1,3 @@
-'use strict';
 const setup = require('../../../controller/coarse_reverse');
 const proxyquire = require('proxyquire').noCallThru();
 const _ = require('lodash');

1  test/unit/controller/index.js

@@ -1,4 +1,3 @@
 var setup = require('../../../controller/markdownToHtml');
 module.exports.tests = {};

345  test/unit/controller/libpostal.js

@@ -1,281 +1,302 @@
-'use strict';
 const proxyquire = require('proxyquire').noCallThru();
+const libpostal = require('../../../controller/libpostal');
 const _ = require('lodash');
+const mock_logger = require('pelias-mock-logger');
 module.exports.tests = {};
 module.exports.tests.interface = (test, common) => {
-  test('valid interface', t => {
-    const controller = proxyquire('../../../controller/libpostal', {
-      'pelias-text-analyzer': {
-        parse: () => undefined
-      }
-    });
-    t.equal(typeof controller, 'function', 'libpostal is a function');
-    t.equal(typeof controller(), 'function', 'libpostal returns a controller');
+  test('valid interface', (t) => {
+    t.equal(typeof libpostal, 'function', 'libpostal is a function');
+    t.equal(typeof libpostal(), 'function', 'libpostal returns a controller');
     t.end();
   });
 };
-module.exports.tests.should_execute = (test, common) => {
-  test('should_execute returning false should not call text-analyzer', t => {
-    const should_execute = (req, res) => {
-      // req and res should be passed to should_execute
-      t.deepEquals(req, {
-        clean: {
-          text: 'original query'
-        }
-      });
-      t.deepEquals(res, { b: 2 });
-      return false;
-    };
-    const controller = proxyquire('../../../controller/libpostal', {
-      'pelias-text-analyzer': {
-        parse: () => {
-          t.fail('parse should not have been called');
-        }
-      }
-    })(should_execute);
-    const req = {
-      clean: {
-        text: 'original query'
-      }
-    };
-    const res = { b: 2 };
-    controller(req, res, () => {
-      t.deepEquals(req, {
-        clean: {
-          text: 'original query'
-        }
-      }, 'req should not have been modified');
-      t.deepEquals(res, { b: 2 });
-      t.end();
-    });
-  });
-  test('should_execute returning false should not call text-analyzer', t => {
-    t.plan(5);
-    const should_execute = (req, res) => {
-      // req and res should be passed to should_execute
-      t.deepEquals(req, {
-        clean: {
-          text: 'original query'
-        }
-      });
-      t.deepEquals(res, { b: 2 });
-      return true;
-    };
-    const controller = proxyquire('../../../controller/libpostal', {
-      'pelias-text-analyzer': {
-        parse: (query) => {
-          t.equals(query, 'original query');
-          return undefined;
-        }
-      }
-    })(should_execute);
-    const req = {
-      clean: {
-        text: 'original query'
-      }
-    };
-    const res = { b: 2 };
-    controller(req, res, () => {
-      t.deepEquals(req, {
-        clean: {
-          text: 'original query'
-        }
-      }, 'req should not have been modified');
-      t.deepEquals(res, { b: 2 });
-      t.end();
-    });
-  });
-};
-module.exports.tests.parse_is_called = (test, common) => {
-  test('parse returning undefined should not overwrite clean.parsed_text', t => {
-    const controller = proxyquire('../../../controller/libpostal', {
-      'pelias-text-analyzer': {
-        parse: () => undefined
-      }
-    })(() => true);
-    const req = {
-      clean: {
-        parsed_text: 'original parsed_text'
-      }
-    };
-    const res = 'this is the response';
-    controller(req, res, () => {
-      t.deepEquals(req, {
-        clean: {
-          parsed_text: 'original parsed_text'
-        }
-      });
-      t.deepEquals(res, 'this is the response');
-      t.end();
-    });
-  });
-  test('parse returning something should overwrite clean.parsed_text', t => {
-    const controller = proxyquire('../../../controller/libpostal', {
-      'pelias-text-analyzer': {
-        parse: () => 'replacement parsed_text'
-      }
-    })(() => true);
-    const req = {
-      clean: {
-        parsed_text: 'original parsed_text'
-      }
-    };
-    const res = 'this is the response';
-    controller(req, res, () => {
-      t.deepEquals(req, {
-        clean: {
-          parser: 'libpostal',
-          parsed_text: 'replacement parsed_text'
-        }
-      });
-      t.deepEquals(res, 'this is the response');
-      t.end();
-    });
-  });
-};
-module.exports.tests.iso2_conversion = (test, common) => {
-  test('no country in parse response should not leave country unset', t => {
-    const controller = proxyquire('../../../controller/libpostal', {
-      'pelias-text-analyzer': {
-        parse: () => ({
-          locality: 'this is the locality'
-        })
-      },
-      'iso3166-1': {
-        is2: () => t.fail('should not have been called'),
-        to3: () => t.fail('should not have been called')
-      }
-    })(() => true);
-    const req = {
-      clean: {
-        parsed_text: 'original parsed_text'
-      }
-    };
-    const res = 'this is the response';
-    controller(req, res, () => {
-      t.deepEquals(req, {
-        clean: {
-          parser: 'libpostal',
-          parsed_text: {
-            locality: 'this is the locality'
-          }
-        }
-      });
-      t.deepEquals(res, 'this is the response');
-      t.end();
-    });
-  });
-  test('unknown country should not be converted', t => {
-    t.plan(3);
-    const controller = proxyquire('../../../controller/libpostal', {
-      'pelias-text-analyzer': {
-        parse: () => ({
-          country: 'unknown country code'
-        })
-      },
-      'iso3166-1': {
-        is2: country => {
-          t.equals(country, 'UNKNOWN COUNTRY CODE');
-          return false;
-        },
-        to3: () => t.fail('should not have been called')
-      }
-    })(() => true);
-    const req = {
-      clean: {
-        parsed_text: 'original parsed_text'
-      }
-    };
-    const res = 'this is the response';
-    controller(req, res, () => {
-      t.deepEquals(req, {
-        clean: {
-          parser: 'libpostal',
-          parsed_text: {
-            country: 'unknown country code'
-          }
-        }
-      });
-      t.deepEquals(res, 'this is the response');
-      t.end();
-    });
-  });
-  test('ISO2 country should be converted to ISO3', t => {
-    t.plan(4);
-    const controller = proxyquire('../../../controller/libpostal', {
-      'pelias-text-analyzer': {
-        parse: () => ({
-          country: 'ISO2 COUNTRY CODE'
-        })
-      },
-      'iso3166-1': {
-        is2: country => {
-          t.equals(country, 'ISO2 COUNTRY CODE');
-          return true;
-        },
-        to3: country => {
-          t.equals(country, 'ISO2 COUNTRY CODE');
-          return 'ISO3 COUNTRY CODE';
-        }
-      }
-    })(() => true);
-    const req = {
-      clean: {
-        parsed_text: 'original parsed_text'
-      }
-    };
-    const res = 'this is the response';
-    controller(req, res, () => {
-      t.deepEquals(req, {
-        clean: {
-          parser: 'libpostal',
-          parsed_text: {
-            country: 'ISO3 COUNTRY CODE'
-          }
-        }
-      });
-      t.deepEquals(res, 'this is the response');
-      t.end();
-    });
-  });
+module.exports.tests.early_exit_conditions = (test, common) => {
+  test('should_execute returning false should not call service', t => {
+    const service = () => {
+      t.fail('service should not have been called');
+    };
+    const should_execute = (req) => {
+      // req and res should be passed to should_execute
+      t.deepEquals(req, {
+        clean: {
+          text: 'original query'
+        }
+      });
+      return false;
+    };
+    const controller = libpostal(service, should_execute);
+    const req = {
+      clean: {
+        text: 'original query'
+      }
+    };
+    controller(req, undefined, () => {
+      t.deepEquals(req, {
+        clean: {
+          text: 'original query'
+        }
+      }, 'req should not have been modified');
+      t.end();
+    });
+  });
+};
+module.exports.tests.error_conditions = (test, common) => {
+  test('service returning error should append and not modify req.clean', t => {
+    const service = (req, callback) => {
+      callback('libpostal service error', []);
+    };
+    const controller = libpostal(service, () => true);
+    const req = {
+      clean: {
+        text: 'original query'
+      },
+      errors: []
+    };
+    controller(req, undefined, () => {
+      t.deepEquals(req, {
+        clean: {
+          text: 'original query'
+        },
+        errors: ['libpostal service error']
+      }, 'req should not have been modified');
+      t.end();
+    });
+  });
+};
+module.exports.tests.failure_conditions = (test, common) => {
+  test('service returning 2 or more of a label should return undefined and log message', t => {
+    const logger = mock_logger();
+    const service = (req, callback) => {
+      const response = [
+        {
+          label: 'road',
+          value: 'road value 1'
+        },
+        {
+          label: 'city',
+          value: 'city value'
+        },
+        {
+          label: 'road',
+          value: 'road value 2'
+        }
+      ];
+      callback(null, response);
+    };
+    const controller = proxyquire('../../../controller/libpostal', {
+      'pelias-logger': logger
+    })(service, () => true);
+    const req = {
+      clean: {
+        text: 'query value'
+      },
+      errors: []
+    };
+    controller(req, undefined, () => {
+      t.ok(logger.isWarnMessage('discarding libpostal parse of \'query value\' due to duplicate field assignments'));
+      t.deepEquals(req, {
+        clean: {
+          text: 'query value'
+        },
+        errors: []
+      }, 'req should not have been modified');
+      t.end();
+    });
+  });
+  test('service returning empty array should not set parsed_text or parser', t => {
+    const logger = mock_logger();
+    const service = (req, callback) => {
+      callback(null, []);
+    };
+    const controller = proxyquire('../../../controller/libpostal', {
+      'pelias-logger': logger
+    })(service, () => true);
+    const req = {
+      clean: {
+        text: 'query value'
+      },
+      errors: []
+    };
+    controller(req, undefined, () => {
+      t.deepEquals(req, {
+        clean: {
+          text: 'query value'
+        },
+        errors: []
+      }, 'req should not have been modified');
+      t.end();
+    });
+  });
+};
+module.exports.tests.success_conditions = (test, common) => {
+  test('service returning valid response should convert and append', t => {
+    const service = (req, callback) => {
+      const response = [
+        {
+          label: 'island',
+          value: 'island value'
+        },
+        {
+          label: 'category',
+          value: 'category value'
+        },
+        {
+          label: 'house',
+          value: 'house value'
+        },
+        {
+          label: 'house_number',
+          value: 'house_number value'
+        },
+        {
+          label: 'road',
+          value: 'road value'
+        },
+        {
+          label: 'suburb',
+          value: 'suburb value'
+        },
+        {
+          label: 'city_district',
+          value: 'city_district value'
+        },
+        {
+          label: 'city',
+          value: 'city value'
+        },
+        {
+          label: 'state_district',
+          value: 'state_district value'
+        },
+        {
+          label: 'state',
+          value: 'state value'
+        },
+        {
+          label: 'postcode',
+          value: 'postcode value'
+        },
+        {
+          label: 'country',
+          value: 'country value'
+        }
+      ];
+      callback(null, response);
+    };
+    const controller = libpostal(service, () => true);
+    const req = {
+      clean: {
+        text: 'original query'
+      },
+      errors: []
+    };
+    controller(req, undefined, () => {
+      t.deepEquals(req, {
+        clean: {
+          text: 'original query',
+          parser: 'libpostal',
+          parsed_text: {
+            island: 'island value',
+            category: 'category value',
+            query: 'house value',
+            number: 'house_number value',
+            street: 'road value',
+            neighbourhood: 'suburb value',
+            borough: 'city_district value',
+            city: 'city value',
+            county: 'state_district value',
+            state: 'state value',
+            postalcode: 'postcode value',
+            country: 'country value'
+          }
+        },
+        errors: []
+      }, 'req should not have been modified');
+      t.end();
+    });
+  });
+  test('ISO-2 country should be converted to ISO-3', t => {
+    const service = (req, callback) => {
+      const response = [
+        {
+          label: 'country',
+          value: 'ca'
+        }
+      ];
+      callback(null, response);
+    };
+    const controller = libpostal(service, () => true);
+    const req = {
+      clean: {
+        text: 'original query'
+      },
+      errors: []
+    };
+    controller(req, undefined, () => {
+      t.deepEquals(req, {
+        clean: {
+          text: 'original query',
+          parser: 'libpostal',
+          parsed_text: {
+            country: 'CAN'
+          }
+        },
+        errors: []
+      }, 'req should not have been modified');
+      t.end();
+    });
+  });
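
The success_conditions expectations above pin down how the new controller maps libpostal labels onto clean.parsed_text fields and converts ISO-2 country codes to ISO-3. A minimal sketch of that conversion, inferred from these tests (the mapping table and helper below are illustrative, not the controller's actual code):

const iso3166 = require('iso3166-1');

// label -> parsed_text field, as asserted in the tests above
const FIELD_MAPPING = {
  island: 'island',
  category: 'category',
  house: 'query',
  house_number: 'number',
  road: 'street',
  suburb: 'neighbourhood',
  city_district: 'borough',
  city: 'city',
  state_district: 'county',
  state: 'state',
  postcode: 'postalcode',
  country: 'country'
};

// convert a libpostal service response ([{ label, value }, ...]) into parsed_text;
// per the failure_conditions tests, the real controller also discards responses
// that assign the same label more than once
function convertLabels(response) {
  const parsed_text = {};

  response.forEach(pair => {
    const field = FIELD_MAPPING[pair.label];
    if (field) {
      parsed_text[field] = pair.value;
    }
  });

  // 'ca' -> 'CAN', matching the ISO-2/ISO-3 test case
  if (parsed_text.country && iso3166.is2(parsed_text.country.toUpperCase())) {
    parsed_text.country = iso3166.to3(parsed_text.country.toUpperCase());
  }

  return parsed_text;
}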

2  test/unit/controller/place.js

@@ -1,5 +1,3 @@
-'use strict';
 const setup = require('../../../controller/place');
 const proxyquire = require('proxyquire').noCallThru();

2  test/unit/controller/placeholder.js

@@ -1,5 +1,3 @@
-'use strict';
 const placeholder = require('../../../controller/placeholder');
 const proxyquire = require('proxyquire').noCallThru();
 const mock_logger = require('pelias-mock-logger');

2  test/unit/controller/predicates/has_parsed_text_properties.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const has_parsed_text_properties = require('../../../../controller/predicates/has_parsed_text_properties');

2  test/unit/controller/predicates/has_request_errors.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const has_request_errors = require('../../../../controller/predicates/has_request_errors');

2  test/unit/controller/predicates/has_request_parameter.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const has_request_parameter = require('../../../../controller/predicates/has_request_parameter');

2  test/unit/controller/predicates/has_response_data.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const has_response_data = require('../../../../controller/predicates/has_response_data');

2  test/unit/controller/predicates/has_results_at_layers.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const has_results_at_layers = require('../../../../controller/predicates/has_results_at_layers');

2  test/unit/controller/predicates/is_addressit_parse.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const is_addressit_parse = require('../../../../controller/predicates/is_addressit_parse');

2  test/unit/controller/predicates/is_admin_only_analysis.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const is_admin_only_analysis = require('../../../../controller/predicates/is_admin_only_analysis');

2  test/unit/controller/predicates/is_coarse_reverse.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const is_coarse_reverse = require('../../../../controller/predicates/is_coarse_reverse');

2  test/unit/controller/predicates/is_only_non_admin_layers.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const is_only_non_admin_layers = require('../../../../controller/predicates/is_only_non_admin_layers');

2  test/unit/controller/predicates/is_request_sources_only_whosonfirst.js

@@ -1,5 +1,3 @@
-'use strict';
 const _ = require('lodash');
 const is_request_sources_only_whosonfirst = require('../../../../controller/predicates/is_request_sources_only_whosonfirst');

2  test/unit/controller/search.js

@@ -1,5 +1,3 @@
-'use strict';
 const setup = require('../../../controller/search');
 const proxyquire = require('proxyquire').noCallThru();

Some files were not shown because too many files have changed in this diff.
