
Merge pull request #833 from pelias/staging

Merge staging into production
Stephen K Hess 8 years ago committed by GitHub
commit 82e11a014f
64 changed files (lines changed in parentheses):
  1. .travis.yml (10)
  2. controller/coarse_reverse.js (129)
  3. controller/predicates/has_request_errors.js (5)
  4. controller/predicates/has_response_data.js (5)
  5. controller/predicates/is_coarse_reverse.js (9)
  6. controller/predicates/is_pip_service_enabled.js (7)
  7. controller/search.js (21)
  8. middleware/changeLanguage.js (172)
  9. middleware/interpolate.js (7)
  10. middleware/requestLanguage.js (109)
  11. package.json (34)
  12. routes/v1.js (66)
  13. sanitizer/_synthesize_analysis.js (5)
  14. schema.js (3)
  15. service/language.js (93)
  16. service/pointinpolygon.js (44)
  17. test/ciao/autocomplete/language_default.coffee (37)
  18. test/ciao/autocomplete/language_header_invalid.coffee (38)
  19. test/ciao/autocomplete/language_header_valid.coffee (38)
  20. test/ciao/autocomplete/language_querystring_invalid.coffee (37)
  21. test/ciao/autocomplete/language_querystring_valid.coffee (37)
  22. test/ciao/autocomplete/layers_alias_coarse.coffee (3)
  23. test/ciao/autocomplete/layers_invalid.coffee (2)
  24. test/ciao/autocomplete/layers_mix_invalid_valid.coffee (2)
  25. test/ciao/place/language_default.coffee (37)
  26. test/ciao/place/language_header_invalid.coffee (38)
  27. test/ciao/place/language_header_valid.coffee (38)
  28. test/ciao/place/language_querystring_invalid.coffee (37)
  29. test/ciao/place/language_querystring_valid.coffee (37)
  30. test/ciao/reverse/language_default.coffee (37)
  31. test/ciao/reverse/language_header_invalid.coffee (38)
  32. test/ciao/reverse/language_header_valid.coffee (38)
  33. test/ciao/reverse/language_querystring_invalid.coffee (37)
  34. test/ciao/reverse/language_querystring_valid.coffee (37)
  35. test/ciao/reverse/layers_alias_coarse.coffee (3)
  36. test/ciao/reverse/layers_invalid.coffee (2)
  37. test/ciao/reverse/layers_mix_invalid_valid.coffee (2)
  38. test/ciao/search/address_parsing.coffee (2)
  39. test/ciao/search/language_default.coffee (37)
  40. test/ciao/search/language_header_invalid.coffee (38)
  41. test/ciao/search/language_header_valid.coffee (38)
  42. test/ciao/search/language_querystring_invalid.coffee (37)
  43. test/ciao/search/language_querystring_valid.coffee (37)
  44. test/ciao/search/layers_alias_coarse.coffee (3)
  45. test/ciao/search/layers_invalid.coffee (2)
  46. test/ciao/search/layers_mix_invalid_valid.coffee (2)
  47. test/unit/controller/coarse_reverse.js (578)
  48. test/unit/controller/place.js (2)
  49. test/unit/controller/predicates/has_request_errors.js (60)
  50. test/unit/controller/predicates/has_response_data.js (60)
  51. test/unit/controller/predicates/is_coarse_reverse.js (128)
  52. test/unit/controller/predicates/is_pip_service_enabled.js (42)
  53. test/unit/controller/search.js (58)
  54. test/unit/fixture/structured_geocoding/fallback.json (69)
  55. test/unit/fixture/structured_geocoding/postalcode_only.js (68)
  56. test/unit/middleware/changeLanguage.js (216)
  57. test/unit/middleware/interpolate.js (76)
  58. test/unit/middleware/requestLanguage.js (322)
  59. test/unit/query/structured_geocoding.js (18)
  60. test/unit/run.js (11)
  61. test/unit/sanitizer/_synthesize_analysis.js (2)
  62. test/unit/schema.js (80)
  63. test/unit/service/language.js (107)
  64. test/unit/service/pointinpolygon.js (169)

10
.travis.yml

@ -1,20 +1,16 @@
sudo: false
language: node_js
cache:
directories:
- node_modules
notifications:
email: false
node_js:
- 4
- 6
matrix:
fast_finish: true
allow_failures:
fast_finish: true
env:
global:
- CXX=g++-4.8
script: "npm run travis"
script: npm run travis
addons:
apt:
sources:
@ -22,7 +18,7 @@ addons:
packages:
- g++-4.8
before_install:
- npm i -g npm@^2.0.0
- npm i -g npm@^3.0.0
before_script:
- npm prune
after_success:

129
controller/coarse_reverse.js

@ -0,0 +1,129 @@
const logger = require('pelias-logger').get('coarse_reverse');
const _ = require('lodash');
const Document = require('pelias-model').Document;
const granularities = [
'neighbourhood',
'borough',
'locality',
'localadmin',
'county',
'macrocounty',
'region',
'macroregion',
'dependency',
'country'
];
function getMostGranularLayer(results) {
return granularities.find((granularity) => {
return results.hasOwnProperty(granularity);
});
}
function hasResultsAtRequestedLayers(results, layers) {
return _.intersection(layers, Object.keys(results)).length > 0;
}
function synthesizeDoc(results) {
// now create a model.Document from what's available, using the most granular
// result available as the starting point
// the requested layers cannot simply be re-used here since county may be the most
// granular layer requested but the results may start at region (no county found)
const most_granular_layer = getMostGranularLayer(results);
const id = results[most_granular_layer][0].id;
const doc = new Document('whosonfirst', most_granular_layer, id.toString());
doc.setName('default', results[most_granular_layer][0].name);
if (results[most_granular_layer][0].hasOwnProperty('centroid')) {
doc.setCentroid( results[most_granular_layer][0].centroid );
}
if (results[most_granular_layer][0].hasOwnProperty('bounding_box')) {
const parsedBoundingBox = results[most_granular_layer][0].bounding_box.split(',').map(parseFloat);
doc.setBoundingBox({
upperLeft: {
lat: parsedBoundingBox[3],
lon: parsedBoundingBox[0]
},
lowerRight: {
lat: parsedBoundingBox[1],
lon: parsedBoundingBox[2]
}
});
}
if (_.has(results, 'country[0].abbr')) {
doc.setAlpha3(results.country[0].abbr);
}
// assign the administrative hierarchy
Object.keys(results).forEach((layer) => {
if (results[layer][0].hasOwnProperty('abbr')) {
doc.addParent(layer, results[layer][0].name, results[layer][0].id.toString(), results[layer][0].abbr);
} else {
doc.addParent(layer, results[layer][0].name, results[layer][0].id.toString());
}
});
const esDoc = doc.toESDocument();
esDoc.data._id = esDoc._id;
esDoc.data._type = esDoc._type;
return esDoc.data;
}
function setup(service, should_execute) {
function controller(req, res, next) {
// do not run controller when a request validation error has occurred
if (!should_execute(req, res)) {
return next();
}
const centroid = {
lat: req.clean['point.lat'],
lon: req.clean['point.lon']
};
service(centroid, (err, results) => {
// if there's an error, log it and bail
if (err) {
logger.error(err);
return next();
}
// find the finest granularity requested
const finest_granularity_requested = granularities.findIndex((granularity) => {
return req.clean.layers.indexOf(granularity) !== -1;
});
// now remove everything from the response that is more granular than the
// most granular layer requested. that is, if req.clean.layers=['county'],
// remove neighbourhoods, localities, and localadmins
Object.keys(results).forEach((layer) => {
if (granularities.indexOf(layer) < finest_granularity_requested) {
delete results[layer];
}
});
res.meta = {};
res.data = [];
// synthesize a doc from results if there's a result at the requested layer(s)
if (hasResultsAtRequestedLayers(results, req.clean.layers)) {
res.data.push(synthesizeDoc(results));
}
return next();
});
}
return controller;
}
module.exports = setup;
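To make the layer logic above concrete, here is a small hedged sketch (the results object is invented, and these helpers are module-internal, so this is purely illustrative):

const results = {
  locality: [ { id: 101748479, name: 'München', abbr: 'MUC' } ],
  region:   [ { id: 85682571,  name: 'Bayern' } ],
  country:  [ { id: 85633111,  name: 'Deutschland', abbr: 'DEU' } ]
};

getMostGranularLayer(results);                            // 'locality'
hasResultsAtRequestedLayers(results, ['locality']);       // true
hasResultsAtRequestedLayers(results, ['neighbourhood']);  // false, so no doc would be synthesized

When a doc is synthesized from this input, the locality entry becomes a 'whosonfirst' document of layer 'locality' with id '101748479', with the locality, region and country entries attached as parents.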

5
controller/predicates/has_request_errors.js

@ -0,0 +1,5 @@
const _ = require('lodash');
module.exports = (request, response) => {
return _.get(request, 'errors', []).length > 0;
};

5
controller/predicates/has_response_data.js

@ -0,0 +1,5 @@
const _ = require('lodash');
module.exports = (request, response) => {
return _.get(response, 'data', []).length > 0;
};

9
controller/predicates/is_coarse_reverse.js

@ -0,0 +1,9 @@
const _ = require('lodash');
const non_coarse_layers = ['address', 'street', 'venue'];
module.exports = (req, res) => {
// returns true only when layers is non-empty and contains none of the non-coarse layers ('address', 'street', 'venue')
return !_.isEmpty(req.clean.layers) &&
_.intersection(req.clean.layers, non_coarse_layers).length === 0;
};
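For illustration, a hedged sketch of how this predicate evaluates a few hypothetical requests (isCoarseReverse is just a local name for the required module, resolved from the project root):

const isCoarseReverse = require('./controller/predicates/is_coarse_reverse');

isCoarseReverse({ clean: { layers: ['locality', 'county'] } });  // true:  only coarse layers requested
isCoarseReverse({ clean: { layers: ['address', 'locality'] } }); // false: contains a non-coarse layer
isCoarseReverse({ clean: { layers: [] } });                      // false: empty layers list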

7
controller/predicates/is_pip_service_enabled.js

@ -0,0 +1,7 @@
module.exports = (uri) => {
// this predicate relies upon the fact that the schema has already validated
// that api.pipService is a URI-formatted string
return (request, response) => {
return uri !== undefined;
};
};

21
controller/search.js

@ -7,30 +7,13 @@ const logger = require('pelias-logger').get('api');
const logging = require( '../helper/logging' );
const retry = require('retry');
function requestHasErrors(request) {
return _.get(request, 'errors', []).length > 0;
}
function responseHasData(response) {
return _.get(response, 'data', []).length > 0;
}
function isRequestTimeout(err) {
return _.get(err, 'status') === 408;
}
function setup( apiConfig, esclient, query ){
function setup( apiConfig, esclient, query, should_execute ){
function controller( req, res, next ){
// do not run controller when a request
// validation error has occurred.
if (requestHasErrors(req)) {
return next();
}
// do not run controller if there are already results
// this was added during libpostal integration. if the libpostal parse/query
// doesn't return anything then fallback to old search-engine-y behavior
if (responseHasData(res)) {
if (!should_execute(req, res)) {
return next();
}

172
middleware/changeLanguage.js

@ -0,0 +1,172 @@
var logger = require( 'pelias-logger' ).get( 'api' );
var service = require('../service/language');
/**
example response from language web service:
{
"101748479": {
"wofid": 101748479,
"placetype": "locality",
"iso": "DE",
"area": 0.031614,
"lineage": {
"continent_id": 102191581,
"country_id": 85633111,
"county_id": 102063261,
"locality_id": 101748479,
"macrocounty_id": 404227567,
"region_id": 85682571
},
"rowid": 90425,
"names": {
"default": "München",
"eng": "Munich"
}
},
}
**/
function setup() {
var transport = service.findById();
var middleware = function(req, res, next) {
// no-op, request did not require a language change
if( !isLanguageChangeRequired( req, res ) ){
return next();
}
// collect a list of parent ids to fetch translations for
var ids = extractIds( res );
// perform language lookup for all relevant ids
var timer = (new Date()).getTime();
transport.query( ids, function( err, translations ){
// update documents using a translation map
if( err ){
logger.error( '[language] [error]', err );
} else {
updateDocs( req, res, translations );
}
logger.info( '[language] [took]', (new Date()).getTime() - timer, 'ms' );
next();
});
};
middleware.transport = transport;
return middleware;
}
// collect a list of parent ids to fetch translations for
function extractIds( res ){
// store ids in an object in order to avoid duplicates
var ids = {};
// convenience function for adding a new id to the object
function addId(id) {
ids[id] = true;
}
// extract all parent ids from documents
res.data.forEach( function( doc ){
// skip invalid records
if( !doc || !doc.parent ){ return; }
// iterate over doc.parent.* attributes
for( var attr in doc.parent ){
// match only attributes ending with '_id'
var match = attr.match(/_id$/);
if( !match ){ continue; }
// skip invalid/empty arrays
if( !Array.isArray( doc.parent[attr] ) || !doc.parent[attr].length ){
continue;
}
// add each id as a key in the ids object
doc.parent[attr].forEach( addId );
}
});
// return a deduplicated array of ids
return Object.keys( ids );
}
// update documents using a translation map
function updateDocs( req, res, translations ){
// sanity check arguments
if( !req || !res || !res.data || !translations ){ return; }
// this is the target language we will be translating to
var requestLanguage = req.language.iso6393;
// iterate over response documents
res.data.forEach( function( doc, p ){
// skip invalid records
if( !doc || !doc.parent ){ return; }
// iterate over doc.parent.* attributes
for( var attr in doc.parent ){
// match only attributes ending with '_id'
var match = attr.match(/^(.*)_id$/);
if( !match ){ continue; }
// adminKey is the property name without the '_id'
// eg. for 'country_id', adminKey would be 'country'.
var adminKey = match[1];
var adminValues = doc.parent[adminKey];
// skip invalid/empty arrays
if( !Array.isArray( adminValues ) || !adminValues.length ){ continue; }
// iterate over adminValues (it's an array and can have more than one value)
for( var i in adminValues ){
// find the corresponding key from the '_id' Array
var id = doc.parent[attr][i];
if( !id ){ continue; }
// id not found in translation service response
if( !translations.hasOwnProperty( id ) ){
logger.error( '[language] [error]', 'failed to find translations for', id );
continue;
}
// skip invalid records
if( !translations[id].hasOwnProperty( 'names' ) ){ continue; }
// requested language is not available
if( !translations[id].names.hasOwnProperty( requestLanguage ) ){
logger.info( '[language] [info]', 'missing translation', requestLanguage, id );
continue;
}
// translate 'parent.*' property
adminValues[i] = translations[id].names[ requestLanguage ];
// if the record is an admin record we also translate
// the 'name.default' property.
if( adminKey === doc.layer ){
doc.name.default = translations[id].names[ requestLanguage ];
}
}
}
});
}
// boolean function to check if changing the language is required
function isLanguageChangeRequired( req, res ){
return req && res && res.data && res.data.length &&
req.hasOwnProperty('language');
}
module.exports = setup;
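A hedged sketch of what the translation step does to a single result (document and translation map invented; updateDocs is module-internal, so this is illustrative only), assuming req.language.iso6393 is 'eng':

const req = { language: { iso6393: 'eng' } };
const res = {
  data: [ {
    layer:  'locality',
    name:   { default: 'München' },
    parent: {
      locality:    [ 'München' ],
      locality_id: [ '101748479' ]
    }
  } ]
};
const translations = {
  '101748479': { names: { default: 'München', eng: 'Munich' } }
};

updateDocs( req, res, translations );
// res.data[0].parent.locality[0] is now 'Munich'
// res.data[0].name.default is also 'Munich', because 'locality' is the record's own layer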

7
middleware/interpolate.js

@ -43,6 +43,13 @@ function setup() {
res.data = results;
}
// sort the results to ensure that addresses show up higher than street centroids
res.data = res.data.sort((a, b) => {
if (a.layer === 'address' && b.layer !== 'address') { return -1; }
if (a.layer !== 'address' && b.layer === 'address') { return 1; }
return 0;
});
// log the execution time, continue
logger.info( '[interpolation] [took]', (new Date()).getTime() - timer, 'ms' );
next();

109
middleware/requestLanguage.js

@ -0,0 +1,109 @@
const _ = require('lodash');
const logger = require( 'pelias-logger' ).get( 'api' );
/**
this middleware is responsible for negotiating HTTP locales for incoming
browser requests by reading the 'lang' querystring param or the 'Accept-Language' request header.
the preferred language will then be available on the $req object:
eg. for '?lang=fr' or 'Accept-Language: fr':
```
console.log( req.language );
{
name: 'French',
type: 'living',
scope: 'individual',
iso6393: 'fra',
iso6392B: 'fre',
iso6392T: 'fra',
iso6391: 'fr',
defaulted: false
}
```
for configuration options see:
https://github.com/florrain/locale
**/
const locale = require('locale');
/**
BCP47 language tags can contain three parts:
1. A language subtag (en, zh).
2. A script subtag (Hant, Latn).
3. A region subtag (US, CN).
at time of writing we will only be concerned with 1. (the language subtag) with
the intention of being compatible with the language standard of whosonfirst data.
whosonfirst data is in ISO 639-3 format so we will need to configure the library
to support all ISO 639-1 (2 char) codes and convert them to ISO 639-3 (3 char) codes.
see: https://github.com/whosonfirst/whosonfirst-names
**/
const iso6393 = require('iso-639-3');
// create a dictionary which maps the ISO 639-1 language subtags to a map
// of its representation in several different standards.
const language = {};
iso6393.filter( i => !!i.iso6391 ).forEach( i => language[ i.iso6391 ] = i );
// a pre-processed locale list of language subtags we support (all of them).
const allLocales = new locale.Locales( Object.keys( language ) );
// return the middleware
module.exports = function middleware( req, res, next ){
// init an object to store clean (sanitized) input parameters if not initialized
req.clean = req.clean || {};
// init warnings array if not initialized
req.warnings = req.warnings || [];
// set defaults
var lang = language.en;
var locales, best, via = 'default';
// input language via query param
if( via === 'default' && req.query && req.query.lang ){
locales = new locale.Locales( req.query.lang );
best = locales.best( allLocales );
if( best.defaulted ){
req.warnings.push( 'invalid language provided via querystring' );
} else {
lang = language[ best.language ];
via = 'querystring';
}
}
// input language via request headers
if( via === 'default' && req.headers && req.headers['accept-language'] ){
locales = new locale.Locales( req.headers['accept-language'] );
best = locales.best( allLocales );
if( best.defaulted ){
req.warnings.push( 'invalid language provided via header' );
} else {
lang = language[ best.language ];
via = 'header';
}
}
// set $req.language property
req.language = _.clone( lang );
req.language.defaulted = ( via === 'default' );
// set $req.clean property in order to print language info in response header
req.clean.lang = {
name: req.language.name,
iso6391: req.language.iso6391,
iso6393: req.language.iso6393,
defaulted: req.language.defaulted
};
// logging
logger.info( '[lang] \'%s\' via \'%s\'', lang.iso6391, via );
next();
};

34
package.json

@ -14,11 +14,12 @@
"lint": "jshint .",
"start": "node index.js",
"test": "npm run unit",
"travis": "npm test",
"travis": "npm run check-dependencies && npm test",
"unit": "./bin/units",
"validate": "npm ls",
"semantic-release": "semantic-release pre && npm publish && semantic-release post",
"config": "node -e \"console.log(JSON.stringify(require( 'pelias-config' ).generate(require('./schema')), null, 2))\""
"config": "node -e \"console.log(JSON.stringify(require( 'pelias-config' ).generate(require('./schema')), null, 2))\"",
"check-dependencies": "node_modules/.bin/npm-check --production --ignore pelias-interpolation"
},
"repository": {
"type": "git",
@ -47,28 +48,34 @@
"geojson": "^0.4.0",
"geojson-extent": "^0.3.1",
"geolib": "^2.0.18",
"iso3166-1": "^0.2.3",
"iso-639-3": "^1.0.0",
"iso3166-1": "^0.3.0",
"joi": "^10.1.0",
"locale": "^0.1.0",
"lodash": "^4.5.0",
"markdown": "0.5.0",
"morgan": "1.8.1",
"pelias-categories": "1.1.0",
"pelias-config": "2.8.0",
"pelias-labels": "1.5.1",
"pelias-logger": "0.1.0",
"pelias-model": "4.5.1",
"pelias-query": "8.13.0",
"pelias-text-analyzer": "1.7.2",
"pelias-config": "2.9.0",
"pelias-categories": "1.2.0",
"pelias-labels": "1.6.0",
"pelias-logger": "0.2.0",
"pelias-mock-logger": "^1.0.1",
"pelias-model": "4.6.0",
"pelias-query": "8.15.0",
"pelias-text-analyzer": "1.7.3",
"predicates": "^1.0.1",
"retry": "^0.10.1",
"stats-lite": "2.0.3",
"request": "^2.79.0",
"stats-lite": "^2.0.4",
"superagent": "^3.2.1",
"through2": "^2.0.3"
},
"devDependencies": {
"ciao": "^0.6.0",
"ciao": "^1.0.0",
"difflet": "^1.0.1",
"istanbul": "^0.4.2",
"jshint": "^2.5.6",
"npm-check": "^5.4.0",
"nsp": "^2.2.0",
"precommit-hook": "^3.0.0",
"proxyquire": "^1.7.10",
@ -82,6 +89,7 @@
"pre-commit": [
"lint",
"validate",
"test"
"test",
"check-dependencies"
]
}

66
routes/v1.js

@ -1,6 +1,10 @@
var Router = require('express').Router;
var elasticsearch = require('elasticsearch');
const all = require('predicates').all;
const any = require('predicates').any;
const not = require('predicates').not;
/** ----------------------- sanitizers ----------------------- **/
var sanitizers = {
autocomplete: require('../sanitizer/autocomplete'),
@ -14,12 +18,14 @@ var sanitizers = {
/** ----------------------- middleware ------------------------ **/
var middleware = {
calcSize: require('../middleware/sizeCalculator')
calcSize: require('../middleware/sizeCalculator'),
requestLanguage: require('../middleware/requestLanguage')
};
/** ----------------------- controllers ----------------------- **/
var controllers = {
coarse_reverse: require('../controller/coarse_reverse'),
mdToHTML: require('../controller/markdownToHtml'),
place: require('../controller/place'),
search: require('../controller/search'),
@ -52,9 +58,18 @@ var postProc = {
sendJSON: require('../middleware/sendJSON'),
parseBoundingBox: require('../middleware/parseBBox'),
normalizeParentIds: require('../middleware/normalizeParentIds'),
assignLabels: require('../middleware/assignLabels')
assignLabels: require('../middleware/assignLabels'),
changeLanguage: require('../middleware/changeLanguage')
};
// predicates that drive whether controller/search runs
const hasResponseData = require('../controller/predicates/has_response_data');
const hasRequestErrors = require('../controller/predicates/has_request_errors');
const isCoarseReverse = require('../controller/predicates/is_coarse_reverse');
// shorthand for standard early-exit conditions
const hasResponseDataOrRequestErrors = any(hasResponseData, hasRequestErrors);
/**
* Append routes to app
*
@ -64,6 +79,24 @@ var postProc = {
function addRoutes(app, peliasConfig) {
const esclient = elasticsearch.Client(peliasConfig.esclient);
const isPipServiceEnabled = require('../controller/predicates/is_pip_service_enabled')(peliasConfig.api.pipService);
const pipService = require('../service/pointinpolygon')(peliasConfig.api.pipService);
const coarse_reverse_should_execute = all(
not(hasRequestErrors), isPipServiceEnabled, isCoarseReverse
);
// execute under the following conditions:
// - there are no errors or data
// - request is not coarse OR pip service is disabled
const original_reverse_should_execute = all(
not(hasResponseDataOrRequestErrors),
any(
not(isCoarseReverse),
not(isPipServiceEnabled)
)
);
var base = '/v1/';
/** ------------------------- routers ------------------------- **/
@ -77,49 +110,54 @@ function addRoutes(app, peliasConfig) {
]),
search: createRouter([
sanitizers.search.middleware,
middleware.requestLanguage,
middleware.calcSize(),
// 3rd parameter is which query module to use, use fallback/geodisambiguation
// first, then use original search strategy if first query didn't return anything
controllers.search(peliasConfig.api, esclient, queries.libpostal),
controllers.search(peliasConfig.api, esclient, queries.libpostal, not(hasResponseDataOrRequestErrors)),
sanitizers.search_fallback.middleware,
controllers.search(peliasConfig.api, esclient, queries.fallback_to_old_prod),
controllers.search(peliasConfig.api, esclient, queries.fallback_to_old_prod, not(hasResponseDataOrRequestErrors)),
postProc.trimByGranularity(),
postProc.distances('focus.point.'),
postProc.confidenceScores(peliasConfig.api),
postProc.confidenceScoresFallback(),
postProc.dedupe(),
postProc.interpolate(),
postProc.dedupe(),
postProc.accuracy(),
postProc.localNamingConventions(),
postProc.renamePlacenames(),
postProc.parseBoundingBox(),
postProc.normalizeParentIds(),
postProc.changeLanguage(),
postProc.assignLabels(),
postProc.geocodeJSON(peliasConfig.api, base),
postProc.sendJSON
]),
structured: createRouter([
sanitizers.structured_geocoding.middleware,
middleware.requestLanguage,
middleware.calcSize(),
controllers.search(peliasConfig.api, esclient, queries.structured_geocoding),
controllers.search(peliasConfig.api, esclient, queries.structured_geocoding, not(hasResponseDataOrRequestErrors)),
postProc.trimByGranularityStructured(),
postProc.distances('focus.point.'),
postProc.confidenceScores(peliasConfig.api),
postProc.confidenceScoresFallback(),
postProc.dedupe(),
postProc.interpolate(),
postProc.dedupe(),
postProc.accuracy(),
postProc.localNamingConventions(),
postProc.renamePlacenames(),
postProc.parseBoundingBox(),
postProc.normalizeParentIds(),
postProc.changeLanguage(),
postProc.assignLabels(),
postProc.geocodeJSON(peliasConfig.api, base),
postProc.sendJSON
]),
autocomplete: createRouter([
sanitizers.autocomplete.middleware,
controllers.search(peliasConfig.api, esclient, queries.autocomplete),
middleware.requestLanguage,
controllers.search(peliasConfig.api, esclient, queries.autocomplete, not(hasResponseDataOrRequestErrors)),
postProc.distances('focus.point.'),
postProc.confidenceScores(peliasConfig.api),
postProc.dedupe(),
@ -128,14 +166,17 @@ function addRoutes(app, peliasConfig) {
postProc.renamePlacenames(),
postProc.parseBoundingBox(),
postProc.normalizeParentIds(),
postProc.changeLanguage(),
postProc.assignLabels(),
postProc.geocodeJSON(peliasConfig.api, base),
postProc.sendJSON
]),
reverse: createRouter([
sanitizers.reverse.middleware,
middleware.requestLanguage,
middleware.calcSize(),
controllers.search(peliasConfig.api, esclient, queries.reverse),
controllers.coarse_reverse(pipService, coarse_reverse_should_execute),
controllers.search(peliasConfig.api, esclient, queries.reverse, original_reverse_should_execute),
postProc.distances('point.'),
// reverse confidence scoring depends on distance from origin
// so it must be calculated first
@ -146,14 +187,16 @@ function addRoutes(app, peliasConfig) {
postProc.renamePlacenames(),
postProc.parseBoundingBox(),
postProc.normalizeParentIds(),
postProc.changeLanguage(),
postProc.assignLabels(),
postProc.geocodeJSON(peliasConfig.api, base),
postProc.sendJSON
]),
nearby: createRouter([
sanitizers.nearby.middleware,
middleware.requestLanguage,
middleware.calcSize(),
controllers.search(peliasConfig.api, esclient, queries.reverse),
controllers.search(peliasConfig.api, esclient, queries.reverse, not(hasResponseDataOrRequestErrors)),
postProc.distances('point.'),
// reverse confidence scoring depends on distance from origin
// so it must be calculated first
@ -164,18 +207,21 @@ function addRoutes(app, peliasConfig) {
postProc.renamePlacenames(),
postProc.parseBoundingBox(),
postProc.normalizeParentIds(),
postProc.changeLanguage(),
postProc.assignLabels(),
postProc.geocodeJSON(peliasConfig.api, base),
postProc.sendJSON
]),
place: createRouter([
sanitizers.place.middleware,
middleware.requestLanguage,
controllers.place(peliasConfig.api, esclient),
postProc.accuracy(),
postProc.localNamingConventions(),
postProc.renamePlacenames(),
postProc.parseBoundingBox(),
postProc.normalizeParentIds(),
postProc.changeLanguage(),
postProc.assignLabels(),
postProc.geocodeJSON(peliasConfig.api, base),
postProc.sendJSON

5
sanitizer/_synthesize_analysis.js

@ -50,10 +50,7 @@ function sanitize( raw, clean ){
}, {});
if (isPostalCodeOnly(clean.parsed_text)) {
messages.errors.push('postalcode-only inputs are not supported');
}
else if (_.isEmpty(Object.keys(clean.parsed_text))) {
if (_.isEmpty(Object.keys(clean.parsed_text))) {
messages.errors.push(
`at least one of the following fields is required: ${Object.keys(fields).join(', ')}`);
}

3
schema.js

@ -25,7 +25,8 @@ module.exports = Joi.object().keys({
requestRetries: Joi.number().integer().min(0),
localization: Joi.object().keys({
flipNumberAndStreetCountries: Joi.array().items(Joi.string().regex(/^[A-Z]{3}$/))
}).unknown(false)
}).unknown(false),
pipService: Joi.string().uri({ scheme: /https?/ })
}).requiredKeys('version', 'indexName', 'host').unknown(true),
esclient: Joi.object().keys({

93
service/language.js

@ -0,0 +1,93 @@
var logger = require( 'pelias-logger' ).get( 'api' ),
request = require( 'superagent' ),
peliasConfig = require( 'pelias-config' );
/**
language substitution service client
this file provides a 'transport' which can be used to access the language
service via a network connection.
the exported method for this module checks pelias-config for a configuration block such as:
"language": {
"client": {
"adapter": "http",
"host": "http://localhost:6100"
}
}
for more info on running the service see: https://github.com/pelias/placeholder
**/
/**
NullTransport
disables the service completely
**/
function NullTransport(){}
NullTransport.prototype.query = function( ids, cb ){
cb(); // no-op
};
/**
HttpTransport
allows the api to be used via a remote web service
**/
function HttpTransport( host, settings ){
this.query = function( ids, cb ){
request
.get( host + '/parser/findbyid' )
.set( 'Accept', 'application/json' )
.query({ ids: Array.isArray( ids ) ? ids.join(',') : '' })
.timeout( settings && settings.timeout || 1000 )
.end( function( err, res ){
if( err || !res ){ return cb( err ); }
if( 200 !== res.status ){ return cb( 'non 200 status' ); }
return cb( null, res.body );
});
};
}
HttpTransport.prototype.query = function( ids, cb ){
throw new Error( 'language: transport not connected' );
};
/**
Setup
allows instantiation of transport depending on configuration and preference
**/
module.exports.findById = function setup(){
// user config
var config = peliasConfig.generate();
// ensure config variables set correctly
if( !config.hasOwnProperty('language') || !config.language.hasOwnProperty('client') ){
logger.warn( 'language: configuration not found' );
}
// valid configuration found
else {
// get adapter settings from config
var settings = config.language.client;
// http adapter
if( 'http' === settings.adapter && settings.hasOwnProperty('host') ){
logger.info( 'language: using http transport:', settings.host );
if( settings.hasOwnProperty('timeout') ){
return new HttpTransport( settings.host, { timeout: parseInt( settings.timeout, 10 ) } );
}
return new HttpTransport( settings.host );
}
}
// default adapter
logger.info( 'language: using null transport' );
return new NullTransport();
};
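A minimal usage sketch (ids hypothetical), mirroring how middleware/changeLanguage.js drives the transport:

const transport = require('./service/language').findById();

transport.query( [ 101748479, 85633111 ], function( err, translations ){
  // with the NullTransport both arguments are undefined;
  // with the HttpTransport, translations maps each id to an object
  // containing a 'names' map, as shown in the middleware's doc comment
});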

44
service/pointinpolygon.js

@ -0,0 +1,44 @@
const logger = require( 'pelias-logger' ).get( 'pointinpolygon' );
const request = require('request');
const _ = require('lodash');
module.exports = (url) => {
if (!_.isEmpty(url)) {
logger.info(`using point-in-polygon service at ${url}`);
return function pointinpolygon( centroid, callback ) {
const requestUrl = `${url}/${centroid.lon}/${centroid.lat}`;
request.get(requestUrl, (err, response, body) => {
if (err) {
logger.error(JSON.stringify(err));
callback(err);
}
else if (response.statusCode === 200) {
try {
const parsed = JSON.parse(body);
callback(err, parsed);
}
catch (err) {
logger.error(`${requestUrl}: could not parse response body: ${body}`);
callback(`${requestUrl} returned status 200 but with non-JSON response: ${body}`);
}
}
else {
logger.error(`${requestUrl} returned status ${response.statusCode}: ${body}`);
callback(`${requestUrl} returned status ${response.statusCode}: ${body}`);
}
});
};
} else {
logger.warn('point-in-polygon service disabled');
return (centroid, callback) => {
callback(`point-in-polygon service disabled, unable to resolve ${JSON.stringify(centroid)}`);
};
}
};
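A minimal usage sketch (URL and coordinates hypothetical):

const pip = require('./service/pointinpolygon')('http://localhost:4200');

pip( { lat: 40.744243, lon: -73.990342 }, (err, results) => {
  // on success, results is the parsed JSON body keyed by layer,
  // e.g. { locality: [ ... ], localadmin: [ ... ] }, which is the
  // shape controller/coarse_reverse.js expects
});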

37
test/ciao/autocomplete/language_default.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/autocomplete?text=example'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

38
test/ciao/autocomplete/language_header_invalid.coffee

@ -0,0 +1,38 @@
#> language
path: '/v1/autocomplete?text=example'
headers: 'Accept-Language': 'example'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
json.geocoding.warnings.should.eql [ 'invalid language provided via header' ]
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

38
test/ciao/autocomplete/language_header_valid.coffee

@ -0,0 +1,38 @@
#> language
path: '/v1/autocomplete?text=example'
headers: 'Accept-Language': 'fr-CH, fr;q=0.9, en;q=0.8, de;q=0.7, *;q=0.5'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
defaulted: false,
iso6391: 'fr',
iso6393: 'fra',
name: 'French'
}

37
test/ciao/autocomplete/language_querystring_invalid.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/autocomplete?lang=example&text=example'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
json.geocoding.warnings.should.eql [ 'invalid language provided via querystring' ]
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

37
test/ciao/autocomplete/language_querystring_valid.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/autocomplete?lang=fr&text=example'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
defaulted: false,
iso6391: 'fr',
iso6393: 'fra',
name: 'French'
}

3
test/ciao/autocomplete/layers_alias_coarse.coffee

@ -44,5 +44,6 @@ json.geocoding.query.layers.should.eql [ "continent",
"borough",
"neighbourhood",
"microhood",
"disputed"
"disputed",
"postalcode"
]

2
test/ciao/autocomplete/layers_invalid.coffee

@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed' ]
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
#? expected warnings
should.not.exist json.geocoding.warnings

2
test/ciao/autocomplete/layers_mix_invalid_valid.coffee

@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed' ]
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
#? expected warnings
should.not.exist json.geocoding.warnings

37
test/ciao/place/language_default.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/place?ids=geonames:venue:1'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

38
test/ciao/place/language_header_invalid.coffee

@ -0,0 +1,38 @@
#> language
path: '/v1/place?ids=geonames:venue:1'
headers: 'Accept-Language': 'example'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
json.geocoding.warnings.should.eql [ 'invalid language provided via header' ]
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

38
test/ciao/place/language_header_valid.coffee

@ -0,0 +1,38 @@
#> language
path: '/v1/place?ids=geonames:venue:1'
headers: 'Accept-Language': 'fr-CH, fr;q=0.9, en;q=0.8, de;q=0.7, *;q=0.5'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
defaulted: false,
iso6391: 'fr',
iso6393: 'fra',
name: 'French'
}

37
test/ciao/place/language_querystring_invalid.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/place?lang=example&ids=geonames:venue:1'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
json.geocoding.warnings.should.eql [ 'invalid language provided via querystring' ]
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

37
test/ciao/place/language_querystring_valid.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/place?lang=fr&ids=geonames:venue:1'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
defaulted: false,
iso6391: 'fr',
iso6393: 'fra',
name: 'French'
}

37
test/ciao/reverse/language_default.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/reverse?point.lat=1&point.lon=2'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

38
test/ciao/reverse/language_header_invalid.coffee

@ -0,0 +1,38 @@
#> language
path: '/v1/reverse?point.lat=1&point.lon=2'
headers: 'Accept-Language': 'example'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
json.geocoding.warnings.should.eql [ 'invalid language provided via header' ]
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

38
test/ciao/reverse/language_header_valid.coffee

@ -0,0 +1,38 @@
#> language
path: '/v1/reverse?point.lat=1&point.lon=2'
headers: 'Accept-Language': 'fr-CH, fr;q=0.9, en;q=0.8, de;q=0.7, *;q=0.5'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
defaulted: false,
iso6391: 'fr',
iso6393: 'fra',
name: 'French'
}

37
test/ciao/reverse/language_querystring_invalid.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/reverse?lang=example&point.lat=1&point.lon=2'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
json.geocoding.warnings.should.eql [ 'invalid language provided via querystring' ]
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

37
test/ciao/reverse/language_querystring_valid.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/reverse?lang=fr&point.lat=1&point.lon=2'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
defaulted: false,
iso6391: 'fr',
iso6393: 'fra',
name: 'French'
}

3
test/ciao/reverse/layers_alias_coarse.coffee

@ -43,5 +43,6 @@ json.geocoding.query.layers.should.eql [ "continent",
"borough",
"neighbourhood",
"microhood",
"disputed"
"disputed",
"postalcode"
]

2
test/ciao/reverse/layers_invalid.coffee

@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed' ]
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
#? expected warnings
should.not.exist json.geocoding.warnings

2
test/ciao/reverse/layers_mix_invalid_valid.coffee

@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed' ]
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
#? expected warnings
should.not.exist json.geocoding.warnings

2
test/ciao/search/address_parsing.coffee

@ -35,7 +35,7 @@ json.geocoding.query['size'].should.eql 10
#? address parsing
json.geocoding.query.parsed_text['number'].should.eql '30'
json.geocoding.query.parsed_text['street'].should.eql 'w 26th st'
json.geocoding.query.parsed_text['state'].should.eql 'NY'
json.geocoding.query.parsed_text['state'].should.eql 'ny'
json.features[0].properties.confidence.should.eql 1
json.features[0].properties.match_type.should.eql "exact"

37
test/ciao/search/language_default.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/search?text=example'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

38
test/ciao/search/language_header_invalid.coffee

@ -0,0 +1,38 @@
#> language
path: '/v1/search?text=example'
headers: 'Accept-Language': 'example'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
json.geocoding.warnings.should.eql [ 'invalid language provided via header' ]
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

38
test/ciao/search/language_header_valid.coffee

@ -0,0 +1,38 @@
#> language
path: '/v1/search?text=example'
headers: 'Accept-Language': 'fr-CH, fr;q=0.9, en;q=0.8, de;q=0.7, *;q=0.5'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
defaulted: false,
iso6391: 'fr',
iso6393: 'fra',
name: 'French'
}

37
test/ciao/search/language_querystring_invalid.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/search?lang=example&text=example'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
json.geocoding.warnings.should.eql [ 'invalid language provided via querystring' ]
#? language
json.geocoding.query['lang'].should.eql {
name: 'English',
iso6391: 'en',
iso6393: 'eng',
defaulted: true
}

37
test/ciao/search/language_querystring_valid.coffee

@ -0,0 +1,37 @@
#> language
path: '/v1/search?lang=fr&text=example'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? language
json.geocoding.query['lang'].should.eql {
defaulted: false,
iso6391: 'fr',
iso6393: 'fra',
name: 'French'
}

3
test/ciao/search/layers_alias_coarse.coffee

@ -44,5 +44,6 @@ json.geocoding.query.layers.should.eql [ "continent",
"borough",
"neighbourhood",
"microhood",
"disputed"
"disputed",
"postalcode"
]

2
test/ciao/search/layers_invalid.coffee

@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed' ]
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
#? expected warnings
should.not.exist json.geocoding.warnings

2
test/ciao/search/layers_mix_invalid_valid.coffee

@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed' ]
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,street,country,macroregion,region,county,localadmin,locality,borough,neighbourhood,continent,dependency,macrocounty,macrohood,microhood,disputed,postalcode' ]
#? expected warnings
should.not.exist json.geocoding.warnings

578
test/unit/controller/coarse_reverse.js

@ -0,0 +1,578 @@
'use strict';
const setup = require('../../../controller/coarse_reverse');
const proxyquire = require('proxyquire').noCallThru();
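// exercises controller/coarse_reverse with a stubbed service and should_execute predicate:
// interface, early exit, service errors, successful result mapping, and no results at the requested layer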
module.exports.tests = {};
module.exports.tests.interface = (test, common) => {
test('valid interface', (t) => {
t.equal(typeof setup, 'function', 'setup is a function');
t.equal(typeof setup(), 'function', 'setup returns a controller');
t.end();
});
};
module.exports.tests.early_exit_conditions = (test, common) => {
test('should_execute returning false should not call service', (t) => {
const service = () => {
throw Error('service should not have been called');
};
const should_execute = () => { return false; };
const controller = setup(service, should_execute);
const req = {
clean: {
layers: ['locality']
},
errors: ['error']
};
// verify that next was called
let next_was_called = false;
const next = () => {
next_was_called = true;
};
// passing res=undefined verifies that it wasn't interacted with
t.doesNotThrow(controller.bind(null, req, undefined, next));
t.ok(next_was_called);
t.end();
});
};
module.exports.tests.error_conditions = (test, common) => {
test('service error should log and call next', (t) => {
const service = (point, callback) => {
callback('this is an error');
};
const logger = require('pelias-mock-logger')();
const should_execute = () => { return true; };
const controller = proxyquire('../../../controller/coarse_reverse', {
'pelias-logger': logger
})(service, should_execute);
const req = {
clean: {
layers: ['locality'],
point: {
lat: 12.121212,
lon: 21.212121
}
}
};
// verify that next was called
let next_was_called = false;
const next = () => {
next_was_called = true;
};
// passing res=undefined verifies that it wasn't interacted with
controller(req, undefined, next);
t.ok(logger.isErrorMessage('this is an error'));
t.ok(next_was_called);
t.end();
});
};
module.exports.tests.success_conditions = (test, common) => {
test('service returning results should use first entry for each layer', (t) => {
const service = (point, callback) => {
const results = {
neighbourhood: [
{
id: 10,
name: 'neighbourhood name',
abbr: 'neighbourhood abbr',
centroid: {
lat: 12.121212,
lon: 21.212121
},
bounding_box: '-76.345902,40.006751,-76.254038,40.072939'
},
{ id: 11, name: 'neighbourhood name 2'}
],
borough: [
{ id: 20, name: 'borough name', abbr: 'borough abbr'},
{ id: 21, name: 'borough name 2'}
],
locality: [
{ id: 30, name: 'locality name', abbr: 'locality abbr'},
{ id: 31, name: 'locality name 2'}
],
localadmin: [
{ id: 40, name: 'localadmin name', abbr: 'localadmin abbr'},
{ id: 41, name: 'localadmin name 2'}
],
county: [
{ id: 50, name: 'county name', abbr: 'county abbr'},
{ id: 51, name: 'county name 2'}
],
macrocounty: [
{ id: 60, name: 'macrocounty name', abbr: 'macrocounty abbr'},
{ id: 61, name: 'macrocounty name 2'}
],
region: [
{ id: 70, name: 'region name', abbr: 'region abbr'},
{ id: 71, name: 'region name 2'}
],
macroregion: [
{ id: 80, name: 'macroregion name', abbr: 'macroregion abbr'},
{ id: 81, name: 'macroregion name 2'}
],
dependency: [
{ id: 90, name: 'dependency name', abbr: 'dependency abbr'},
{ id: 91, name: 'dependency name 2'}
],
country: [
{ id: 100, name: 'country name', abbr: 'xyz'},
{ id: 101, name: 'country name 2'}
]
};
callback(undefined, results);
};
const logger = require('pelias-mock-logger')();
const should_execute = () => { return true; };
const controller = proxyquire('../../../controller/coarse_reverse', {
'pelias-logger': logger
})(service, should_execute);
const req = {
clean: {
layers: ['neighbourhood'],
point: {
lat: 12.121212,
lon: 21.212121
}
}
};
const res = { };
// verify that next was called
let next_was_called = false;
const next = () => {
next_was_called = true;
};
controller(req, res, next);
const expected = {
meta: {},
data: [
{
_id: '10',
_type: 'neighbourhood',
layer: 'neighbourhood',
source: 'whosonfirst',
source_id: '10',
name: {
'default': 'neighbourhood name'
},
phrase: {
'default': 'neighbourhood name'
},
parent: {
neighbourhood: ['neighbourhood name'],
neighbourhood_id: ['10'],
neighbourhood_a: ['neighbourhood abbr'],
borough: ['borough name'],
borough_id: ['20'],
borough_a: ['borough abbr'],
locality: ['locality name'],
locality_id: ['30'],
locality_a: ['locality abbr'],
localadmin: ['localadmin name'],
localadmin_id: ['40'],
localadmin_a: ['localadmin abbr'],
county: ['county name'],
county_id: ['50'],
county_a: ['county abbr'],
macrocounty: ['macrocounty name'],
macrocounty_id: ['60'],
macrocounty_a: ['macrocounty abbr'],
region: ['region name'],
region_id: ['70'],
region_a: ['region abbr'],
macroregion: ['macroregion name'],
macroregion_id: ['80'],
macroregion_a: ['macroregion abbr'],
dependency: ['dependency name'],
dependency_id: ['90'],
dependency_a: ['dependency abbr'],
country: ['country name'],
country_id: ['100'],
country_a: ['xyz']
},
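// alpha3 appears to be the upper-cased country abbreviation ('xyz' in the stubbed results above)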
alpha3: 'XYZ',
center_point: {
lat: 12.121212,
lon: 21.212121
},
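// the stub's 'minLon,minLat,maxLon,maxLat' bounding_box string is expected back in this JSON min/max form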
bounding_box: '{"min_lat":40.006751,"max_lat":40.072939,"min_lon":-76.345902,"max_lon":-76.254038}'
}
]
};
t.deepEquals(res, expected);
t.notOk(logger.hasErrorMessages());
t.ok(next_was_called);
t.end();
});
test('layers missing from results should be ignored', (t) => {
const service = (point, callback) => {
const results = {
neighbourhood: [
{
id: 10,
name: 'neighbourhood name',
abbr: 'neighbourhood abbr',
centroid: {
lat: 12.121212,
lon: 21.212121
},
bounding_box: '-76.345902,40.006751,-76.254038,40.072939'
}
]
};
callback(undefined, results);
};
const logger = require('pelias-mock-logger')();
const should_execute = () => { return true; };
const controller = proxyquire('../../../controller/coarse_reverse', {
'pelias-logger': logger
})(service, should_execute);
const req = {
clean: {
layers: ['neighbourhood'],
point: {
lat: 12.121212,
lon: 21.212121
}
}
};
const res = { };
// verify that next was called
let next_was_called = false;
const next = () => {
next_was_called = true;
};
controller(req, res, next);
const expected = {
meta: {},
data: [
{
_id: '10',
_type: 'neighbourhood',
layer: 'neighbourhood',
source: 'whosonfirst',
source_id: '10',
name: {
'default': 'neighbourhood name'
},
phrase: {
'default': 'neighbourhood name'
},
parent: {
neighbourhood: ['neighbourhood name'],
neighbourhood_id: ['10'],
neighbourhood_a: ['neighbourhood abbr']
},
center_point: {
lat: 12.121212,
lon: 21.212121
},
bounding_box: '{"min_lat":40.006751,"max_lat":40.072939,"min_lon":-76.345902,"max_lon":-76.254038}'
}
]
};
t.deepEquals(res, expected);
t.notOk(logger.hasErrorMessages());
t.ok(next_was_called);
t.end();
});
test('most granular layer missing centroid should not set center_point', (t) => {
const service = (point, callback) => {
const results = {
neighbourhood: [
{
id: 10,
name: 'neighbourhood name',
abbr: 'neighbourhood abbr',
bounding_box: '-76.345902,40.006751,-76.254038,40.072939'
}
]
};
callback(undefined, results);
};
const logger = require('pelias-mock-logger')();
const should_execute = () => { return true; };
const controller = proxyquire('../../../controller/coarse_reverse', {
'pelias-logger': logger
})(service, should_execute);
const req = {
clean: {
layers: ['neighbourhood'],
point: {
lat: 12.121212,
lon: 21.212121
}
}
};
const res = { };
// verify that next was called
let next_was_called = false;
const next = () => {
next_was_called = true;
};
controller(req, res, next);
const expected = {
meta: {},
data: [
{
_id: '10',
_type: 'neighbourhood',
layer: 'neighbourhood',
source: 'whosonfirst',
source_id: '10',
name: {
'default': 'neighbourhood name'
},
phrase: {
'default': 'neighbourhood name'
},
parent: {
neighbourhood: ['neighbourhood name'],
neighbourhood_id: ['10'],
neighbourhood_a: ['neighbourhood abbr']
},
bounding_box: '{"min_lat":40.006751,"max_lat":40.072939,"min_lon":-76.345902,"max_lon":-76.254038}'
}
]
};
t.deepEquals(res, expected);
t.notOk(logger.hasErrorMessages());
t.ok(next_was_called);
t.end();
});
test('most granular layer missing bounding_box should not set bounding_box', (t) => {
const service = (point, callback) => {
const results = {
neighbourhood: [
{
id: 10,
name: 'neighbourhood name',
abbr: 'neighbourhood abbr',
centroid: {
lat: 12.121212,
lon: 21.212121
}
}
]
};
callback(undefined, results);
};
const logger = require('pelias-mock-logger')();
const should_execute = () => { return true; };
const controller = proxyquire('../../../controller/coarse_reverse', {
'pelias-logger': logger
})(service, should_execute);
const req = {
clean: {
layers: ['neighbourhood'],
point: {
lat: 12.121212,
lon: 21.212121
}
}
};
const res = { };
// verify that next was called
let next_was_called = false;
const next = () => {
next_was_called = true;
};
controller(req, res, next);
const expected = {
meta: {},
data: [
{
_id: '10',
_type: 'neighbourhood',
layer: 'neighbourhood',
source: 'whosonfirst',
source_id: '10',
name: {
'default': 'neighbourhood name'
},
phrase: {
'default': 'neighbourhood name'
},
parent: {
neighbourhood: ['neighbourhood name'],
neighbourhood_id: ['10'],
neighbourhood_a: ['neighbourhood abbr']
},
center_point: {
lat: 12.121212,
lon: 21.212121
}
}
]
};
t.deepEquals(res, expected);
t.notOk(logger.hasErrorMessages());
t.ok(next_was_called);
t.end();
});
};
module.exports.tests.failure_conditions = (test, common) => {
test('service returning 0 results at the requested layer should return nothing', (t) => {
const service = (point, callback) => {
// response without neighbourhood results
const results = {
borough: [
{ id: 20, name: 'borough name', abbr: 'borough abbr'},
{ id: 21, name: 'borough name 2'}
],
locality: [
{ id: 30, name: 'locality name', abbr: 'locality abbr'},
{ id: 31, name: 'locality name 2'}
],
localadmin: [
{ id: 40, name: 'localadmin name', abbr: 'localadmin abbr'},
{ id: 41, name: 'localadmin name 2'}
],
county: [
{ id: 50, name: 'county name', abbr: 'county abbr'},
{ id: 51, name: 'county name 2'}
],
macrocounty: [
{ id: 60, name: 'macrocounty name', abbr: 'macrocounty abbr'},
{ id: 61, name: 'macrocounty name 2'}
],
region: [
{ id: 70, name: 'region name', abbr: 'region abbr'},
{ id: 71, name: 'region name 2'}
],
macroregion: [
{ id: 80, name: 'macroregion name', abbr: 'macroregion abbr'},
{ id: 81, name: 'macroregion name 2'}
],
dependency: [
{ id: 90, name: 'dependency name', abbr: 'dependency abbr'},
{ id: 91, name: 'dependency name 2'}
],
country: [
{ id: 100, name: 'country name', abbr: 'xyz'},
{ id: 101, name: 'country name 2'}
]
};
callback(undefined, results);
};
const logger = require('pelias-mock-logger')();
const should_execute = () => { return true; };
const controller = proxyquire('../../../controller/coarse_reverse', {
'pelias-logger': logger
})(service, should_execute);
const req = {
clean: {
layers: ['neighbourhood'],
point: {
lat: 12.121212,
lon: 21.212121
}
}
};
const res = { };
// verify that next was called
let next_was_called = false;
const next = () => {
next_was_called = true;
};
controller(req, res, next);
const expected = {
meta: {},
data: []
};
t.deepEquals(res, expected);
t.notOk(logger.hasErrorMessages());
t.ok(next_was_called);
t.end();
});
};
module.exports.all = (tape, common) => {
function test(name, testFunction) {
return tape(`GET /coarse_reverse ${name}`, testFunction);
}
for( const testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

2
test/unit/controller/place.js

@ -1,6 +1,6 @@
'use strict';
const setup = require('../../../controller/search');
const setup = require('../../../controller/place');
const proxyquire = require('proxyquire').noCallThru();
module.exports.tests = {};

60
test/unit/controller/predicates/has_request_errors.js

@ -0,0 +1,60 @@
'use strict';
const _ = require('lodash');
const has_request_errors = require('../../../../controller/predicates/has_request_errors');
module.exports.tests = {};
module.exports.tests.interface = (test, common) => {
test('valid interface', (t) => {
t.equal(typeof has_request_errors, 'function', 'has_request_errors is a function');
t.end();
});
};
module.exports.tests.true_conditions = (test, common) => {
test('request with non-empty errors should return true', (t) => {
const req = {
errors: ['error']
};
const res = {};
t.ok(has_request_errors(req, res));
t.end();
});
};
module.exports.tests.false_conditions = (test, common) => {
test('request with undefined errors should return false', (t) => {
const req = {};
const res = {};
t.notOk(has_request_errors(req, res));
t.end();
});
test('request with empty errors array should return false', (t) => {
const req = {
errors: []
};
const res = {};
t.notOk(has_request_errors(req, res));
t.end();
});
};
module.exports.all = (tape, common) => {
function test(name, testFunction) {
return tape(`GET /has_request_errors ${name}`, testFunction);
}
for( const testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

60
test/unit/controller/predicates/has_response_data.js

@ -0,0 +1,60 @@
'use strict';
const _ = require('lodash');
const has_response_data = require('../../../../controller/predicates/has_response_data');
module.exports.tests = {};
module.exports.tests.interface = (test, common) => {
test('valid interface', (t) => {
t.equal(typeof has_response_data, 'function', 'has_response_data is a function');
t.end();
});
};
module.exports.tests.true_conditions = (test, common) => {
test('response with non-empty data should return true', (t) => {
const req = {};
const res = {
data: [1]
};
t.ok(has_response_data(req, res));
t.end();
});
};
module.exports.tests.false_conditions = (test, common) => {
test('response with undefined data should return false', (t) => {
const req = {};
const res = {};
t.notOk(has_response_data(req, res));
t.end();
});
test('response with empty data array should return false', (t) => {
const req = {};
const res = {
data: []
};
t.notOk(has_response_data(req, res));
t.end();
});
};
module.exports.all = (tape, common) => {
function test(name, testFunction) {
return tape(`GET /has_response_data ${name}`, testFunction);
}
for( const testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

128
test/unit/controller/predicates/is_coarse_reverse.js

@ -0,0 +1,128 @@
'use strict';
const _ = require('lodash');
const is_coarse_reverse = require('../../../../controller/predicates/is_coarse_reverse');
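// layers treated as coarse by the predicate; 'address', 'street' and 'venue' are the non-coarse layers exercised below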
const coarse_layers = [
'continent',
'country',
'dependency',
'macroregion',
'region',
'locality',
'localadmin',
'macrocounty',
'county',
'macrohood',
'borough',
'neighbourhood',
'microhood',
'disputed'
];
module.exports.tests = {};
module.exports.tests.interface = (test, common) => {
test('valid interface', (t) => {
t.equal(typeof is_coarse_reverse, 'function', 'is_coarse_reverse is a function');
t.end();
});
};
module.exports.tests.false_conditions = (test, common) => {
test('request without layers should return false', (t) => {
const req = {
clean: {}
};
t.notOk(is_coarse_reverse(req));
t.end();
});
test('request with empty layers should return false', (t) => {
const req = {
clean: {
layers: []
}
};
t.notOk(is_coarse_reverse(req));
t.end();
});
test('request with layers of just "address", "street" or "venue" should return false', (t) => {
['address', 'street', 'venue'].forEach((non_coarse_layer) => {
const req = {
clean: {
layers: [non_coarse_layer]
}
};
t.notOk(is_coarse_reverse(req));
});
t.end();
});
test('request with layers containing "address", "street" or "venue" alongside a coarse layer should return false', (t) => {
['address', 'street', 'venue'].forEach((non_coarse_layer) => {
const req = {
clean: {
layers: [_.sample(coarse_layers), non_coarse_layer]
}
};
t.notOk(is_coarse_reverse(req));
});
t.end();
});
test('request with layers containing "address" and "venue" should return false', (t) => {
const req = {
clean: {
layers: ['address', 'venue']
}
};
t.notOk(is_coarse_reverse(req));
t.end();
});
};
module.exports.tests.true_conditions = (test, common) => {
test('request with non-empty layers not containing "address", "street" or "venue" should return true', (t) => {
coarse_layers.forEach((coarse_layer) => {
const req = {
clean: {
layers: [coarse_layer]
}
};
t.ok(is_coarse_reverse(req));
});
t.end();
});
};
module.exports.all = (tape, common) => {
function test(name, testFunction) {
return tape(`GET /is_coarse_reverse ${name}`, testFunction);
}
for( const testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

42
test/unit/controller/predicates/is_pip_service_enabled.js

@ -0,0 +1,42 @@
'use strict';
const _ = require('lodash');
const is_pip_service_enabled = require('../../../../controller/predicates/is_pip_service_enabled');
module.exports.tests = {};
module.exports.tests.interface = (test, common) => {
test('valid interface', (t) => {
t.equal(typeof is_pip_service_enabled, 'function', 'is_pip_service_enabled is a function');
t.equal(typeof is_pip_service_enabled(), 'function', 'is_pip_service_enabled() is a function');
t.end();
});
};
module.exports.tests.true_conditions = (test, common) => {
test('string uri should return true', (t) => {
t.ok(is_pip_service_enabled('pip uri')());
t.end();
});
};
module.exports.tests.false_conditions = (test, common) => {
test('undefined uri should return false', (t) => {
t.notOk(is_pip_service_enabled()());
t.end();
});
};
module.exports.all = (tape, common) => {
function test(name, testFunction) {
return tape(`GET /is_pip_service_enabled ${name}`, testFunction);
}
for( const testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

58
test/unit/controller/search.js

@ -55,7 +55,7 @@ module.exports.tests.success = function(test, common) {
};
}
}
})(config, esclient, query);
})(config, esclient, query, () => { return true; });
const req = { clean: { }, errors: [], warnings: [] };
const res = {};
@ -119,7 +119,7 @@ module.exports.tests.success = function(test, common) {
};
}
}
})(config, esclient, query);
})(config, esclient, query, () => { return true; });
const req = { clean: { }, errors: [], warnings: [] };
const res = {};
@ -183,7 +183,7 @@ module.exports.tests.success = function(test, common) {
};
}
}
})(config, esclient, query);
})(config, esclient, query, () => { return true; });
const req = { clean: { }, errors: [], warnings: [] };
const res = {};
@ -263,7 +263,7 @@ module.exports.tests.success = function(test, common) {
};
}
}
})(config, esclient, query);
})(config, esclient, query, () => { return true; });
const req = { clean: { }, errors: [], warnings: [] };
const res = {};
@ -341,7 +341,7 @@ module.exports.tests.timeout = function(test, common) {
};
}
}
})(config, esclient, query);
})(config, esclient, query, () => { return true; });
const req = { clean: { }, errors: [], warnings: [] };
const res = {};
@ -392,7 +392,7 @@ module.exports.tests.timeout = function(test, common) {
callback(timeoutError);
}
})(config, esclient, query);
})(config, esclient, query, () => { return true; });
const req = { clean: { }, errors: [], warnings: [] };
const res = {};
@ -432,7 +432,7 @@ module.exports.tests.timeout = function(test, common) {
callback(nonTimeoutError);
}
})(config, esclient, query);
})(config, esclient, query, () => { return true; });
const req = { clean: { }, errors: [], warnings: [] };
const res = {};
@ -473,7 +473,7 @@ module.exports.tests.timeout = function(test, common) {
callback(stringTypeError);
}
})(config, esclient, query);
})(config, esclient, query, () => { return true; });
const req = { clean: { }, errors: [], warnings: [] };
const res = {};
@ -494,49 +494,21 @@ module.exports.tests.timeout = function(test, common) {
};
module.exports.tests.existing_errors = function(test, common) {
test('req with errors should not call esclient or query', function(t) {
module.exports.tests.should_execute = (test, common) => {
test('should_execute returning false and empty req.errors should call next', (t) => {
const esclient = () => {
throw new Error('esclient should not have been called');
};
const query = () => {
throw new Error('query should not have been called');
};
const controller = setup( {}, esclient, query );
// the existence of `errors` means that a sanitizer detected an error,
// so don't call the esclient
const req = {
errors: ['error']
};
const res = { };
t.doesNotThrow(() => {
controller(req, res, () => {});
});
t.end();
});
};
module.exports.tests.existing_results = function(test, common) {
test('res with existing data should not call esclient or query', function(t) {
const esclient = () => {
throw new Error('esclient should not have been called');
};
const query = () => {
throw new Error('query should not have been called');
};
const controller = setup( {}, esclient, query );
const controller = setup( {}, esclient, query, () => { return false; } );
const req = { };
// the existence of `data` means that there are already results so
// don't call esclient or query
const res = { data: [{}] };
const res = { };
const next = function() {
t.deepEqual(res, {data: [{}]});
const next = () => {
t.deepEqual(res, { });
t.end();
};
controller(req, res, next);
@ -559,7 +531,7 @@ module.exports.tests.undefined_query = function(test, common) {
search_service_was_called = true;
throw new Error('search service should not have been called');
}
})(undefined, undefined, query);
})(undefined, undefined, query, () => { return true; });
const next = () => {
t.notOk(search_service_was_called, 'should have returned before search service was called');

69
test/unit/fixture/structured_geocoding/fallback.json

@ -287,6 +287,75 @@
"boost": 5
}
},
{
"bool": {
"_name": "fallback.postalcode",
"must": [
{
"multi_match": {
"query": "postalcode value",
"type": "phrase",
"fields": [
"parent.postalcode"
]
}
},
{
"multi_match": {
"query": "city value",
"type": "phrase",
"fields": [
"parent.locality",
"parent.locality_a",
"parent.localadmin",
"parent.localadmin_a"
]
}
},
{
"multi_match": {
"query": "county value",
"type": "phrase",
"fields": [
"parent.county",
"parent.county_a",
"parent.macrocounty",
"parent.macrocounty_a"
]
}
},
{
"multi_match": {
"query": "state value",
"type": "phrase",
"fields": [
"parent.region",
"parent.region_a",
"parent.macroregion",
"parent.macroregion_a"
]
}
},
{
"multi_match": {
"query": "country value",
"type": "phrase",
"fields": [
"parent.country",
"parent.country_a",
"parent.dependency",
"parent.dependency_a"
]
}
}
],
"filter": {
"term": {
"layer": "postalcode"
}
}
}
},
{
"bool": {
"_name": "fallback.neighbourhood",

68
test/unit/fixture/structured_geocoding/postalcode_only.js

@ -0,0 +1,68 @@
module.exports = {
'query': {
'function_score': {
'query': {
'filtered': {
'query': {
'bool': {
'should': [
{
'bool': {
'_name': 'fallback.postalcode',
'must': [
{
'multi_match': {
'query': 'postalcode value',
'type': 'phrase',
'fields': [
'parent.postalcode'
]
}
}
],
'filter': {
'term': {
'layer': 'postalcode'
}
}
}
}
]
}
},
'filter': {
'bool': {
'must': []
}
}
}
},
'max_boost': 20,
'functions': [
{
'field_value_factor': {
'modifier': 'log1p',
'field': 'popularity',
'missing': 1
},
'weight': 1
},
{
'field_value_factor': {
'modifier': 'log1p',
'field': 'population',
'missing': 1
},
'weight': 2
}
],
'score_mode': 'avg',
'boost_mode': 'multiply'
}
},
'size': 20,
'track_scores': true,
'sort': [
'_score'
]
};

216
test/unit/middleware/changeLanguage.js

@ -0,0 +1,216 @@
var fs = require('fs'),
tmp = require('tmp'),
setup = require('../../../middleware/changeLanguage');
// load middleware using the default pelias config
var load = function(){
// adapter is driven by config
var tmpfile = tmp.tmpNameSync({ postfix: '.json' });
fs.writeFileSync( tmpfile, '{}', { encoding: 'utf8' } );
process.env.PELIAS_CONFIG = tmpfile;
var middleware = setup();
delete process.env.PELIAS_CONFIG;
return middleware;
};
module.exports.tests = {};
module.exports.tests.interface = function(test, common) {
test('valid interface', function(t) {
var middleware = load();
t.equal(typeof middleware, 'function', 'middleware is a function');
t.equal(middleware.length, 3, 'middleware takes 3 arguments');
t.end();
});
};
module.exports.tests.isLanguageChangeRequired = function(test, common) {
test('invalid query - null req/res', function(t) {
var middleware = load();
middleware(null, null, t.end);
});
test('invalid query - no results', function(t) {
var req = { language: { iso6393: 'spa' } };
var res = {};
var middleware = load();
middleware(req, res, function(){
t.deepEqual( req, { language: { iso6393: 'spa' } } );
t.deepEqual( res, {} );
t.end();
});
});
test('invalid query - empty results', function(t) {
var req = { language: { iso6393: 'spa' } };
var res = { data: [] };
var middleware = load();
middleware(req, res, function(){
t.deepEqual( req, { language: { iso6393: 'spa' } } );
t.deepEqual( res, { data: [] } );
t.end();
});
});
test('invalid query - no target language', function(t) {
var req = {};
var res = { data: [] };
var middleware = load();
middleware(req, res, function(){
t.deepEqual( req, {} );
t.deepEqual( res, { data: [] } );
t.end();
});
});
};
// check the service is called and response mapped correctly
module.exports.tests.miss = function(test, common) {
test('miss', function(t) {
var req = { language: { iso6393: 'spa' } };
var res = { data: [
{
layer: 'locality',
name: { default: 'London' },
parent: {
locality_id: [ 101750367 ],
locality: [ 'London' ]
}
},
{
layer: 'example',
name: { default: 'London' },
parent: {
locality_id: [ 101735809 ],
locality: [ 'London' ]
}
}
]};
var middleware = load();
// mock out the transport
middleware.transport.query = function mock( ids, cb ){
t.deepEqual( ids, [ '101735809', '101750367' ] );
t.equal( typeof cb, 'function' );
cb( 'error' );
};
middleware(req, res, function(){
t.deepEqual( res, { data: [
{
layer: 'locality',
name: { default: 'London' },
parent: {
locality_id: [ 101750367 ],
locality: [ 'London' ]
}
},
{
layer: 'example',
name: { default: 'London' },
parent: {
locality_id: [ 101735809 ],
locality: [ 'London' ]
}
}
]});
t.end();
});
});
};
// check the service is called and response mapped correctly
module.exports.tests.hit = function(test, common) {
test('hit', function(t) {
var req = { language: { iso6393: 'spa' } };
var res = { data: [
{
layer: 'locality',
name: { default: 'London' },
parent: {
locality_id: [ 101750367 ],
locality: [ 'London' ]
}
},
{
layer: 'example',
name: { default: 'London' },
parent: {
locality_id: [ 101735809 ],
locality: [ 'London' ]
}
}
]};
var middleware = load();
// mock out the transport
middleware.transport.query = function mock( ids, cb ){
t.deepEqual( ids, [ '101735809', '101750367' ] );
t.equal( typeof cb, 'function' );
cb( null, {
'101750367': {
'names': {
'default':'London',
'chi':'倫敦',
'spa':'Londres',
'eng':'London',
'hin':'लदन',
'ara':'لندن',
'por':'Londres',
'ben':'লনডন',
'rus':'Лондон',
'jpn':'ロンドン',
'kor':'런던'
}
},
'101735809': {
'names':{
'default':'London',
'eng':'London'
}
}
});
};
middleware(req, res, function(){
t.deepEqual( res, { data: [
{
layer: 'locality',
name: { default: 'Londres' },
parent: {
locality_id: [ 101750367 ],
locality: [ 'Londres' ]
}
},
{
layer: 'example',
name: { default: 'London' },
parent: {
locality_id: [ 101735809 ],
locality: [ 'London' ]
}
}
]});
t.end();
});
});
};
module.exports.all = function (tape, common) {
function test(name, testFunction) {
return tape('[middleware] changeLanguage: ' + name, testFunction);
}
for( var testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

76
test/unit/middleware/interpolate.js

@ -190,6 +190,82 @@ module.exports.tests.hit = function(test, common) {
});
};
// check the service is called and response mapped correctly
module.exports.tests.hit = function(test, common) {
test('hit', function(t) {
var req = { clean: {
parsed_text: {
number: '1',
street: 'sesame st'
}}
};
var res = { data: [
{
layer: 'street',
center_point: { lat: 1, lon: 1 },
address_parts: { street: 'sesame rd' },
name: { default: 'street name' },
source_id: '123456'
},
{
layer: 'street',
center_point: { lat: 2, lon: 2 },
address_parts: { street: 'sesame rd' },
name: { default: 'street name' },
source_id: '654321'
}
]};
var middleware = load();
// mock out the transport
middleware.transport.query = function mock(coord, number, street, cb) {
if (coord.lat === 2) {
t.deepEqual(coord, res.data[1].center_point);
t.deepEqual(number, req.clean.parsed_text.number);
t.deepEqual(street, res.data[1].address_parts.street);
t.equal(typeof cb, 'function');
return cb(null, {
properties: {
number: '100A',
source: 'OSM',
source_id: 'way:111111',
lat: 22.2,
lon: -33.3,
}
});
}
else {
return cb('miss');
}
};
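// the record with an interpolation hit is expected to become an 'address' result (source mapped
// to 'openstreetmap', name prefixed with the house number) and to sort ahead of the remaining street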
middleware(req, res, function(){
t.deepEqual( res, { data: [
{
layer: 'address',
match_type: 'interpolated',
center_point: { lat: 22.2, lon: -33.3 },
address_parts: { street: 'sesame rd', number: '100A' },
name: { default: '100A street name' },
source: 'openstreetmap',
source_id: 'way:111111'
},
{
layer: 'street',
center_point: { lat: 1, lon: 1 },
address_parts: { street: 'sesame rd' },
name: { default: 'street name' },
source_id: '123456'
}
]});
t.end();
});
});
};
module.exports.all = function (tape, common) {
function test(name, testFunction) {

322
test/unit/middleware/requestLanguage.js

@ -0,0 +1,322 @@
var middleware = require('../../../middleware/requestLanguage');
module.exports.tests = {};
var DEFAULTS = {
defaulted: true,
iso6391: 'en',
iso6392B: 'eng',
iso6392T: 'eng',
iso6393: 'eng',
name: 'English',
scope: 'individual',
type: 'living'
};
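// English is the fallback whenever neither the querystring nor the accept-language header supplies a valid language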
module.exports.tests.defaults = function(test, common) {
test('default language', function(t) {
var req = {};
middleware(req, {}, function () {
t.deepEqual( req.language, DEFAULTS, '$req.language set' );
t.deepEqual( req.clean.lang, {
defaulted: req.language.defaulted,
iso6391: req.language.iso6391,
iso6393: req.language.iso6393,
name: req.language.name
}, '$req.clean.lang set' );
t.deepEqual( req.warnings, []);
t.end();
});
});
test('both querystring & header invalid', function(t) {
var req = {
headers: { 'accept-language': 'foobar' },
query: { 'lang': 'foobar' }
};
middleware(req, {}, function () {
t.deepEqual( req.language, DEFAULTS, '$req.language set' );
t.deepEqual( req.clean.lang, {
defaulted: req.language.defaulted,
iso6391: req.language.iso6391,
iso6393: req.language.iso6393,
name: req.language.name
}, '$req.clean.lang set' );
t.deepEqual( req.warnings, [
'invalid language provided via querystring',
'invalid language provided via header'
]);
t.end();
});
});
};
module.exports.tests.invalid = function(test, common) {
test('headers: invalid language', function(t) {
var req = { headers: {
'accept-language': 'invalid language'
}};
middleware(req, {}, function () {
t.deepEqual( req.language, DEFAULTS, '$req.language set' );
t.deepEqual( req.clean.lang, {
defaulted: req.language.defaulted,
iso6391: req.language.iso6391,
iso6393: req.language.iso6393,
name: req.language.name
}, '$req.clean.lang set' );
t.deepEqual( req.warnings, [
'invalid language provided via header'
]);
t.end();
});
});
test('query: invalid language', function(t) {
var req = { query: {
lang: 'invalid language'
}};
middleware(req, {}, function () {
t.deepEqual( req.language, DEFAULTS, '$req.language set' );
t.deepEqual( req.clean.lang, {
defaulted: req.language.defaulted,
iso6391: req.language.iso6391,
iso6393: req.language.iso6393,
name: req.language.name
}, '$req.clean.lang set' );
t.deepEqual( req.warnings, [
'invalid language provided via querystring'
]);
t.end();
});
});
};
module.exports.tests.valid = function(test, common) {
test('headers: valid language - french', function(t) {
var req = { headers: {
'accept-language': 'fr-CH, fr;q=0.9, en;q=0.8, de;q=0.7, *;q=0.5'
}};
var expected = {
defaulted: false,
iso6391: 'fr',
iso6392B: 'fre',
iso6392T: 'fra',
iso6393: 'fra',
name: 'French',
scope: 'individual',
type: 'living'
};
middleware(req, {}, function () {
t.deepEqual( req.language, expected, '$req.language set' );
t.deepEqual( req.clean.lang, {
defaulted: req.language.defaulted,
iso6391: req.language.iso6391,
iso6393: req.language.iso6393,
name: req.language.name
}, '$req.clean.lang set' );
t.deepEqual( req.warnings, []);
t.end();
});
});
test('query: valid language - french', function(t) {
var req = { query: {
lang: 'fr-CH, fr;q=0.9, en;q=0.8, de;q=0.7, *;q=0.5'
}};
var expected = {
defaulted: false,
iso6391: 'fr',
iso6392B: 'fre',
iso6392T: 'fra',
iso6393: 'fra',
name: 'French',
scope: 'individual',
type: 'living'
};
middleware(req, {}, function () {
t.deepEqual( req.language, expected, '$req.language set' );
t.deepEqual( req.clean.lang, {
defaulted: req.language.defaulted,
iso6391: req.language.iso6391,
iso6393: req.language.iso6393,
name: req.language.name
}, '$req.clean.lang set' );
t.deepEqual( req.warnings, []);
t.end();
});
});
test('headers: valid language - english', function(t) {
var req = { headers: {
'accept-language': 'en'
}};
var expected = {
defaulted: false,
iso6391: 'en',
iso6392B: 'eng',
iso6392T: 'eng',
iso6393: 'eng',
name: 'English',
scope: 'individual',
type: 'living'
};
middleware(req, {}, function () {
t.deepEqual( req.language, expected, '$req.language set' );
t.deepEqual( req.clean.lang, {
defaulted: req.language.defaulted,
iso6391: req.language.iso6391,
iso6393: req.language.iso6393,
name: req.language.name
}, '$req.clean.lang set' );
t.deepEqual( req.warnings, []);
t.end();
});
});
test('query: valid language - english', function(t) {
var req = { query: {
lang: 'en'
}};
var expected = {
defaulted: false,
iso6391: 'en',
iso6392B: 'eng',
iso6392T: 'eng',
iso6393: 'eng',
name: 'English',
scope: 'individual',
type: 'living'
};
middleware(req, {}, function () {
t.deepEqual( req.language, expected, '$req.language set' );
t.deepEqual( req.clean.lang, {
defaulted: req.language.defaulted,
iso6391: req.language.iso6391,
iso6393: req.language.iso6393,
name: req.language.name
}, '$req.clean.lang set' );
t.deepEqual( req.warnings, []);
t.end();
});
});
};
module.exports.tests.precedence = function(test, common) {
test('precedence: query has precedence over headers', function(t) {
var req = {
headers: { 'accept-language': 'fr' },
query: { 'lang': 'es' }
};
var expected = {
defaulted: false,
iso6391: 'es',
iso6392B: 'spa',
iso6392T: 'spa',
iso6393: 'spa',
name: 'Spanish',
scope: 'individual',
type: 'living'
};
middleware(req, {}, function () {
t.deepEqual( req.language, expected, '$req.language set' );
t.deepEqual( req.clean.lang, {
defaulted: req.language.defaulted,
iso6391: req.language.iso6391,
iso6393: req.language.iso6393,
name: req.language.name
}, '$req.clean.lang set' );
t.deepEqual( req.warnings, []);
t.end();
});
});
test('precedence: invalid querystring but valid header', function(t) {
var req = {
headers: { 'accept-language': 'fr' },
query: { 'lang': 'foobar' }
};
var expected = {
defaulted: false,
iso6391: 'fr',
iso6392B: 'fre',
iso6392T: 'fra',
iso6393: 'fra',
name: 'French',
scope: 'individual',
type: 'living'
};
middleware(req, {}, function () {
t.deepEqual( req.language, expected, '$req.language set' );
t.deepEqual( req.clean.lang, {
defaulted: req.language.defaulted,
iso6391: req.language.iso6391,
iso6393: req.language.iso6393,
name: req.language.name
}, '$req.clean.lang set' );
t.deepEqual( req.warnings, [
'invalid language provided via querystring'
]);
t.end();
});
});
};
module.exports.all = function (tape, common) {
function test(name, testFunction) {
return tape('[middleware] requestLanguage: ' + name, testFunction);
}
for( var testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

18
test/unit/query/structured_geocoding.js

@ -188,6 +188,24 @@ module.exports.tests.query = function(test, common) {
});
test('parsed_text with only postalcode should use FallbackQuery', function(t) {
var clean = {
parsed_text: {
postalcode: 'postalcode value'
}
};
var query = generate(clean);
var compiled = JSON.parse(JSON.stringify(query));
var expected = require('../fixture/structured_geocoding/postalcode_only');
t.deepEqual(compiled.type, 'fallback', 'query type set');
t.deepEqual(compiled.body, expected, 'structured postalcode only');
t.end();
});
test('valid boundary.country search', function(t) {
var clean = {
parsed_text: {

11
test/unit/run.js

@ -11,9 +11,14 @@ var common = {
var tests = [
require('./app'),
require('./schema'),
require('./controller/coarse_reverse'),
require('./controller/index'),
require('./controller/place'),
require('./controller/search'),
require('./controller/predicates/has_response_data'),
require('./controller/predicates/has_request_errors'),
require('./controller/predicates/is_coarse_reverse'),
require('./controller/predicates/is_pip_service_enabled'),
require('./helper/diffPlaces'),
require('./helper/geojsonify'),
require('./helper/logging'),
@ -25,6 +30,7 @@ var tests = [
require('./middleware/confidenceScore'),
require('./middleware/confidenceScoreFallback'),
require('./middleware/confidenceScoreReverse'),
require('./middleware/changeLanguage'),
require('./middleware/distance'),
require('./middleware/interpolate'),
require('./middleware/localNamingConventions'),
@ -34,6 +40,7 @@ var tests = [
require('./middleware/normalizeParentIds'),
require('./middleware/trimByGranularity'),
require('./middleware/trimByGranularityStructured'),
require('./middleware/requestLanguage'),
require('./query/autocomplete'),
require('./query/autocomplete_defaults'),
require('./query/search_defaults'),
@ -73,7 +80,9 @@ var tests = [
require('./sanitizer/wrap'),
require('./service/mget'),
require('./service/search'),
require('./service/interpolation')
require('./service/interpolation'),
require('./service/pointinpolygon'),
require('./service/language')
];
tests.map(function(t) {

2
test/unit/sanitizer/_synthesize_analysis.js

@ -131,7 +131,7 @@ module.exports.tests.text_parser = function(test, common) {
const messages = sanitizer(raw, clean);
t.deepEquals(clean, expected_clean);
t.deepEquals(messages.errors, ['postalcode-only inputs are not supported'], 'no errors');
t.deepEquals(messages.errors, [], 'no errors');
t.deepEquals(messages.warnings, [], 'no warnings');
t.end();

80
test/unit/schema.js

@ -367,6 +367,86 @@ module.exports.tests.api_validation = (test, common) => {
});
test('non-string api.pipService should throw error', (t) => {
[null, 17, {}, [], true].forEach((value) => {
var config = {
api: {
version: 'version value',
indexName: 'index name value',
host: 'host value',
pipService: value
},
esclient: {}
};
t.throws(validate.bind(null, config), /"pipService" must be a string/);
});
t.end();
});
test('non-URI-formatted api.pipService should throw error', (t) => {
['this is not a URI'].forEach((value) => {
var config = {
api: {
version: 'version value',
indexName: 'index name value',
host: 'host value',
pipService: value
},
esclient: {}
};
t.throws(validate.bind(null, config), /"pipService" must be a valid uri/);
});
t.end();
});
test('non-http/https api.pipService should throw error', (t) => {
['ftp', 'git', 'unknown'].forEach((scheme) => {
var config = {
api: {
version: 'version value',
indexName: 'index name value',
host: 'host value',
pipService: `${scheme}://localhost`
},
esclient: {}
};
t.throws(validate.bind(null, config), /"pipService" must be a valid uri/);
});
t.end();
});
test('http/https api.pipService should not throw error', (t) => {
['http', 'https'].forEach((scheme) => {
var config = {
api: {
version: 'version value',
indexName: 'index name value',
host: 'host value',
pipService: `${scheme}://localhost`
},
esclient: {}
};
t.doesNotThrow(validate.bind(null, config), `${scheme} should be allowed`);
});
t.end();
});
};
module.exports.tests.esclient_validation = (test, common) => {

107
test/unit/service/language.js

@ -0,0 +1,107 @@
var fs = require('fs'),
tmp = require('tmp'),
setup = require('../../../service/language').findById;
module.exports.tests = {};
module.exports.tests.interface = function(test, common) {
test('valid interface', function(t) {
t.equal(typeof setup, 'function', 'setup is a function');
t.end();
});
};
// adapter factory
module.exports.tests.factory = function(test, common) {
test('http adapter', function(t) {
var config = { language: { client: {
adapter: 'http',
host: 'http://example.com'
}}};
// adapter is driven by config
var tmpfile = tmp.tmpNameSync({ postfix: '.json' });
fs.writeFileSync( tmpfile, JSON.stringify( config ), { encoding: 'utf8' } );
process.env.PELIAS_CONFIG = tmpfile;
var adapter = setup();
delete process.env.PELIAS_CONFIG;
t.equal(adapter.constructor.name, 'HttpTransport', 'HttpTransport');
t.equal(typeof adapter, 'object', 'adapter is an object');
t.equal(typeof adapter.query, 'function', 'query is a function');
t.equal(adapter.query.length, 2, 'query function signature');
t.end();
});
test('null adapter', function(t) {
var config = { language: { client: {
adapter: 'null'
}}};
// adapter is driven by config
var tmpfile = tmp.tmpNameSync({ postfix: '.json' });
fs.writeFileSync( tmpfile, JSON.stringify( config ), { encoding: 'utf8' } );
process.env.PELIAS_CONFIG = tmpfile;
var adapter = setup();
delete process.env.PELIAS_CONFIG;
t.equal(adapter.constructor.name, 'NullTransport', 'NullTransport');
t.equal(typeof adapter, 'object', 'adapter is an object');
t.equal(typeof adapter.query, 'function', 'query is a function');
t.equal(adapter.query.length, 2, 'query function signature');
t.end();
});
test('default adapter', function(t) {
var config = {};
// adapter is driven by config
var tmpfile = tmp.tmpNameSync({ postfix: '.json' });
fs.writeFileSync( tmpfile, JSON.stringify( config ), { encoding: 'utf8' } );
process.env.PELIAS_CONFIG = tmpfile;
var adapter = setup();
delete process.env.PELIAS_CONFIG;
t.equal(adapter.constructor.name, 'NullTransport', 'NullTransport');
t.equal(typeof adapter, 'object', 'adapter is an object');
t.equal(typeof adapter.query, 'function', 'query is a function');
t.equal(adapter.query.length, 2, 'query function signature');
t.end();
});
};
// null transport
module.exports.tests.NullTransport = function(test, common) {
test('null transport', function(t) {
// adapter is driven by config
var tmpfile = tmp.tmpNameSync({ postfix: '.json' });
fs.writeFileSync( tmpfile, '{}', { encoding: 'utf8' } );
process.env.PELIAS_CONFIG = tmpfile;
var adapter = setup();
delete process.env.PELIAS_CONFIG;
// test null transport performs a no-op
adapter.query( null, function( err, res ){
t.equal(err, undefined, 'no-op');
t.equal(res, undefined, 'no-op');
t.end();
});
});
};
module.exports.all = function (tape, common) {
function test(name, testFunction) {
return tape('SERVICE language: ' + name, testFunction);
}
for( var testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

169
test/unit/service/pointinpolygon.js

@ -0,0 +1,169 @@
const proxyquire = require('proxyquire').noCallThru();
const setup = require('../../../service/pointinpolygon');
module.exports.tests = {};
module.exports.tests.interface = (test, common) => {
test('valid interface', (t) => {
const logger = require('pelias-mock-logger')();
var service = proxyquire('../../../service/pointinpolygon', {
'pelias-logger': logger
});
t.equal(typeof service, 'function', 'service is a function');
t.end();
});
};
module.exports.tests.do_nothing_service = (test, common) => {
test('undefined PiP uri should return a service that logs that the PiP service is unavailable', (t) => {
const logger = require('pelias-mock-logger')();
const service = proxyquire('../../../service/pointinpolygon', {
'pelias-logger': logger
})();
service({ lat: 12.121212, lon: 21.212121 }, (err) => {
t.deepEquals(logger.getWarnMessages(), [
'point-in-polygon service disabled'
]);
t.equals(err, 'point-in-polygon service disabled, unable to resolve {"lat":12.121212,"lon":21.212121}');
t.end();
});
});
};
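// the point-in-polygon service is queried as GET /{lon}/{lat} (longitude first), which the mock routes and error messages below reflect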
module.exports.tests.success = (test, common) => {
test('lat and lon should be passed to server', (t) => {
const pipServer = require('express')();
pipServer.get('/:lon/:lat', (req, res, next) => {
t.equals(req.params.lat, '12.121212');
t.equals(req.params.lon, '21.212121');
res.send('{ "field": "value" }');
});
const server = pipServer.listen();
const port = server.address().port;
const logger = require('pelias-mock-logger')();
const service = proxyquire('../../../service/pointinpolygon', {
'pelias-logger': logger
})(`http://localhost:${port}`);
service({ lat: 12.121212, lon: 21.212121}, (err, results) => {
t.notOk(err);
t.deepEquals(results, { field: 'value' });
t.ok(logger.isInfoMessage(`using point-in-polygon service at http://localhost:${port}`));
t.notOk(logger.hasErrorMessages());
t.end();
server.close();
});
});
};
module.exports.tests.failure = (test, common) => {
test('server returning success but non-JSON body should log error and return no results', (t) => {
const pipServer = require('express')();
pipServer.get('/:lon/:lat', (req, res, next) => {
t.equals(req.params.lat, '12.121212');
t.equals(req.params.lon, '21.212121');
res.send('this is not JSON');
});
const server = pipServer.listen();
const port = server.address().port;
const logger = require('pelias-mock-logger')();
const service = proxyquire('../../../service/pointinpolygon', {
'pelias-logger': logger
})(`http://localhost:${port}`);
service({ lat: 12.121212, lon: 21.212121}, (err, results) => {
t.equals(err, `http://localhost:${port}/21.212121/12.121212 returned status 200 but with non-JSON response: this is not JSON`);
t.notOk(results);
t.ok(logger.isErrorMessage(`http://localhost:${port}/21.212121/12.121212: could not parse response body: this is not JSON`));
t.end();
server.close();
});
});
test('server returning error should log it and return no results', (t) => {
const server = require('express')().listen();
const port = server.address().port;
// immediately close the server so to ensure an error response
server.close();
const logger = require('pelias-mock-logger')();
const service = proxyquire('../../../service/pointinpolygon', {
'pelias-logger': logger
})(`http://localhost:${port}`);
service({ lat: 12.121212, lon: 21.212121}, (err, results) => {
t.equals(err.code, 'ECONNREFUSED');
t.notOk(results);
t.ok(logger.isErrorMessage(/ECONNREFUSED/), 'there should be a connection refused error message');
t.end();
server.close();
});
});
test('non-OK status should log error and return no results', (t) => {
const pipServer = require('express')();
pipServer.get('/:lon/:lat', (req, res, next) => {
res.status(400).send('a bad request was made');
});
const server = pipServer.listen();
const port = server.address().port;
const logger = require('pelias-mock-logger')();
const service = proxyquire('../../../service/pointinpolygon', {
'pelias-logger': logger
})(`http://localhost:${port}`);
service({ lat: 12.121212, lon: 21.212121}, (err, results) => {
t.equals(err, `http://localhost:${port}/21.212121/12.121212 returned status 400: a bad request was made`);
t.notOk(results);
t.ok(logger.isErrorMessage(`http://localhost:${port}/21.212121/12.121212 returned status 400: a bad request was made`));
t.end();
server.close();
});
});
};
module.exports.all = (tape, common) => {
function test(name, testFunction) {
return tape(`SERVICE /pointinpolygon ${name}`, testFunction);
}
for( var testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};