diff --git a/README.md b/README.md
index 652f289c..53074f58 100644
--- a/README.md
+++ b/README.md
@@ -34,3 +34,41 @@ The API recognizes the following properties under the top-level `api` key in you
 Please fork and pull request against upstream master on a feature branch.
 Pretty please; provide unit tests and script fixtures in the `test` directory.
+
+## Unit tests
+
+You can run the unit test suite using the command:
+
+```bash
+$ npm test
+```
+
+## HTTP tests
+
+We have another set of tests which exercise the HTTP API layer. These tests send expected HTTP requests and then
+assert that the responses coming back have the correct GeoJSON format and HTTP status codes.
+
+You can run the HTTP test suite using the command:
+
+```bash
+$ npm run ciao
+```
+
+Note: some of the tests in this suite fail when no data is present in the index. A small set of test documents
+is provided in `./test/ciao_test_data` which can be inserted in order to avoid these errors.
+
+To inject dummy data into your local index:
+
+```bash
+$ node test/ciao_test_data.js
+```
+
+You can confirm the dummy data has been inserted with the command:
+
+```bash
+$ curl localhost:9200/pelias/_count?pretty
+{
+  "count" : 9,
+  ...
+}
+```
diff --git a/bin/units b/bin/units
new file mode 100755
index 00000000..b011e2ee
--- /dev/null
+++ b/bin/units
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+set -euo pipefail
+
+node test/unit/run.js | ./node_modules/.bin/tap-dot
diff --git a/helper/geojsonify.js b/helper/geojsonify.js
index 67fed28e..666ab60c 100644
--- a/helper/geojsonify.js
+++ b/helper/geojsonify.js
@@ -4,6 +4,7 @@ var GeoJSON = require('geojson'),
     labelGenerator = require('./labelGenerator'),
     logger = require('pelias-logger').get('api'),
     type_mapping = require('./type_mapping'),
+    Document = require('pelias-model').Document,
     _ = require('lodash');
 
 // Properties to be copied
@@ -14,30 +15,31 @@ var DETAILS_PROPS = [
   'confidence',
   'distance',
   'country',
-  'country_id',
+  'country_gid',
   'country_a',
   'macroregion',
-  'macroregion_id',
+  'macroregion_gid',
   'macroregion_a',
   'region',
-  'region_id',
+  'region_gid',
   'region_a',
   'macrocounty',
-  'macrocounty_id',
+  'macrocounty_gid',
   'macrocounty_a',
   'county',
-  'county_id',
+  'county_gid',
   'county_a',
   'localadmin',
-  'localadmin_id',
+  'localadmin_gid',
   'localadmin_a',
   'locality',
-  'locality_id',
+  'locality_gid',
   'locality_a',
   'borough',
-  'borough_id',
+  'borough_gid',
+  'borough_a',
   'neighbourhood',
-  'neighbourhood_id',
+  'neighbourhood_gid',
   'bounding_box'
 ];
 
@@ -67,6 +69,10 @@ function geojsonifyPlaces( docs ){
   var geojson = GeoJSON.parse( geodata, { Point: ['lat', 'lng'] });
   var geojsonExtentPoints = GeoJSON.parse( extentPoints, { Point: ['lat', 'lng'] });
 
+  // to insert the bbox property at the top level of each feature, it must be done separately after
+  // initial geojson construction is finished
+  addBBoxPerFeature(geojson);
+
   // bounding box calculations
   computeBBox(geojson, geojsonExtentPoints);
 
@@ -119,6 +125,30 @@ function addLabel(src, dst) {
   dst.label = labelGenerator(dst);
 }
 
+/**
+ * Add bounding box
+ *
+ * @param {object} geojson
+ */
+function addBBoxPerFeature(geojson) {
+  geojson.features.forEach(function (feature) {
+
+    if (!feature.properties.hasOwnProperty('bounding_box')) {
+      return;
+    }
+
+    if (feature.properties.bounding_box) {
+      feature.bbox = [
+        feature.properties.bounding_box.min_lon,
+        feature.properties.bounding_box.min_lat,
+        feature.properties.bounding_box.max_lon,
+        feature.properties.bounding_box.max_lat
+      ];
+    }
+
+    delete
feature.properties.bounding_box; + }); +} /** * Collect all points from the geodata. @@ -210,7 +240,8 @@ function copyProperties( source, props, dst ) { * @param {object} src */ function makeGid(src) { - return lookupSource(src) + ':' + lookupLayer(src) + ':' + src._id; + var doc = new Document(lookupSource(src), lookupLayer(src), src._id); + return doc.getGid(); } /** @@ -224,6 +255,9 @@ function addMetaData(src, dst) { dst.gid = makeGid(src); dst.layer = lookupLayer(src); dst.source = lookupSource(src); + if (src.hasOwnProperty('bounding_box')) { + dst.bounding_box = src.bounding_box; + } } /** diff --git a/helper/placeTypes.js b/helper/placeTypes.js new file mode 100644 index 00000000..76538982 --- /dev/null +++ b/helper/placeTypes.js @@ -0,0 +1,11 @@ +module.exports = [ + 'country', + 'macroregion', + 'region', + 'macrocounty', + 'county', + 'localadmin', + 'locality', + 'borough', + 'neighbourhood' +]; diff --git a/middleware/localNamingConventions.js b/middleware/localNamingConventions.js index dd026bb3..e2851370 100644 --- a/middleware/localNamingConventions.js +++ b/middleware/localNamingConventions.js @@ -23,6 +23,9 @@ function applyLocalNamingConventions(req, res, next) { // loop through data items and flip relevant number/street res.data.filter(function(place){ + // do nothing for records with no admin info + if (!place.parent || !place.parent.country_a) { return false; } + // relevant for some countries var flip = place.parent.country_a.some(function(country) { return _.includes(flipNumberAndStreetCountries, country); diff --git a/middleware/normalizeParentIds.js b/middleware/normalizeParentIds.js new file mode 100644 index 00000000..e0e499e2 --- /dev/null +++ b/middleware/normalizeParentIds.js @@ -0,0 +1,56 @@ +var logger = require('pelias-logger').get('api'); +var Document = require('pelias-model').Document; + +var placeTypes = require('../helper/placeTypes'); + +/** + * Convert WOF integer ids to Pelias formatted ids that can be used by the /place endpoint. + * This should probably be moved to the import pipeline once we are happy with the way this works. + */ + +function setup() { + return function (req, res, next) { + // do nothing if no result data set + if (!res || !res.data) { + return next(); + } + + res.data = res.data.map(normalizeParentIds); + + next(); + }; +} + +/** + * Update all parent ids in the admin hierarchy + * + * @param {object} place + * @return {object} + */ +function normalizeParentIds(place) { + + if (place) { + placeTypes.forEach(function (placeType) { + if (place[placeType] && place[placeType].length > 0 && place[placeType][0]) { + place[placeType + '_gid'] = [ makeNewId(placeType, place[placeType + '_gid']) ]; + } + }); + } + + return place; +} + +/** + * Generate a valid Pelias ids from placetype and WOF id. + * Assumes all of the incoming ids are WOF ids. + * + * @param {string} placeType + * @param {number} id + * @return {string} + */ +function makeNewId(placeType, id) { + var doc = new Document('whosonfirst', placeType, id); + return doc.getGid(); +} + +module.exports = setup; diff --git a/middleware/renamePlacenames.js b/middleware/renamePlacenames.js index 152ad404..d89a43ad 100644 --- a/middleware/renamePlacenames.js +++ b/middleware/renamePlacenames.js @@ -1,16 +1,6 @@ var _ = require('lodash'); -/** - - P is a preferred English name - - Q is a preferred name (in other languages) - - V is a well-known (but unofficial) variant for the place - (e.g. 
"New York City" for New York) - - S is either a synonym or a colloquial name for the place - (e.g. "Big Apple" for New York), or a version of the name which - is stripped of accent characters. - - A is an abbreviation or code for the place (e.g. "NYC" for New - York) - */ +var PARENT_PROPS = require('../helper/placeTypes'); var ADDRESS_PROPS = { 'number': 'housenumber', @@ -18,32 +8,6 @@ var ADDRESS_PROPS = { 'street': 'street' }; -var PARENT_PROPS = [ - 'country', - 'country_id', - 'country_a', - 'macroregion', - 'macroregion_id', - 'macroregion_a', - 'region', - 'region_id', - 'region_a', - 'macrocounty', - 'macrocounty_id', - 'macrocounty_a', - 'county', - 'county_id', - 'county_a', - 'localadmin', - 'localadmin_id', - 'localadmin_a', - 'locality', - 'locality_id', - 'locality_a', - 'neighbourhood', - 'neighbourhood_id' -]; - function setup() { return renamePlacenames; @@ -74,6 +38,8 @@ function renameOneRecord(place) { if (place.parent) { PARENT_PROPS.forEach(function (prop) { place[prop] = place.parent[prop]; + place[prop + '_a'] = place.parent[prop + '_a']; + place[prop + '_gid'] = place.parent[prop + '_id']; }); } diff --git a/package.json b/package.json index 49e00498..0c0cc1f7 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,7 @@ "scripts": { "start": "node index.js", "test": "npm run unit", - "unit": "node test/unit/run.js | tap-dot", + "unit": "./bin/units", "ciao": "node node_modules/ciao/bin/ciao -c test/ciao.json test/ciao", "coverage": "node_modules/.bin/istanbul cover test/unit/run.js", "audit": "npm shrinkwrap; node node_modules/nsp/bin/nspCLI.js audit-shrinkwrap; rm npm-shrinkwrap.json;", @@ -39,7 +39,7 @@ "async": "^1.5.2", "check-types": "^6.0.0", "cluster2": "git://github.com/missinglink/cluster2.git#node_zero_twelve", - "elasticsearch": "^10.1.3", + "elasticsearch": "^11.0.0", "express": "^4.8.8", "express-http-proxy": "^0.6.0", "extend": "3.0.0", @@ -53,6 +53,7 @@ "morgan": "1.7.0", "pelias-config": "^1.0.1", "pelias-logger": "^0.0.8", + "pelias-model": "^3.1.0", "pelias-query": "6.2.0", "pelias-suggester-pipeline": "2.0.4", "stats-lite": "1.0.3", @@ -66,7 +67,7 @@ "nsp": "^2.2.0", "precommit-hook": "^3.0.0", "proxyquire": "^1.4.0", - "tap-dot": "1.0.4", + "tap-dot": "1.0.5", "tape": "^4.4.0" }, "pre-commit": [ diff --git a/query/autocomplete.js b/query/autocomplete.js index 4bc98c7c..ffc57396 100644 --- a/query/autocomplete.js +++ b/query/autocomplete.js @@ -41,6 +41,9 @@ query.score( views.focus_selected_layers( views.ngrams_strict ) ); query.score( peliasQuery.view.popularity( views.ngrams_strict ) ); query.score( peliasQuery.view.population( views.ngrams_strict ) ); +// non-scoring hard filters +query.filter( peliasQuery.view.sources ); + // -------------------------------- /** @@ -51,6 +54,11 @@ function generateQuery( clean ){ var vs = new peliasQuery.Vars( defaults ); + // sources + if( check.array(clean.sources) && clean.sources.length ){ + vs.var( 'sources', clean.sources ); + } + // mark the name as incomplete (user has not yet typed a comma) vs.var( 'input:name:isComplete', false ); diff --git a/routes/v1.js b/routes/v1.js index eacd0a40..c8c88de1 100644 --- a/routes/v1.js +++ b/routes/v1.js @@ -35,7 +35,8 @@ var postProc = { renamePlacenames: require('../middleware/renamePlacenames'), geocodeJSON: require('../middleware/geocodeJSON'), sendJSON: require('../middleware/sendJSON'), - parseBoundingBox: require('../middleware/parseBBox') + parseBoundingBox: require('../middleware/parseBBox'), + normalizeParentIds: 
require('../middleware/normalizeParentIds') }; /** @@ -67,6 +68,7 @@ function addRoutes(app, peliasConfig) { postProc.localNamingConventions(), postProc.renamePlacenames(), postProc.parseBoundingBox(), + postProc.normalizeParentIds(), postProc.geocodeJSON(peliasConfig, base), postProc.sendJSON ]), @@ -79,6 +81,7 @@ function addRoutes(app, peliasConfig) { postProc.localNamingConventions(), postProc.renamePlacenames(), postProc.parseBoundingBox(), + postProc.normalizeParentIds(), postProc.geocodeJSON(peliasConfig, base), postProc.sendJSON ]), @@ -94,6 +97,7 @@ function addRoutes(app, peliasConfig) { postProc.localNamingConventions(), postProc.renamePlacenames(), postProc.parseBoundingBox(), + postProc.normalizeParentIds(), postProc.geocodeJSON(peliasConfig, base), postProc.sendJSON ]), @@ -103,6 +107,7 @@ function addRoutes(app, peliasConfig) { postProc.localNamingConventions(), postProc.renamePlacenames(), postProc.parseBoundingBox(), + postProc.normalizeParentIds(), postProc.geocodeJSON(peliasConfig, base), postProc.sendJSON ]), diff --git a/sanitiser/_size.js b/sanitiser/_size.js index 3fb8115b..b2b5e62e 100644 --- a/sanitiser/_size.js +++ b/sanitiser/_size.js @@ -5,32 +5,40 @@ var MIN_SIZE = 1, DEFAULT_SIZE = 10; // validate inputs, convert types and apply defaults -function sanitize( raw, clean ){ - - // error & warning messages - var messages = { errors: [], warnings: [] }; - - // coercions - clean.size = parseInt( raw.size, 10 ); - - // invalid numeric input - if( isNaN( clean.size ) ){ - clean.size = DEFAULT_SIZE; - } - // ensure size falls within defined range - else if( clean.size > MAX_SIZE ){ - // set the max size - messages.warnings.push('out-of-range integer \'size\', using MAX_SIZE'); - clean.size = MAX_SIZE; - } - else if( clean.size < MIN_SIZE ){ - // set the min size - messages.warnings.push('out-of-range integer \'size\', using MIN_SIZE'); - clean.size = MIN_SIZE; - } - - return messages; +function setup( size_min, size_max, size_def ){ + + // allow caller to inject custom min/max/default values + if( !check.number( size_min ) ){ size_min = MIN_SIZE; } + if( !check.number( size_max ) ){ size_max = MAX_SIZE; } + if( !check.number( size_def ) ){ size_def = DEFAULT_SIZE; } + + return function sanitize( raw, clean ){ + + // error & warning messages + var messages = { errors: [], warnings: [] }; + + // coercions + clean.size = parseInt( raw.size, 10 ); + + // invalid numeric input + if( isNaN( clean.size ) ){ + clean.size = size_def; + } + // ensure size falls within defined range + else if( clean.size > size_max ){ + // set the max size + messages.warnings.push('out-of-range integer \'size\', using MAX_SIZE'); + clean.size = size_max; + } + else if( clean.size < size_min ){ + // set the min size + messages.warnings.push('out-of-range integer \'size\', using MIN_SIZE'); + clean.size = size_min; + } + + return messages; + }; } // export function -module.exports = sanitize; +module.exports = setup; diff --git a/sanitiser/autocomplete.js b/sanitiser/autocomplete.js index c6dc08da..f9698956 100644 --- a/sanitiser/autocomplete.js +++ b/sanitiser/autocomplete.js @@ -1,8 +1,14 @@ +var type_mapping = require('../helper/type_mapping'); + var sanitizeAll = require('../sanitiser/sanitizeAll'), sanitizers = { singleScalarParameters: require('../sanitiser/_single_scalar_parameters'), text: require('../sanitiser/_text'), - size: require('../sanitiser/_size'), + size: require('../sanitiser/_size')(10, 10, 10), + layers: require('../sanitiser/_targets')('layers', type_mapping.layer_mapping), + 
sources: require('../sanitiser/_targets')('sources', type_mapping.source_mapping), + // depends on the layers and sources sanitisers, must be run after them + sources_and_layers: require('../sanitiser/_sources_and_layers'), private: require('../sanitiser/_flag_bool')('private', false), geo_autocomplete: require('../sanitiser/_geo_autocomplete'), }; diff --git a/sanitiser/reverse.js b/sanitiser/reverse.js index c8e1e3f4..a85e5bf8 100644 --- a/sanitiser/reverse.js +++ b/sanitiser/reverse.js @@ -8,7 +8,7 @@ var sanitizeAll = require('../sanitiser/sanitizeAll'), sources: require('../sanitiser/_targets')('sources', type_mapping.source_mapping), // depends on the layers and sources sanitisers, must be run after them sources_and_layers: require('../sanitiser/_sources_and_layers'), - size: require('../sanitiser/_size'), + size: require('../sanitiser/_size')(/* use defaults*/), private: require('../sanitiser/_flag_bool')('private', false), geo_reverse: require('../sanitiser/_geo_reverse'), boundary_country: require('../sanitiser/_boundary_country'), diff --git a/sanitiser/search.js b/sanitiser/search.js index 8b26e54e..b2976af3 100644 --- a/sanitiser/search.js +++ b/sanitiser/search.js @@ -5,7 +5,7 @@ var sanitizeAll = require('../sanitiser/sanitizeAll'), quattroshapes_deprecation: require('../sanitiser/_deprecate_quattroshapes'), singleScalarParameters: require('../sanitiser/_single_scalar_parameters'), text: require('../sanitiser/_text'), - size: require('../sanitiser/_size'), + size: require('../sanitiser/_size')(/* use defaults*/), layers: require('../sanitiser/_targets')('layers', type_mapping.layer_mapping), sources: require('../sanitiser/_targets')('sources', type_mapping.source_mapping), // depends on the layers and sources sanitisers, must be run after them diff --git a/test/ciao/autocomplete/layers_alias_address.coffee b/test/ciao/autocomplete/layers_alias_address.coffee new file mode 100644 index 00000000..d393971a --- /dev/null +++ b/test/ciao/autocomplete/layers_alias_address.coffee @@ -0,0 +1,34 @@ + +#> layer alias +path: '/v1/autocomplete?text=a&layers=address' + +#? 200 ok +response.statusCode.should.be.equal 200 +response.should.have.header 'charset', 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.not.exist json.geocoding.errors + +#? expected warnings +should.not.exist json.geocoding.warnings + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 +json.geocoding.query.layers.should.eql ["address"] diff --git a/test/ciao/autocomplete/layers_alias_coarse.coffee b/test/ciao/autocomplete/layers_alias_coarse.coffee new file mode 100644 index 00000000..4a359bdd --- /dev/null +++ b/test/ciao/autocomplete/layers_alias_coarse.coffee @@ -0,0 +1,46 @@ + +#> layer alias +path: '/v1/autocomplete?text=a&layers=coarse' + +#? 200 ok +response.statusCode.should.be.equal 200 +response.should.have.header 'charset', 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? 
valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.not.exist json.geocoding.errors + +#? expected warnings +should.not.exist json.geocoding.warnings + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 +json.geocoding.query.layers.should.eql [ "continent", + "macrocountry", + "country", + "dependency", + "region", + "locality", + "localadmin", + "county", + "macrohood", + "neighbourhood", + "microhood", + "disputed" +] diff --git a/test/ciao/autocomplete/layers_invalid.coffee b/test/ciao/autocomplete/layers_invalid.coffee new file mode 100644 index 00000000..b4ce0565 --- /dev/null +++ b/test/ciao/autocomplete/layers_invalid.coffee @@ -0,0 +1,34 @@ + +#> layer alias +path: '/v1/autocomplete?text=a&layers=notlayer' + +#? 200 ok +response.statusCode.should.be.equal 400 +response.should.have.header 'charset', 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.exist json.geocoding.errors +json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,country,region,county,locality,continent,macrocountry,dependency,localadmin,macrohood,neighbourhood,microhood,disputed' ] + +#? expected warnings +should.not.exist json.geocoding.warnings + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 diff --git a/test/ciao/autocomplete/layers_mix_invalid_valid.coffee b/test/ciao/autocomplete/layers_mix_invalid_valid.coffee new file mode 100644 index 00000000..8bac1ccf --- /dev/null +++ b/test/ciao/autocomplete/layers_mix_invalid_valid.coffee @@ -0,0 +1,35 @@ + +#> layer alias +path: '/v1/autocomplete?text=a&layers=country,notlayer' + +#? 200 ok +response.statusCode.should.be.equal 400 +response.should.have.header 'charset', 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.exist json.geocoding.errors +json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. 
Valid options: coarse,address,venue,country,region,county,locality,continent,macrocountry,dependency,localadmin,macrohood,neighbourhood,microhood,disputed' ] + +#? expected warnings +should.not.exist json.geocoding.warnings + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 +should.not.exist json.geocoding.query['layers'] diff --git a/test/ciao/autocomplete/layers_multiple.coffee b/test/ciao/autocomplete/layers_multiple.coffee new file mode 100644 index 00000000..fed6f6f7 --- /dev/null +++ b/test/ciao/autocomplete/layers_multiple.coffee @@ -0,0 +1,34 @@ + +#> layer alias +path: '/v1/autocomplete?text=a&layers=country,region' + +#? 200 ok +response.statusCode.should.be.equal 200 +response.should.have.header 'charset', 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.not.exist json.geocoding.errors + +#? expected warnings +should.not.exist json.geocoding.warnings + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 +json.geocoding.query.layers.should.eql ["country","region"] diff --git a/test/ciao/autocomplete/layers_single.coffee b/test/ciao/autocomplete/layers_single.coffee new file mode 100644 index 00000000..fdae272a --- /dev/null +++ b/test/ciao/autocomplete/layers_single.coffee @@ -0,0 +1,34 @@ + +#> layer alias +path: '/v1/autocomplete?text=a&layers=country' + +#? 200 ok +response.statusCode.should.be.equal 200 +response.should.have.header 'charset', 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.not.exist json.geocoding.errors + +#? expected warnings +should.not.exist json.geocoding.warnings + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 +json.geocoding.query.layers.should.eql ["country"] diff --git a/test/ciao/autocomplete/size_not_default.coffee b/test/ciao/autocomplete/size_not_default.coffee new file mode 100644 index 00000000..8baf233d --- /dev/null +++ b/test/ciao/autocomplete/size_not_default.coffee @@ -0,0 +1,34 @@ + +#> set size (autocomplete does not allow size to be changed) +path: '/v1/autocomplete?text=a&size=3' + +#? 200 ok +response.statusCode.should.be.equal 200 +response.should.have.header 'charset', 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? 
valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.not.exist json.geocoding.errors + +#? expected warnings +should.exist json.geocoding.warnings +json.geocoding.warnings.should.eql [ 'out-of-range integer \'size\', using MIN_SIZE' ] + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 # should remain the default size diff --git a/test/ciao/autocomplete/sources_invalid.coffee b/test/ciao/autocomplete/sources_invalid.coffee new file mode 100644 index 00000000..38a12162 --- /dev/null +++ b/test/ciao/autocomplete/sources_invalid.coffee @@ -0,0 +1,34 @@ + +#> sources filter +path: '/v1/autocomplete?text=a&sources=openstreetmap,notasource' + +#? 200 ok +response.statusCode.should.be.equal 400 +response.should.have.header 'charset', 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.exist json.geocoding.errors +json.geocoding.errors.should.eql [ '\'notasource\' is an invalid sources parameter. Valid options: osm,oa,gn,wof,openstreetmap,openaddresses,geonames,whosonfirst' ] + +#? expected warnings +should.not.exist json.geocoding.warnings + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 diff --git a/test/ciao/autocomplete/sources_layers_invalid_combo.coffee b/test/ciao/autocomplete/sources_layers_invalid_combo.coffee new file mode 100644 index 00000000..3d27a200 --- /dev/null +++ b/test/ciao/autocomplete/sources_layers_invalid_combo.coffee @@ -0,0 +1,37 @@ + +#> sources and layers specified (invalid combo) +path: '/v1/autocomplete?text=a&sources=whosonfirst&layers=address' + +#? 200 ok +response.statusCode.should.be.equal 400 +response.should.have.header 'charset', 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.exist json.geocoding.errors +json.geocoding.errors.should.eql [ 'You have specified both the `sources` and `layers` parameters in a combination that will return no results: the whosonfirst source has nothing in the address layer' ] + +#? 
expected warnings +should.not.exist json.geocoding.warnings + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 +json.geocoding.query.layers.should.eql ["address"] +json.geocoding.query.sources.should.eql ["whosonfirst"] +should.not.exist json.geocoding.query['type'] diff --git a/test/ciao/autocomplete/sources_layers_valid_combo.coffee b/test/ciao/autocomplete/sources_layers_valid_combo.coffee new file mode 100644 index 00000000..2779009f --- /dev/null +++ b/test/ciao/autocomplete/sources_layers_valid_combo.coffee @@ -0,0 +1,35 @@ + +#> sources and layers specified +path: '/v1/autocomplete?text=a&sources=openaddresses&layers=address' + +#? 200 ok +response.statusCode.should.be.equal 200 +response.should.have.header 'charset', 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.not.exist json.geocoding.errors + +#? expected warnings +should.not.exist json.geocoding.warnings + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 +json.geocoding.query.layers.should.eql ["address"] +json.geocoding.query.sources.should.eql ["openaddresses"] diff --git a/test/ciao/autocomplete/sources_multiple.coffee b/test/ciao/autocomplete/sources_multiple.coffee new file mode 100644 index 00000000..5ddad82f --- /dev/null +++ b/test/ciao/autocomplete/sources_multiple.coffee @@ -0,0 +1,34 @@ + +#> sources filter +path: "/v1/autocomplete?text=a&sources=openstreetmap,geonames" + +#? 200 ok +response.statusCode.should.be.equal 200 +response.should.have.header "charset", 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.not.exist json.geocoding.errors + +#? expected warnings +should.not.exist json.geocoding.warnings + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 +json.geocoding.query.sources.should.eql ["openstreetmap", "geonames"] diff --git a/test/ciao/autocomplete/sources_single.coffee b/test/ciao/autocomplete/sources_single.coffee new file mode 100644 index 00000000..d1df9c9d --- /dev/null +++ b/test/ciao/autocomplete/sources_single.coffee @@ -0,0 +1,34 @@ + +#> sources filter +path: '/v1/autocomplete?text=a&sources=openstreetmap' + +#? 200 ok +response.statusCode.should.be.equal 200 +response.should.have.header 'charset', 'utf8' +response.should.have.header 'content-type', 'application/json; charset=utf-8' + +#? 
valid geocoding block +should.exist json.geocoding +should.exist json.geocoding.version +should.exist json.geocoding.attribution +should.exist json.geocoding.query +should.exist json.geocoding.engine +should.exist json.geocoding.engine.name +should.exist json.geocoding.engine.author +should.exist json.geocoding.engine.version +should.exist json.geocoding.timestamp + +#? valid geojson +json.type.should.be.equal 'FeatureCollection' +json.features.should.be.instanceof Array + +#? expected errors +should.not.exist json.geocoding.errors + +#? expected warnings +should.not.exist json.geocoding.warnings + +#? inputs +json.geocoding.query['text'].should.eql 'a' +json.geocoding.query['size'].should.eql 10 +json.geocoding.query.sources.should.eql ["openstreetmap"] diff --git a/test/ciao_test_data.js b/test/ciao_test_data.js new file mode 100644 index 00000000..70b329f6 --- /dev/null +++ b/test/ciao_test_data.js @@ -0,0 +1,58 @@ + +/** + Test data required by the ciao test suite. + + Some tests will fail when run against an empty index, you can use this script + to insert some dummy data in to your index before running the tests. + + note: as this is dummy data, care should be taken in order to make sure these + documents don't end up in your production index; for that reason the HTTP port + has been hard-coded as port:9200. +**/ + +// we use the default config to avoid making calls to +// a cluster running on a non-standard port. +var client = require('elasticsearch').Client(), + async = require('async'), + actions = []; + +// add one record per 'type' in order to cause the _default_ mapping +// to be copied when the new type is created. +var types = ['venue','address','county','region','county','country','admin0','admin1','admin2'], + sources = ['test'], + layers = ['geonames']; + +// iterate over all types/sources/layers and index a test document +types.forEach( function( type, i1 ){ + sources.forEach( function( source, i2 ){ + layers.forEach( function( layer, i3 ){ + actions.push( function( done ){ + client.index({ + index: 'pelias', + type: type, + id: [i1,i2,i3].join(':'), + body: { + source: source, + layer: layer, + name: { default: 'test' }, + phrase: { default: 'test' }, + parent: { + country_a: ['USA'] + } + } + }); + done(); + }); + }); + }); +}); + +// call refresh so the index merges the changes +actions.push( function( done ){ + client.indices.refresh( { index: 'pelias' }, done); +}); + +// perform all actions in series +async.series( actions, function( err, resp ){ + console.log('test data inported'); +}); diff --git a/test/unit/fixture/autocomplete_with_source_filtering.js b/test/unit/fixture/autocomplete_with_source_filtering.js new file mode 100644 index 00000000..22c12a5d --- /dev/null +++ b/test/unit/fixture/autocomplete_with_source_filtering.js @@ -0,0 +1,85 @@ + +module.exports = { + 'query': { + 'filtered': { + 'query': { + 'bool': { + 'must': [{ + 'match': { + 'name.default': { + 'analyzer': 'peliasPhrase', + 'boost': 100, + 'query': 'test', + 'type': 'phrase', + 'operator': 'and' + } + } + }], + 'should':[{ + 'function_score': { + 'query': { + 'match': { + 'name.default': { + 'analyzer': 'peliasPhrase', + 'boost': 100, + 'query': 'test', + 'type': 'phrase', + 'operator': 'and' + } + } + }, + 'max_boost': 20, + 'score_mode': 'first', + 'boost_mode': 'replace', + 'functions': [{ + 'field_value_factor': { + 'modifier': 'log1p', + 'field': 'popularity', + 'missing': 1 + }, + 'weight': 1 + }] + } + },{ + 'function_score': { + 'query': { + 'match': { + 'name.default': { + 
'analyzer': 'peliasPhrase', + 'boost': 100, + 'query': 'test', + 'type': 'phrase', + 'operator': 'and' + } + } + }, + 'max_boost': 20, + 'score_mode': 'first', + 'boost_mode': 'replace', + 'functions': [{ + 'field_value_factor': { + 'modifier': 'log1p', + 'field': 'population', + 'missing': 1 + }, + 'weight': 3 + }] + } + }] + } + }, + 'filter': { + 'bool': { + 'must': [{ + 'terms': { + 'source': ['test_source'] + } + }] + } + } + } + }, + 'sort': [ '_score' ], + 'size': 20, + 'track_scores': true +}; diff --git a/test/unit/fixture/search_with_source_filtering.js b/test/unit/fixture/search_with_source_filtering.js new file mode 100644 index 00000000..593eac5b --- /dev/null +++ b/test/unit/fixture/search_with_source_filtering.js @@ -0,0 +1,93 @@ + +module.exports = { + 'query': { + 'filtered': { + 'query': { + 'bool': { + 'must': [{ + 'match': { + 'name.default': { + 'query': 'test', + 'boost': 1, + 'analyzer': 'peliasOneEdgeGram' + } + } + }], + 'should': [{ + 'match': { + 'phrase.default': { + 'query': 'test', + 'analyzer': 'peliasPhrase', + 'type': 'phrase', + 'boost': 1, + 'slop': 2 + } + } + },{ + 'function_score': { + 'query': { + 'match': { + 'phrase.default': { + 'query': 'test', + 'analyzer': 'peliasPhrase', + 'type': 'phrase', + 'slop': 2, + 'boost': 1 + } + } + }, + 'max_boost': 20, + 'score_mode': 'first', + 'boost_mode': 'replace', + 'functions': [{ + 'field_value_factor': { + 'modifier': 'log1p', + 'field': 'popularity', + 'missing': 1 + }, + 'weight': 1 + }] + } + },{ + 'function_score': { + 'query': { + 'match': { + 'phrase.default': { + 'query': 'test', + 'analyzer': 'peliasPhrase', + 'type': 'phrase', + 'slop': 2, + 'boost': 1 + } + } + }, + 'max_boost': 20, + 'score_mode': 'first', + 'boost_mode': 'replace', + 'functions': [{ + 'field_value_factor': { + 'modifier': 'log1p', + 'field': 'population', + 'missing': 1 + }, + 'weight': 2 + }] + } + }] + } + }, + 'filter': { + 'bool': { + 'must': [{ + 'terms': { + 'source': ['test_source'] + } + }] + } + } + } + }, + 'sort': [ '_score' ], + 'size': 20, + 'track_scores': true +}; diff --git a/test/unit/helper/geojsonify.js b/test/unit/helper/geojsonify.js index 51704170..7297b67d 100644 --- a/test/unit/helper/geojsonify.js +++ b/test/unit/helper/geojsonify.js @@ -17,6 +17,8 @@ module.exports.tests.earth = function(test, common) { var earth = [{ '_type': 'geoname', '_id': '6295630', + 'source': 'whosonfirst', + 'layer': 'continent', 'name': { 'default': 'Earth' }, @@ -245,7 +247,7 @@ module.exports.tests.search = function(test, common) { 'country': [ 'United States' ], - 'country_id': [ + 'country_gid': [ '85633793' ], 'country_a': [ @@ -254,7 +256,7 @@ module.exports.tests.search = function(test, common) { 'macroregion': [ 'MacroRegion Name' ], - 'macroregion_id': [ + 'macroregion_gid': [ 'MacroRegion Id' ], 'macroregion_a': [ @@ -263,7 +265,7 @@ module.exports.tests.search = function(test, common) { 'region': [ 'New York' ], - 'region_id': [ + 'region_gid': [ '85688543' ], 'region_a': [ @@ -272,7 +274,7 @@ module.exports.tests.search = function(test, common) { 'macrocounty': [ 'MacroCounty Name' ], - 'macrocounty_id': [ + 'macrocounty_gid': [ 'MacroCounty Id' ], 'macrocounty_a': [ @@ -281,7 +283,7 @@ module.exports.tests.search = function(test, common) { 'county': [ 'Kings County' ], - 'county_id': [ + 'county_gid': [ '102082361' ], 'county_a': [ @@ -290,20 +292,20 @@ module.exports.tests.search = function(test, common) { 'borough': [ 'Brooklyn' ], - 'borough_id': [ + 'localadmin_gid': [ '404521211' ], 'borough_a': [ null ], - 
'locality_id': [ + 'locality_gid': [ '85977539' ], 'locality_a': [ null ], 'neighbourhood': [], - 'neighbourhood_id': [] + 'neighbourhood_gid': [] } ]; @@ -321,31 +323,26 @@ module.exports.tests.search = function(test, common) { 'name': 'East New York', 'confidence': 0.888, 'country': 'United States', - 'country_id': '85633793', + 'country_gid': '85633793', 'country_a': 'USA', 'macroregion': 'MacroRegion Name', - 'macroregion_id': 'MacroRegion Id', + 'macroregion_gid': 'MacroRegion Id', 'macroregion_a': 'MacroRegion Abbreviation', 'region': 'New York', - 'region_id': '85688543', + 'region_gid': '85688543', 'region_a': 'NY', 'macrocounty': 'MacroCounty Name', - 'macrocounty_id': 'MacroCounty Id', + 'macrocounty_gid': 'MacroCounty Id', 'macrocounty_a': 'MacroCounty Abbreviation', 'county': 'Kings County', - 'county_id': '102082361', 'borough': 'Brooklyn', - 'borough_id': '404521211', + 'county_gid': '102082361', + 'localadmin_gid': '404521211', 'locality': 'New York', - 'locality_id': '85977539', - 'bounding_box': { - 'min_lat': 40.6514712164, - 'max_lat': 40.6737320588, - 'min_lon': -73.8967895508, - 'max_lon': -73.8665771484 - }, + 'locality_gid': '85977539', 'label': 'East New York, Brooklyn, New York, NY, United States' }, + 'bbox': [-73.8967895508,40.6514712164,-73.8665771484,40.6737320588], 'geometry': { 'type': 'Point', 'coordinates': [ diff --git a/test/unit/helper/labelSchema.js b/test/unit/helper/labelSchema.js index 9b0a35f3..117fc42c 100644 --- a/test/unit/helper/labelSchema.js +++ b/test/unit/helper/labelSchema.js @@ -1,4 +1,3 @@ - var schemas = require('../../../helper/labelSchema'); var alpha3 = require('../mock/alpha3.json'); diff --git a/test/unit/middleware/localNamingConventions.js b/test/unit/middleware/localNamingConventions.js index c544abe7..1d4102f0 100644 --- a/test/unit/middleware/localNamingConventions.js +++ b/test/unit/middleware/localNamingConventions.js @@ -77,8 +77,21 @@ module.exports.tests.flipNumberAndStreet = function(test, common) { } }; + var unknownCountryAddress = { + '_id': 'test4', + '_type': 'test', + 'name': { 'default': '123 Main Street' }, + 'center_point': { 'lon': 30.1, 'lat': -50 }, + 'address_parts': { + 'number': '123', + 'street': 'Main Street' + }, + 'parent': { + } + }; + var req = {}, - res = { data: [ ukAddress, deAddress, nlAddress ] }, + res = { data: [ ukAddress, deAddress, nlAddress, unknownCountryAddress ] }, middleware = localNamingConventions(); test('flipNumberAndStreet', function(t) { @@ -96,6 +109,10 @@ module.exports.tests.flipNumberAndStreet = function(test, common) { // this definition comes from pelias configuration t.equal( res.data[2].name.default, 'Keizersgracht 117', 'flipped name' ); + // addresses without a known country (either due to missing data or admin lookup + // being disabled), don't have the name flipped + t.equal( res.data[3].name.default, '123 Main Street', 'standard name'); + t.end(); }); }); diff --git a/test/unit/middleware/normalizeParentIds.js b/test/unit/middleware/normalizeParentIds.js new file mode 100644 index 00000000..472df0e3 --- /dev/null +++ b/test/unit/middleware/normalizeParentIds.js @@ -0,0 +1,91 @@ +var normalizer = require('../../../middleware/normalizeParentIds')(); + +module.exports.tests = {}; + +module.exports.tests.interface = function(test, common) { + test('WOF ids converted to Pelias ids', function(t) { + + var input = { + data: [{ + 'parent': { + 'country': ['United States'], // these shouldn't change + 'country_id': ['85633793'], + 'country_a': ['USA'] + }, + 'country': 
['United States'], + 'country_gid': ['85633793'], + 'country_a': ['USA'], + 'macroregion': ['MacroRegion Name'], + 'macroregion_gid': ['foobar'], + 'macroregion_a': ['MacroRegion Abbreviation'], + 'region': ['New York'], + 'region_gid': ['85688543'], + 'region_a': ['NY'], + 'macrocounty': ['MacroCounty Name'], + 'macrocounty_gid': ['~~~~~'], + 'macrocounty_a': ['MacroCounty Abbreviation'], + 'county': ['Kings County'], + 'county_gid': ['102082361'], + 'county_a': [null], + 'localadmin': ['Brooklyn'], + 'localadmin_gid': ['404521211'], + 'localadmin_a': [null], + 'locality': ['Some Locality'], + 'locality_gid': ['85977539'], + 'locality_a': [null], + 'neighbourhood': [], + 'neighbourhood_gid': [] + }] + }; + + var expected = { + data: [{ + 'parent': { + 'country': ['United States'], + 'country_id': ['85633793'], + 'country_a': ['USA'] + }, + 'country': ['United States'], + 'country_gid': ['whosonfirst:country:85633793'], + 'country_a': ['USA'], + 'macroregion': ['MacroRegion Name'], + 'macroregion_gid': ['whosonfirst:macroregion:foobar'], + 'macroregion_a': ['MacroRegion Abbreviation'], + 'region': ['New York'], + 'region_gid': ['whosonfirst:region:85688543'], + 'region_a': ['NY'], + 'macrocounty': ['MacroCounty Name'], + 'macrocounty_gid': ['whosonfirst:macrocounty:~~~~~'], + 'macrocounty_a': ['MacroCounty Abbreviation'], + 'county': ['Kings County'], + 'county_gid': ['whosonfirst:county:102082361'], + 'county_a': [null], + 'localadmin': ['Brooklyn'], + 'localadmin_gid': ['whosonfirst:localadmin:404521211'], + 'localadmin_a': [null], + 'locality': ['Some Locality'], + 'locality_gid': ['whosonfirst:locality:85977539'], + 'locality_a': [null], + 'neighbourhood': [], + 'neighbourhood_gid': [] + }] + }; + + normalizer({}, input, function () { + t.deepEqual(input, expected); + t.end(); + }); + + }); +}; + +module.exports.all = function (tape, common) { + + function test(name, testFunction) { + return tape('[middleware] normalizeParentIds: ' + name, testFunction); + } + + for( var testCase in module.exports.tests ){ + module.exports.tests[testCase](test, common); + } +}; diff --git a/test/unit/query/autocomplete.js b/test/unit/query/autocomplete.js index dc973ddc..0e09457b 100644 --- a/test/unit/query/autocomplete.js +++ b/test/unit/query/autocomplete.js @@ -95,6 +95,19 @@ module.exports.tests.query = function(test, common) { t.deepEqual(compiled, expected, 'valid autocomplete query'); t.end(); }); + + test('valid sources filter', function(t) { + var query = generate({ + 'text': 'test', + 'sources': ['test_source'] + }); + + var compiled = JSON.parse( JSON.stringify( query ) ); + var expected = require('../fixture/autocomplete_with_source_filtering'); + + t.deepEqual(compiled, expected, 'valid autocomplete query with source filtering'); + t.end(); + }); }; module.exports.all = function (tape, common) { diff --git a/test/unit/query/search.js b/test/unit/query/search.js index f20122bb..e503311b 100644 --- a/test/unit/query/search.js +++ b/test/unit/query/search.js @@ -182,6 +182,19 @@ module.exports.tests.query = function(test, common) { t.end(); }); + test('valid sources filter', function(t) { + var query = generate({ + 'text': 'test', + 'sources': ['test_source'] + }); + + var compiled = JSON.parse( JSON.stringify( query ) ); + var expected = require('../fixture/search_with_source_filtering'); + + t.deepEqual(compiled, expected, 'valid search query with source filtering'); + t.end(); + }); + }; module.exports.all = function (tape, common) { diff --git a/test/unit/run.js b/test/unit/run.js index 
5f2ad49b..6f64f2e6 100644 --- a/test/unit/run.js +++ b/test/unit/run.js @@ -28,6 +28,7 @@ var tests = [ require('./middleware/localNamingConventions'), require('./middleware/dedupe'), require('./middleware/parseBBox'), + require('./middleware/normalizeParentIds'), require('./query/autocomplete'), require('./query/autocomplete_defaults'), require('./query/search_defaults'), diff --git a/test/unit/sanitiser/_size.js b/test/unit/sanitiser/_size.js index e4e61ac9..67c8bd84 100644 --- a/test/unit/sanitiser/_size.js +++ b/test/unit/sanitiser/_size.js @@ -6,7 +6,7 @@ module.exports.tests.sanitize_size = function(test, common) { test('size=0', function(t) { var raw = { size: 0 }; var clean = {}; - var res = sanitize(raw, clean); + var res = sanitize(/*defaults*/)(raw, clean); t.equal(res.errors.length, 0, 'should return no errors'); t.equal(res.warnings.length, 1, 'should return warning'); t.equal(res.warnings[0], 'out-of-range integer \'size\', using MIN_SIZE', 'check warning text'); @@ -17,7 +17,7 @@ module.exports.tests.sanitize_size = function(test, common) { test('size=10000', function(t) { var raw = { size: 10000 }; var clean = {}; - var res = sanitize(raw, clean); + var res = sanitize(/*defaults*/)(raw, clean); t.equal(res.errors.length, 0, 'should return no errors'); t.equal(res.warnings.length, 1, 'should return warning'); t.equal(res.warnings[0], 'out-of-range integer \'size\', using MAX_SIZE', 'check warning text'); @@ -28,7 +28,7 @@ module.exports.tests.sanitize_size = function(test, common) { test('size not set', function(t) { var raw = {}; var clean = {}; - var res = sanitize(raw, clean); + var res = sanitize(/*defaults*/)(raw, clean); t.equal(res.errors.length, 0, 'should return no errors'); t.equal(res.warnings.length, 0, 'should return no warning'); t.equal(clean.size, 10, 'default to 10'); @@ -41,7 +41,7 @@ module.exports.tests.sanitize_size = function(test, common) { test('size=' + size, function (t) { var raw = {size: size}; var clean = {}; - var res = sanitize(raw, clean); + var res = sanitize(/*defaults*/)(raw, clean); t.equal(res.errors.length, 0, 'should return no errors'); t.equal(res.warnings.length, 0, 'should return warning'); t.equal(clean.size, 5, 'set to correct integer'); diff --git a/test/unit/sanitiser/autocomplete.js b/test/unit/sanitiser/autocomplete.js index 4044d7f1..26bf9afb 100644 --- a/test/unit/sanitiser/autocomplete.js +++ b/test/unit/sanitiser/autocomplete.js @@ -4,7 +4,7 @@ module.exports.tests = {}; module.exports.tests.sanitisers = function(test, common) { test('check sanitiser list', function (t) { - var expected = ['singleScalarParameters', 'text', 'size', 'private', 'geo_autocomplete' ]; + var expected = ['singleScalarParameters', 'text', 'size', 'layers', 'sources', 'sources_and_layers', 'private', 'geo_autocomplete' ]; t.deepEqual(Object.keys(autocomplete.sanitiser_list), expected); t.end(); });