mirror of https://github.com/pelias/api.git
Stephen Hess
9 years ago
38 changed files with 1041 additions and 108 deletions
@@ -0,0 +1,5 @@
#!/bin/bash

set -euo pipefail

node test/unit/run.js | ./node_modules/.bin/tap-dot
@@ -0,0 +1,11 @@
module.exports = [
  'country',
  'macroregion',
  'region',
  'macrocounty',
  'county',
  'localadmin',
  'locality',
  'borough',
  'neighbourhood'
];
@@ -0,0 +1,56 @@
var logger = require('pelias-logger').get('api');
var Document = require('pelias-model').Document;

var placeTypes = require('../helper/placeTypes');

/**
 * Convert WOF integer ids to Pelias formatted ids that can be used by the /place endpoint.
 * This should probably be moved to the import pipeline once we are happy with the way this works.
 */

function setup() {
  return function (req, res, next) {
    // do nothing if no result data set
    if (!res || !res.data) {
      return next();
    }

    res.data = res.data.map(normalizeParentIds);

    next();
  };
}

/**
 * Update all parent ids in the admin hierarchy
 *
 * @param {object} place
 * @return {object}
 */
function normalizeParentIds(place) {

  if (place) {
    placeTypes.forEach(function (placeType) {
      if (place[placeType] && place[placeType].length > 0 && place[placeType][0]) {
        place[placeType + '_gid'] = [ makeNewId(placeType, place[placeType + '_gid']) ];
      }
    });
  }

  return place;
}

/**
 * Generate a valid Pelias id from a placetype and WOF id.
 * Assumes all of the incoming ids are WOF ids.
 *
 * @param {string} placeType
 * @param {number} id
 * @return {string}
 */
function makeNewId(placeType, id) {
  var doc = new Document('whosonfirst', placeType, id);
  return doc.getGid();
}

module.exports = setup;
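
For context, a minimal sketch (not part of the changeset) of the id transformation makeNewId performs; the expected output string is taken verbatim from the unit test at the bottom of this changeset:

var Document = require('pelias-model').Document;

// a bare WOF id becomes a source- and layer-scoped Pelias gid,
// suitable for use with the /place endpoint
var gid = new Document('whosonfirst', 'country', '85633793').getGid();
console.log(gid); // 'whosonfirst:country:85633793'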
@@ -0,0 +1,34 @@

#> address layer
path: '/v1/autocomplete?text=a&layers=address'

#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.not.exist json.geocoding.errors

#? expected warnings
should.not.exist json.geocoding.warnings

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql ["address"]
@@ -0,0 +1,46 @@

#> layer alias
path: '/v1/autocomplete?text=a&layers=coarse'

#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.not.exist json.geocoding.errors

#? expected warnings
should.not.exist json.geocoding.warnings

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql [ "continent",
  "macrocountry",
  "country",
  "dependency",
  "region",
  "locality",
  "localadmin",
  "county",
  "macrohood",
  "neighbourhood",
  "microhood",
  "disputed"
]
@@ -0,0 +1,34 @@

#> invalid layer
path: '/v1/autocomplete?text=a&layers=notlayer'

#? 400 bad request
response.statusCode.should.be.equal 400
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,country,region,county,locality,continent,macrocountry,dependency,localadmin,macrohood,neighbourhood,microhood,disputed' ]

#? expected warnings
should.not.exist json.geocoding.warnings

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
@@ -0,0 +1,35 @@

#> valid and invalid layers
path: '/v1/autocomplete?text=a&layers=country,notlayer'

#? 400 bad request
response.statusCode.should.be.equal 400
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,country,region,county,locality,continent,macrocountry,dependency,localadmin,macrohood,neighbourhood,microhood,disputed' ]

#? expected warnings
should.not.exist json.geocoding.warnings

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
should.not.exist json.geocoding.query['layers']
@@ -0,0 +1,34 @@

#> multiple layers
path: '/v1/autocomplete?text=a&layers=country,region'

#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.not.exist json.geocoding.errors

#? expected warnings
should.not.exist json.geocoding.warnings

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql ["country","region"]
@@ -0,0 +1,34 @@

#> single layer
path: '/v1/autocomplete?text=a&layers=country'

#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.not.exist json.geocoding.errors

#? expected warnings
should.not.exist json.geocoding.warnings

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql ["country"]
@@ -0,0 +1,34 @@

#> set size (autocomplete does not allow size to be changed)
path: '/v1/autocomplete?text=a&size=3'

#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.not.exist json.geocoding.errors

#? expected warnings
should.exist json.geocoding.warnings
json.geocoding.warnings.should.eql [ 'out-of-range integer \'size\', using MIN_SIZE' ]

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10 # should remain the default size
@@ -0,0 +1,34 @@

#> invalid source
path: '/v1/autocomplete?text=a&sources=openstreetmap,notasource'

#? 400 bad request
response.statusCode.should.be.equal 400
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notasource\' is an invalid sources parameter. Valid options: osm,oa,gn,wof,openstreetmap,openaddresses,geonames,whosonfirst' ]

#? expected warnings
should.not.exist json.geocoding.warnings

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
@@ -0,0 +1,37 @@

#> sources and layers specified (invalid combo)
path: '/v1/autocomplete?text=a&sources=whosonfirst&layers=address'

#? 400 bad request
response.statusCode.should.be.equal 400
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ 'You have specified both the `sources` and `layers` parameters in a combination that will return no results: the whosonfirst source has nothing in the address layer' ]

#? expected warnings
should.not.exist json.geocoding.warnings

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql ["address"]
json.geocoding.query.sources.should.eql ["whosonfirst"]
should.not.exist json.geocoding.query['type']
@@ -0,0 +1,35 @@

#> sources and layers specified
path: '/v1/autocomplete?text=a&sources=openaddresses&layers=address'

#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.not.exist json.geocoding.errors

#? expected warnings
should.not.exist json.geocoding.warnings

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql ["address"]
json.geocoding.query.sources.should.eql ["openaddresses"]
@@ -0,0 +1,34 @@

#> sources filter
path: '/v1/autocomplete?text=a&sources=openstreetmap,geonames'

#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.not.exist json.geocoding.errors

#? expected warnings
should.not.exist json.geocoding.warnings

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.sources.should.eql ["openstreetmap", "geonames"]
@@ -0,0 +1,34 @@

#> sources filter
path: '/v1/autocomplete?text=a&sources=openstreetmap'

#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'

#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp

#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array

#? expected errors
should.not.exist json.geocoding.errors

#? expected warnings
should.not.exist json.geocoding.warnings

#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.sources.should.eql ["openstreetmap"]
@@ -0,0 +1,58 @@

/**
 Test data required by the ciao test suite.

 Some tests will fail when run against an empty index; you can use this script
 to insert some dummy data into your index before running the tests.

 note: as this is dummy data, care should be taken in order to make sure these
 documents don't end up in your production index; for that reason the HTTP port
 has been hard-coded as port:9200.
**/

// we use the default config to avoid making calls to
// a cluster running on a non-standard port.
var client = require('elasticsearch').Client(),
    async = require('async'),
    actions = [];

// add one record per 'type' in order to cause the _default_ mapping
// to be copied when the new type is created.
var types = ['venue','address','county','region','county','country','admin0','admin1','admin2'],
    sources = ['test'],
    layers = ['geonames'];

// iterate over all types/sources/layers and index a test document
types.forEach( function( type, i1 ){
  sources.forEach( function( source, i2 ){
    layers.forEach( function( layer, i3 ){
      actions.push( function( done ){
        client.index({
          index: 'pelias',
          type: type,
          id: [i1,i2,i3].join(':'),
          body: {
            source: source,
            layer: layer,
            name: { default: 'test' },
            phrase: { default: 'test' },
            parent: {
              country_a: ['USA']
            }
          }
        });
        done();
      });
    });
  });
});

// call refresh so the index merges the changes
actions.push( function( done ){
  client.indices.refresh( { index: 'pelias' }, done);
});

// perform all actions in series
async.series( actions, function( err, resp ){
  console.log('test data imported');
});
@@ -0,0 +1,85 @@

module.exports = {
  'query': {
    'filtered': {
      'query': {
        'bool': {
          'must': [{
            'match': {
              'name.default': {
                'analyzer': 'peliasPhrase',
                'boost': 100,
                'query': 'test',
                'type': 'phrase',
                'operator': 'and'
              }
            }
          }],
          'should': [{
            'function_score': {
              'query': {
                'match': {
                  'name.default': {
                    'analyzer': 'peliasPhrase',
                    'boost': 100,
                    'query': 'test',
                    'type': 'phrase',
                    'operator': 'and'
                  }
                }
              },
              'max_boost': 20,
              'score_mode': 'first',
              'boost_mode': 'replace',
              'functions': [{
                'field_value_factor': {
                  'modifier': 'log1p',
                  'field': 'popularity',
                  'missing': 1
                },
                'weight': 1
              }]
            }
          },{
            'function_score': {
              'query': {
                'match': {
                  'name.default': {
                    'analyzer': 'peliasPhrase',
                    'boost': 100,
                    'query': 'test',
                    'type': 'phrase',
                    'operator': 'and'
                  }
                }
              },
              'max_boost': 20,
              'score_mode': 'first',
              'boost_mode': 'replace',
              'functions': [{
                'field_value_factor': {
                  'modifier': 'log1p',
                  'field': 'population',
                  'missing': 1
                },
                'weight': 3
              }]
            }
          }]
        }
      },
      'filter': {
        'bool': {
          'must': [{
            'terms': {
              'source': ['test_source']
            }
          }]
        }
      }
    }
  },
  'sort': [ '_score' ],
  'size': 20,
  'track_scores': true
};
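
As a sanity check on the function_score blocks above (a hypothetical illustration, not part of the changeset): Elasticsearch's log1p modifier for field_value_factor adds one to the field value and takes the common (base-10) logarithm, which 'weight' then scales; with 'boost_mode': 'replace' and 'score_mode': 'first', the first matching function's value replaces the query score.

// population branch of the fixture above: weight 3, modifier log1p, missing 1
var weight = 3;
var population = 1000000;
var score = weight * Math.log10(1 + population);
console.log(score.toFixed(2)); // '18.00'

// 'missing': 1 means a document without the field scores as if the value were 1
console.log((weight * Math.log10(1 + 1)).toFixed(2)); // '0.90'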
@@ -0,0 +1,93 @@

module.exports = {
  'query': {
    'filtered': {
      'query': {
        'bool': {
          'must': [{
            'match': {
              'name.default': {
                'query': 'test',
                'boost': 1,
                'analyzer': 'peliasOneEdgeGram'
              }
            }
          }],
          'should': [{
            'match': {
              'phrase.default': {
                'query': 'test',
                'analyzer': 'peliasPhrase',
                'type': 'phrase',
                'boost': 1,
                'slop': 2
              }
            }
          },{
            'function_score': {
              'query': {
                'match': {
                  'phrase.default': {
                    'query': 'test',
                    'analyzer': 'peliasPhrase',
                    'type': 'phrase',
                    'slop': 2,
                    'boost': 1
                  }
                }
              },
              'max_boost': 20,
              'score_mode': 'first',
              'boost_mode': 'replace',
              'functions': [{
                'field_value_factor': {
                  'modifier': 'log1p',
                  'field': 'popularity',
                  'missing': 1
                },
                'weight': 1
              }]
            }
          },{
            'function_score': {
              'query': {
                'match': {
                  'phrase.default': {
                    'query': 'test',
                    'analyzer': 'peliasPhrase',
                    'type': 'phrase',
                    'slop': 2,
                    'boost': 1
                  }
                }
              },
              'max_boost': 20,
              'score_mode': 'first',
              'boost_mode': 'replace',
              'functions': [{
                'field_value_factor': {
                  'modifier': 'log1p',
                  'field': 'population',
                  'missing': 1
                },
                'weight': 2
              }]
            }
          }]
        }
      },
      'filter': {
        'bool': {
          'must': [{
            'terms': {
              'source': ['test_source']
            }
          }]
        }
      }
    }
  },
  'sort': [ '_score' ],
  'size': 20,
  'track_scores': true
};
@@ -0,0 +1,91 @@
var normalizer = require('../../../middleware/normalizeParentIds')();

module.exports.tests = {};

module.exports.tests.interface = function(test, common) {
  test('WOF ids converted to Pelias ids', function(t) {

    var input = {
      data: [{
        'parent': {
          'country': ['United States'], // these shouldn't change
          'country_id': ['85633793'],
          'country_a': ['USA']
        },
        'country': ['United States'],
        'country_gid': ['85633793'],
        'country_a': ['USA'],
        'macroregion': ['MacroRegion Name'],
        'macroregion_gid': ['foobar'],
        'macroregion_a': ['MacroRegion Abbreviation'],
        'region': ['New York'],
        'region_gid': ['85688543'],
        'region_a': ['NY'],
        'macrocounty': ['MacroCounty Name'],
        'macrocounty_gid': ['~~~~~'],
        'macrocounty_a': ['MacroCounty Abbreviation'],
        'county': ['Kings County'],
        'county_gid': ['102082361'],
        'county_a': [null],
        'localadmin': ['Brooklyn'],
        'localadmin_gid': ['404521211'],
        'localadmin_a': [null],
        'locality': ['Some Locality'],
        'locality_gid': ['85977539'],
        'locality_a': [null],
        'neighbourhood': [],
        'neighbourhood_gid': []
      }]
    };

    var expected = {
      data: [{
        'parent': {
          'country': ['United States'],
          'country_id': ['85633793'],
          'country_a': ['USA']
        },
        'country': ['United States'],
        'country_gid': ['whosonfirst:country:85633793'],
        'country_a': ['USA'],
        'macroregion': ['MacroRegion Name'],
        'macroregion_gid': ['whosonfirst:macroregion:foobar'],
        'macroregion_a': ['MacroRegion Abbreviation'],
        'region': ['New York'],
        'region_gid': ['whosonfirst:region:85688543'],
        'region_a': ['NY'],
        'macrocounty': ['MacroCounty Name'],
        'macrocounty_gid': ['whosonfirst:macrocounty:~~~~~'],
        'macrocounty_a': ['MacroCounty Abbreviation'],
        'county': ['Kings County'],
        'county_gid': ['whosonfirst:county:102082361'],
        'county_a': [null],
        'localadmin': ['Brooklyn'],
        'localadmin_gid': ['whosonfirst:localadmin:404521211'],
        'localadmin_a': [null],
        'locality': ['Some Locality'],
        'locality_gid': ['whosonfirst:locality:85977539'],
        'locality_a': [null],
        'neighbourhood': [],
        'neighbourhood_gid': []
      }]
    };

    normalizer({}, input, function () {
      t.deepEqual(input, expected);
      t.end();
    });

  });
};

module.exports.all = function (tape, common) {

  function test(name, testFunction) {
    return tape('[middleware] normalizeParentIds: ' + name, testFunction);
  }

  for( var testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
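
A minimal sketch (hypothetical require path and harness; the changeset itself only shows that suites expose an all() entry point and that test/unit/run.js drives them) of how a suite in this shape is typically run:

var tape = require('tape'); // assumption: a TAP-producing harness, consistent with the tap-dot reporter above
var common = {};

// a runner requires each suite and hands it the harness plus shared helpers;
// the suite prefixes its test names and registers every exported test case.
require('./middleware/normalizeParentIds').all(tape, common);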