mirror of https://github.com/pelias/api.git
Stephen K Hess, 8 years ago (committed by GitHub)
38 changed files with 3294 additions and 485 deletions
@@ -0,0 +1,31 @@

const text_analyzer = require('pelias-text-analyzer');
const _ = require('lodash');
const iso3166 = require('iso3166-1');

function setup(should_execute) {
  function controller( req, res, next ){
    // bail early if req/res don't pass conditions for execution
    if (!should_execute(req, res)) {
      return next();
    }

    // parse text with query parser
    const parsed_text = text_analyzer.parse(req.clean.text);

    if (parsed_text !== undefined) {
      // if a known ISO2 country was parsed, convert it to ISO3
      if (_.has(parsed_text, 'country') && iso3166.is2(_.toUpper(parsed_text.country))) {
        parsed_text.country = iso3166.to3(_.toUpper(parsed_text.country));
      }

      req.clean.parsed_text = parsed_text;
    }

    return next();
  }

  return controller;
}

module.exports = setup;
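Note: `setup` above is a controller factory: it takes a `should_execute` predicate and returns Express-style middleware. A minimal, hypothetical wiring sketch follows (the real route configuration lives elsewhere in the API and may differ; the variable names here are invented for illustration):

const libpostal = require('./controller/libpostal');
const has_request_parameter = require('./controller/predicates/has_request_parameter');

// only attempt a libpostal parse when the request actually has a 'text' parameter
const libpostalController = libpostal(has_request_parameter('text'));

// libpostalController(req, res, next) can now be mounted like any other Express middleware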
@@ -0,0 +1,18 @@

const _ = require('lodash');

// return true if any setup parameter is a key of request.clean.parsed_text
// "arguments" is only available in long-form function declarations, cannot be shortened to fat arrow syntax
// potential improvement: inject set operator to allow for any/all functionality
module.exports = function() {
  // save off requested properties since arguments can't be referenced later
  const properties = _.values(arguments);

  // return true if any of the supplied properties are in clean.parsed_text
  return (request, response) => !_.isEmpty(
    _.intersection(
      properties,
      _.keys(_.get(request, ['clean', 'parsed_text'], {}))
    )
  );

};
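For illustration, this is how the predicate behaves (the request object below is made up for the example):

const has_any_parsed_text_property = require('./controller/predicates/has_any_parsed_text_property');

const req = { clean: { parsed_text: { number: '30', street: 'West 26th Street' } } };

has_any_parsed_text_property('number', 'street')(req); // true  - at least one supplied key is present
has_any_parsed_text_property('query')(req);            // false - 'query' is not a parsed_text key
has_any_parsed_text_property('number')({ clean: {} }); // false - no parsed_text at all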
@@ -0,0 +1,4 @@

const _ = require('lodash');

// returns true IFF req.clean has a key with the supplied name
module.exports = (parameter) => (req, res) => (_.has(req, ['clean', parameter]));
@@ -0,0 +1,4 @@

const _ = require('lodash');

// returns true IFF req.clean.parser is addressit
module.exports = (req, res) => (_.get(req, 'clean.parser') === 'addressit');
@@ -0,0 +1,7 @@

const _ = require('lodash');

// returns true IFF req.clean.layers is non-empty AND contains only venue/address/street layers
module.exports = (req, res) => (
  !_.isEmpty(_.get(req, 'clean.layers', [])) &&
  _.isEmpty(_.difference(req.clean.layers, ['venue', 'address', 'street']))
);
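A quick illustration of the predicate's behavior (the example requests are invented for clarity):

const is_only_non_admin_layers = require('./controller/predicates/is_only_non_admin_layers');

is_only_non_admin_layers({ clean: { layers: ['address', 'street'] } });   // true  - only non-admin layers requested
is_only_non_admin_layers({ clean: { layers: [] } });                      // false - empty layers list
is_only_non_admin_layers({ clean: { layers: ['address', 'locality'] } }); // false - contains an admin layer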
@@ -0,0 +1,9 @@

const _ = require('lodash');

// returns true IFF 'whosonfirst' is the only requested source
module.exports = (req, res) => (
  _.isEqual(
    _.get(req, 'clean.sources', []),
    ['whosonfirst']
  )
);
@@ -0,0 +1,113 @@

const _ = require('lodash');

const searchService = require('../service/search');
const logger = require('pelias-logger').get('api');
const logging = require( '../helper/logging' );
const retry = require('retry');

function isRequestTimeout(err) {
  return _.get(err, 'status') === 408;
}

function setup( apiConfig, esclient, query, should_execute ){
  function controller( req, res, next ){
    if (!should_execute(req, res)) {
      return next();
    }

    const cleanOutput = _.cloneDeep(req.clean);
    if (logging.isDNT(req)) {
      logging.removeFields(cleanOutput);
    }
    // log clean parameters for stats
    logger.info('[req]', `endpoint=${req.path}`, cleanOutput);

    const renderedQuery = query(req.clean, res);

    // if there's no query to call ES with, skip the service
    if (_.isUndefined(renderedQuery)) {
      return next();
    }

    // options for retry
    // maxRetries is from the API config with a default of 3
    // factor of 1 means that each retry attempt will wait the esclient requestTimeout before trying again
    const operationOptions = {
      retries: _.get(apiConfig, 'requestRetries', 3),
      factor: 1,
      minTimeout: _.get(esclient, 'transport.requestTimeout')
    };

    // setup a new operation
    const operation = retry.operation(operationOptions);

    // elasticsearch command
    const cmd = {
      index: apiConfig.indexName,
      searchType: 'dfs_query_then_fetch',
      body: renderedQuery.body
    };

    logger.debug( '[ES req]', cmd );

    operation.attempt((currentAttempt) => {
      // query elasticsearch
      searchService( esclient, cmd, function( err, docs, meta ){
        // operation.retry returns true if the operation should be attempted again
        // (it handles the bookkeeping of maxRetries)
        // only consider retrying for status 408 (request timeout)
        if (isRequestTimeout(err) && operation.retry(err)) {
          logger.info(`request timed out on attempt ${currentAttempt}, retrying`);
          return;
        }

        // if execution has gotten this far then one of three things happened:
        // - the request didn't time out
        // - maxRetries has been hit so we're giving up
        // - another error occurred
        // in any case, handle the error or results

        // error handler
        if( err ){
          // push err.message or err onto req.errors
          req.errors.push( _.get(err, 'message', err));
        }
        else {
          // log that a retry was successful
          // most requests succeed on the first attempt so this declutters log files
          if (currentAttempt > 1) {
            logger.info(`succeeded on retry ${currentAttempt-1}`);
          }

          // because this is used in response to placeholder, there may already
          // be results. if there are no results from this ES call, don't overwrite
          // what's already there from placeholder.
          if (!_.isEmpty(docs)) {
            res.data = docs;
            res.meta = meta || {};
            // store the query_type for subsequent middleware
            res.meta.query_type = renderedQuery.type;

            const messageParts = [
              '[controller:search]',
              `[queryType:${renderedQuery.type}]`,
              `[es_result_count:${docs.length}]`
            ];

            logger.info(messageParts.join(' '));
          }
        }
        logger.debug('[ES response]', docs);
        next();
      });

    });

  }

  return controller;
}

module.exports = setup;
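The retry behavior above comes from the `retry` npm module: with `factor: 1` every attempt waits roughly `minTimeout` (the esclient request timeout) before the next try, and `operation.retry(err)` keeps returning true until the retry budget is spent. A self-contained sketch of that pattern (the fake search service and timings below are invented for illustration):

const retry = require('retry');

// a stand-in for the Elasticsearch search service that always times out
const fakeSearch = (callback) => setTimeout(() => callback({ status: 408 }), 10);

// retries: 3, factor: 1 => up to 4 total attempts, each spaced ~minTimeout apart
const operation = retry.operation({ retries: 3, factor: 1, minTimeout: 250 });

operation.attempt((currentAttempt) => {
  fakeSearch((err) => {
    // operation.retry(err) returns true and schedules another attempt
    // until the retry budget is exhausted (or err is falsy)
    if (err && err.status === 408 && operation.retry(err)) {
      console.log(`attempt ${currentAttempt} timed out, retrying`);
      return;
    }
    console.log(err ? 'giving up after retries' : 'success');
  });
});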
@@ -0,0 +1,192 @@

const peliasQuery = require('pelias-query');
const defaults = require('./search_defaults');
const logger = require('pelias-logger').get('api');
const _ = require('lodash');
const check = require('check-types');

//------------------------------
// general-purpose search query
//------------------------------
const addressUsingIdsQuery = new peliasQuery.layout.AddressesUsingIdsQuery();

// scoring boost
addressUsingIdsQuery.score( peliasQuery.view.focus_only_function( peliasQuery.view.phrase ) );
// --------------------------------

// non-scoring hard filters
addressUsingIdsQuery.filter( peliasQuery.view.boundary_country );
addressUsingIdsQuery.filter( peliasQuery.view.boundary_circle );
addressUsingIdsQuery.filter( peliasQuery.view.boundary_rect );
addressUsingIdsQuery.filter( peliasQuery.view.sources );
// --------------------------------

// This query is a departure from traditional Pelias queries where textual
// names of admin areas were looked up. This query uses the ids returned by
// placeholder for lookups, which dramatically reduces the amount of information
// that ES has to store and allows us to have placeholder handle altnames on
// behalf of Pelias.
//
// For the happy path, an input like '30 West 26th Street, Manhattan' would result
// in:
// neighbourhood_id in []
// borough_id in [421205771]
// locality_id in [85945171, 85940551, 85972655]
// localadmin_id in [404502889, 404499147, 404502891, 85972655]
//
// Where the ids are for all the various Manhattans. Each of those could
// conceivably be the Manhattan that the user was referring to, so all must be
// queried for at the same time.
//
// A counter example for this is '1 West Market Street, York, PA' where York, PA
// can be interpreted as a locality OR county. From experience, when there's
// ambiguity between locality and county for an input, the user is, with complete
// metaphysical certitude, referring to the city. If they were referring to the
// county, they would have entered 'York County, PA'. The point is that it's
// insufficient to just query for all ids because, in this case, '1 West Market Street'
// in other cities in York County, PA would be returned and would be both jarring
// to the user and almost certainly lead to incorrect results. For example,
// the following could be returned (all are towns in York County, PA):
// - 1 West Market Street, Dallastown, PA
// - 1 West Market Street, Fawn Grove, PA
// - 1 West Market Street, Shrewsbury, PA
// etc.
//
// To avoid this calamitous response, this query takes the approach of
// "granularity bands". That is, if there are any ids in the first set of any
// of these granularities:
// - neighbourhood
// - borough
// - locality
// - localadmin
// - region
// - macroregion
// - dependency
// - country
//
// then query for all ids in only those layers. Falling back, if there are
// no ids in those layers, query for the county/macrocounty layers.
//
// This methodology ensures that no happened-to-match-on-county results are returned.
//
// The decision was made to include all other layers in one band to solve the issue
// where a country and city share a name, such as Mexico, which could be
// interpreted as a country AND city (in Missouri). The data itself will sort
// out which is correct. That is, it's unlikely that "11 Rock Springs Dr" exists
// in Mexico the country due to naming conventions and would be filtered out
// (though it could, but that's good because it's legitimate)

const granularity_bands = [
  ['neighbourhood', 'borough', 'locality', 'localadmin', 'region', 'macroregion', 'dependency', 'country'],
  ['county', 'macrocounty']
];

// returns true IFF there are *any* results in the granularity band
function anyResultsAtGranularityBand(results, band) {
  return results.some(result => _.includes(band, result.layer));
}

// returns the ids of results at the requested layer
function getIdsAtLayer(results, layer) {
  return results.filter(result => result.layer === layer).map(_.property('source_id'));
}

/**
  map request variables to query variables for all inputs
  provided by this HTTP request. This function operates on res.data which is the
  Document-ified placeholder response.
 **/
function generateQuery( clean, res ){
  const vs = new peliasQuery.Vars( defaults );
  const results = _.defaultTo(res.data, []);

  const logParts = ['query:address_search_using_ids', 'parser:libpostal'];

  // sources
  if( !_.isEmpty(clean.sources) ) {
    vs.var( 'sources', clean.sources);
    logParts.push('param:sources');
  }

  // size
  if( clean.querySize ) {
    vs.var( 'size', clean.querySize );
    logParts.push('param:querySize');
  }

  if( !_.isEmpty(clean.parsed_text.number) ){
    vs.var( 'input:housenumber', clean.parsed_text.number );
  }
  vs.var( 'input:street', clean.parsed_text.street );

  // find the first granularity band for which there are results
  const granularity_band = granularity_bands.find(band => anyResultsAtGranularityBand(results, band));

  // if there's a granularity band, accumulate the ids from each layer in the band
  // into an object mapping layer->ids of those layers
  if (granularity_band) {
    const layers_to_ids = granularity_band.reduce((acc, layer) => {
      acc[layer] = getIdsAtLayer(res.data, layer);
      return acc;
    }, {});

    // use an object here instead of calling `set` since that flattens out an
    // object into key/value pairs and makes identifying layers harder in the query module
    vs.var('input:layers', layers_to_ids);
  }

  // focus point
  if( check.number(clean['focus.point.lat']) &&
      check.number(clean['focus.point.lon']) ){
    vs.set({
      'focus:point:lat': clean['focus.point.lat'],
      'focus:point:lon': clean['focus.point.lon']
    });
  }

  // boundary rect
  if( check.number(clean['boundary.rect.min_lat']) &&
      check.number(clean['boundary.rect.max_lat']) &&
      check.number(clean['boundary.rect.min_lon']) &&
      check.number(clean['boundary.rect.max_lon']) ){
    vs.set({
      'boundary:rect:top': clean['boundary.rect.max_lat'],
      'boundary:rect:right': clean['boundary.rect.max_lon'],
      'boundary:rect:bottom': clean['boundary.rect.min_lat'],
      'boundary:rect:left': clean['boundary.rect.min_lon']
    });
  }

  // boundary circle
  if( check.number(clean['boundary.circle.lat']) &&
      check.number(clean['boundary.circle.lon']) ){
    vs.set({
      'boundary:circle:lat': clean['boundary.circle.lat'],
      'boundary:circle:lon': clean['boundary.circle.lon']
    });

    if( check.number(clean['boundary.circle.radius']) ){
      vs.set({
        'boundary:circle:radius': Math.round( clean['boundary.circle.radius'] ) + 'km'
      });
    }
  }

  // boundary country
  if( check.string(clean['boundary.country']) ){
    vs.set({
      'boundary:country': clean['boundary.country']
    });
  }

  // format the log parts into a single coherent string
  logger.info(logParts.map(part => `[${part}]`).join(' '));

  return {
    type: 'fallback',
    body: addressUsingIdsQuery.render(vs)
  };

}

module.exports = generateQuery;
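To make the granularity-band logic concrete: for the '30 West 26th Street, Manhattan' example from the comments above, the first band matches, so `layers_to_ids` would look roughly like the object below (ids copied from that comment; actual placeholder output may differ, and band layers with no results simply get empty arrays):

const layers_to_ids = {
  neighbourhood: [],
  borough: [421205771],
  locality: [85945171, 85940551, 85972655],
  localadmin: [404502889, 404499147, 404502891, 85972655],
  region: [],
  macroregion: [],
  dependency: [],
  country: []
};

// the county/macrocounty band is never consulted because the first band already has results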
@@ -0,0 +1,32 @@

const sanitizeAll = require('../sanitizer/sanitizeAll'),
      sanitizers = {
        text: require('../sanitizer/_text_addressit')
      };

const sanitize = function(req, cb) { sanitizeAll(req, sanitizers, cb); };
const logger = require('pelias-logger').get('api');
const logging = require( '../helper/logging' );

// middleware
module.exports = (should_execute) => {
  return function(req, res, next) {
    // if res.data already has results then don't call the _text_autocomplete sanitizer
    // this has been put into place for when the libpostal integration way of querying
    // ES doesn't return anything and we want to fallback to the old logic
    if (!should_execute(req, res)) {
      return next();
    }

    // log the query that caused a fallback since libpostal+new-queries didn't return anything
    if (req.path === '/v1/search') {
      const queryText = logging.isDNT(req) ? '[text removed]' : req.clean.text;
      logger.info(`fallback queryText: ${queryText}`);
    }

    sanitize( req, function( err, clean ){
      next();
    });

  };

};
@@ -1,30 +0,0 @@

var sanitizeAll = require('../sanitizer/sanitizeAll'),
    sanitizers = {
      text: require('../sanitizer/_text_addressit')
    };

var sanitize = function(req, cb) { sanitizeAll(req, sanitizers, cb); };
var logger = require('pelias-logger').get('api');
var logging = require( '../helper/logging' );
var _ = require('lodash');

// middleware
module.exports.middleware = function( req, res, next ){
  // if res.data already has results then don't call the _text_autocomplete sanitizer
  // this has been put into place for when the libpostal integration way of querying
  // ES doesn't return anything and we want to fallback to the old logic
  if (_.get(res, 'data', []).length > 0) {
    return next();
  }

  // log the query that caused a fallback since libpostal+new-queries didn't return anything
  if (req.path === '/v1/search') {
    const queryText = logging.isDNT(req) ? '[text removed]' : req.clean.text;
    logger.info(`fallback queryText: ${queryText}`);
  }

  sanitize( req, function( err, clean ){
    next();
  });

};
@@ -0,0 +1,290 @@

'use strict';

const proxyquire = require('proxyquire').noCallThru();
const _ = require('lodash');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', t => {
    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => undefined
      }
    });

    t.equal(typeof controller, 'function', 'libpostal is a function');
    t.equal(typeof controller(), 'function', 'libpostal returns a controller');
    t.end();

  });

};

module.exports.tests.should_execute = (test, common) => {
  test('should_execute returning false should not call text-analyzer', t => {
    const should_execute = (req, res) => {
      // req and res should be passed to should_execute
      t.deepEquals(req, {
        clean: {
          text: 'original query'
        }
      });
      t.deepEquals(res, { b: 2 });
      return false;
    };

    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => {
          t.fail('parse should not have been called');
        }
      }
    })(should_execute);

    const req = {
      clean: {
        text: 'original query'
      }
    };
    const res = { b: 2 };

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          text: 'original query'
        }
      }, 'req should not have been modified');
      t.deepEquals(res, { b: 2 });
      t.end();
    });

  });

  test('should_execute returning true should call text-analyzer', t => {
    t.plan(5);

    const should_execute = (req, res) => {
      // req and res should be passed to should_execute
      t.deepEquals(req, {
        clean: {
          text: 'original query'
        }
      });
      t.deepEquals(res, { b: 2 });
      return true;
    };

    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: (query) => {
          t.equals(query, 'original query');
          return undefined;
        }
      }
    })(should_execute);

    const req = {
      clean: {
        text: 'original query'
      }
    };
    const res = { b: 2 };

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          text: 'original query'
        }
      }, 'req should not have been modified');
      t.deepEquals(res, { b: 2 });
      t.end();
    });

  });

};

module.exports.tests.parse_is_called = (test, common) => {
  test('parse returning undefined should not overwrite clean.parsed_text', t => {
    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => undefined
      }
    })(() => true);

    const req = {
      clean: {
        parsed_text: 'original parsed_text'
      }
    };
    const res = 'this is the response';

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          parsed_text: 'original parsed_text'
        }
      });
      t.deepEquals(res, 'this is the response');
      t.end();
    });

  });

  test('parse returning something should overwrite clean.parsed_text', t => {
    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => 'replacement parsed_text'
      }
    })(() => true);

    const req = {
      clean: {
        parsed_text: 'original parsed_text'
      }
    };
    const res = 'this is the response';

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          parsed_text: 'replacement parsed_text'
        }
      });
      t.deepEquals(res, 'this is the response');
      t.end();
    });

  });

};

module.exports.tests.iso2_conversion = (test, common) => {
  test('no country in parse response should leave country unset', t => {
    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => ({
          locality: 'this is the locality'
        })
      },
      'iso3166-1': {
        is2: () => t.fail('should not have been called'),
        to3: () => t.fail('should not have been called')
      }
    })(() => true);

    const req = {
      clean: {
        parsed_text: 'original parsed_text'
      }
    };
    const res = 'this is the response';

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          parsed_text: {
            locality: 'this is the locality'
          }
        }
      });
      t.deepEquals(res, 'this is the response');
      t.end();
    });

  });

  test('unknown country should not be converted', t => {
    t.plan(3);

    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => ({
          country: 'unknown country code'
        })
      },
      'iso3166-1': {
        is2: country => {
          t.equals(country, 'UNKNOWN COUNTRY CODE');
          return false;
        },
        to3: () => t.fail('should not have been called')
      }
    })(() => true);

    const req = {
      clean: {
        parsed_text: 'original parsed_text'
      }
    };
    const res = 'this is the response';

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          parsed_text: {
            country: 'unknown country code'
          }
        }
      });
      t.deepEquals(res, 'this is the response');
      t.end();
    });

  });

  test('ISO2 country should be converted to ISO3', t => {
    t.plan(4);

    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => ({
          country: 'ISO2 COUNTRY CODE'
        })
      },
      'iso3166-1': {
        is2: country => {
          t.equals(country, 'ISO2 COUNTRY CODE');
          return true;
        },
        to3: country => {
          t.equals(country, 'ISO2 COUNTRY CODE');
          return 'ISO3 COUNTRY CODE';
        }
      }
    })(() => true);

    const req = {
      clean: {
        parsed_text: 'original parsed_text'
      }
    };
    const res = 'this is the response';

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          parsed_text: {
            country: 'ISO3 COUNTRY CODE'
          }
        }
      });
      t.deepEquals(res, 'this is the response');
      t.end();
    });

  });

};

module.exports.all = (tape, common) => {

  function test(name, testFunction) {
    return tape(`GET /libpostal ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,94 @@

'use strict';

const _ = require('lodash');
const has_any_parsed_text_property = require('../../../../controller/predicates/has_any_parsed_text_property');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', (t) => {
    t.ok(_.isFunction(has_any_parsed_text_property), 'has_any_parsed_text_property is a function');
    t.end();
  });
};

module.exports.tests.true_conditions = (test, common) => {
  test('defined request.clean.parsed_text.property should return true', (t) => {
    const req = {
      clean: {
        parsed_text: {
          property: 'value'
        }
      }
    };

    t.ok(has_any_parsed_text_property('property')(req));
    t.end();

  });

  test('clean.parsed_text with any property should return true', (t) => {
    const req = {
      clean: {
        parsed_text: {
          property2: 'value2',
          property3: 'value3'
        }
      }
    };

    t.ok(has_any_parsed_text_property('property1', 'property3')(req));
    t.end();

  });

};

module.exports.tests.false_conditions = (test, common) => {
  test('undefined request should return false', (t) => {
    t.notOk(has_any_parsed_text_property('property')());
    t.end();

  });

  test('undefined request.clean should return false', (t) => {
    const req = {};

    t.notOk(has_any_parsed_text_property('property')(req));
    t.end();

  });

  test('undefined request.clean.parsed_text should return false', (t) => {
    const req = {
      clean: {}
    };

    t.notOk(has_any_parsed_text_property('property')(req));
    t.end();

  });

  test('request.clean.parsed_text with none of the supplied properties should return false', (t) => {
    const req = {
      clean: {
        parsed_text: {}
      }
    };

    t.notOk(has_any_parsed_text_property('property1', 'property2')(req));
    t.end();

  });

};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /has_any_parsed_text_property ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,63 @@

'use strict';

const _ = require('lodash');
const has_request_parameter = require('../../../../controller/predicates/has_request_parameter');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', t => {
    t.equal(typeof has_request_parameter, 'function', 'has_request_parameter is a function');
    t.end();
  });
};

module.exports.tests.true_conditions = (test, common) => {
  test('request with specified parameter should return true', t => {
    [[], {}, 'string value', 17].forEach(val => {
      const req = {
        clean: {
          'parameter name': val
        }
      };

      t.ok(has_request_parameter('parameter name')(req));

    });

    t.end();

  });

};

module.exports.tests.false_conditions = (test, common) => {
  test('request with undefined clean should return false', t => {
    const req = {};

    t.notOk(has_request_parameter('parameter name')(req));
    t.end();

  });

  test('request.clean without specified parameter should return false', t => {
    const req = {
      clean: {}
    };

    t.notOk(has_request_parameter('parameter name')(req));
    t.end();

  });

};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /has_request_parameter ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,73 @@

'use strict';

const _ = require('lodash');
const is_addressit_parse = require('../../../../controller/predicates/is_addressit_parse');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', t => {
    t.ok(_.isFunction(is_addressit_parse), 'is_addressit_parse is a function');
    t.end();
  });
};

module.exports.tests.true_conditions = (test, common) => {
  test('request.clean.parser=addressit should return true', t => {
    const req = {
      clean: {
        parser: 'addressit'
      }
    };

    t.ok(is_addressit_parse(req));
    t.end();

  });

};

module.exports.tests.false_conditions = (test, common) => {
  test('undefined request should return false', t => {
    t.notOk(is_addressit_parse(undefined));
    t.end();
  });

  test('undefined request.clean should return false', t => {
    const req = {};

    t.notOk(is_addressit_parse(req));
    t.end();
  });

  test('undefined request.clean.parser should return false', t => {
    const req = {
      clean: {}
    };

    t.notOk(is_addressit_parse(req));
    t.end();
  });

  test('non-\'addressit\' request.clean.parser should return false', t => {
    const req = {
      clean: {
        parser: 'not addressit'
      }
    };

    t.notOk(is_addressit_parse(req));
    t.end();
  });

};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /is_addressit_parse ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,111 @@

'use strict';

const _ = require('lodash');
const is_only_non_admin_layers = require('../../../../controller/predicates/is_only_non_admin_layers');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', t => {
    t.equal(typeof is_only_non_admin_layers, 'function', 'is_only_non_admin_layers is a function');
    t.end();
  });
};

module.exports.tests.true_conditions = (test, common) => {
  test('request with only venue/address/street layers should return true', t => {
    [
      ['venue', 'address', 'street'],
      ['venue', 'address'],
      ['venue', 'street'],
      ['address', 'street'],
      ['venue'],
      ['address'],
      ['street']
    ].forEach(layers => {
      const req = {
        clean: {
          layers: layers
        }
      };

      t.ok(is_only_non_admin_layers(req));

    });

    t.end();

  });

};

module.exports.tests.false_conditions = (test, common) => {
  test('request with undefined clean should return false', t => {
    const req = {};

    t.notOk(is_only_non_admin_layers(req));
    t.end();

  });

  test('request.clean without layers parameter should return false', t => {
    const req = {
      clean: {}
    };

    t.notOk(is_only_non_admin_layers(req));
    t.end();

  });

  test('request with empty layers should return false', t => {
    const req = {
      clean: {
        layers: []
      }
    };

    t.notOk(is_only_non_admin_layers(req));
    t.end();

  });

  test('request.clean.layers without venue, address, or street should return false', t => {
    const req = {
      clean: {
        layers: ['locality']
      }
    };

    t.notOk(is_only_non_admin_layers(req));
    t.end();

  });

  test('request.clean.layers with other layers besides venue, address, or street should return false', t => {
    ['venue', 'address', 'street'].forEach(non_admin_layer => {
      const req = {
        clean: {
          layers: ['locality', non_admin_layer]
        }
      };

      t.notOk(is_only_non_admin_layers(req));

    });

    t.end();

  });

};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /is_only_non_admin_layers ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,107 @@

'use strict';

const _ = require('lodash');
const is_request_sources_only_whosonfirst = require('../../../../controller/predicates/is_request_sources_only_whosonfirst');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', (t) => {
    t.ok(_.isFunction(is_request_sources_only_whosonfirst), 'is_request_sources_only_whosonfirst is a function');
    t.end();
  });
};

module.exports.tests.true_conditions = (test, common) => {
  test('sources only \'whosonfirst\' should return true', (t) => {
    const req = {
      clean: {
        sources: [
          'whosonfirst'
        ]
      }
    };

    t.ok(is_request_sources_only_whosonfirst(req));
    t.end();

  });

};

module.exports.tests.false_conditions = (test, common) => {
  test('undefined req should return false', (t) => {
    t.notOk(is_request_sources_only_whosonfirst(undefined));
    t.end();

  });

  test('undefined req.clean should return false', (t) => {
    const req = {};

    t.notOk(is_request_sources_only_whosonfirst(req));
    t.end();

  });

  test('undefined req.clean.sources should return false', (t) => {
    const req = {
      clean: {}
    };

    t.notOk(is_request_sources_only_whosonfirst(req));
    t.end();

  });

  test('empty req.clean.sources should return false', (t) => {
    const req = {
      clean: {
        sources: []
      }
    };

    t.notOk(is_request_sources_only_whosonfirst(req));
    t.end();

  });

  test('sources not \'whosonfirst\' should return false', (t) => {
    const req = {
      clean: {
        sources: [
          'not whosonfirst'
        ]
      }
    };

    t.notOk(is_request_sources_only_whosonfirst(req));
    t.end();

  });

  test('sources other than \'whosonfirst\' should return false', (t) => {
    const req = {
      clean: {
        sources: [
          'whosonfirst', 'not whosonfirst'
        ]
      }
    };

    t.notOk(is_request_sources_only_whosonfirst(req));
    t.end();

  });

};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /is_request_sources_only_whosonfirst ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,569 @@

'use strict';

const setup = require('../../../controller/search_with_ids');
const proxyquire = require('proxyquire').noCallThru();
const mocklogger = require('pelias-mock-logger');
const _ = require('lodash');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', (t) => {
    t.ok(_.isFunction(setup), 'setup is a function');
    t.ok(_.isFunction(setup()), 'setup returns a controller');
    t.end();
  });
};

module.exports.tests.success = (test, common) => {
  test('successful request to search service should replace data and meta', (t) => {
    t.plan(5);

    const logger = mocklogger();

    const config = {
      indexName: 'indexName value'
    };
    const esclient = 'this is the esclient';
    const query = () => ({
      body: 'this is the query body',
      type: 'this is the query type'
    });

    // a controller that validates the esclient and cmd that was passed to the search service
    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': (esclient, cmd, callback) => {
        t.equal(esclient, 'this is the esclient');
        t.deepEqual(cmd, {
          index: 'indexName value',
          searchType: 'dfs_query_then_fetch',
          body: 'this is the query body'
        });

        const docs = [
          { name: 'replacement result #1'},
          { name: 'replacement result #2'}
        ];
        const meta = { key: 'replacement meta value' };

        callback(undefined, docs, meta);
      },
      'pelias-logger': logger
    })(config, esclient, query, () => true );

    const req = { clean: { }, errors: [], warnings: [] };
    const res = {
      data: [
        { name: 'original result #1'},
        { name: 'original result #2'}
      ],
      meta: {
        key: 'original meta value'
      }
    };

    const next = () => {
      t.deepEqual(req, {
        clean: {},
        errors: [],
        warnings: []
      });
      t.deepEquals(res, {
        data: [
          { name: 'replacement result #1'},
          { name: 'replacement result #2'}
        ],
        meta: {
          key: 'replacement meta value',
          query_type: 'this is the query type'
        }
      });

      t.ok(logger.isInfoMessage('[controller:search] [queryType:this is the query type] [es_result_count:2]'));

      t.end();
    };

    controller(req, res, next);

  });

  test('undefined meta should set empty object into res', (t) => {
    const logger = mocklogger();

    const config = {
      indexName: 'indexName value'
    };
    const esclient = 'this is the esclient';
    const query = () => ({
      body: 'this is the query body',
      type: 'this is the query type'
    });

    // a controller that validates the esclient and cmd that was passed to the search service
    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': (esclient, cmd, callback) => {
        const docs = [
          { name: 'replacement result #1'},
          { name: 'replacement result #2'}
        ];

        callback(undefined, docs, undefined);
      },
      'pelias-logger': logger
    })(config, esclient, query, () => true );

    const req = { clean: { }, errors: [], warnings: [] };
    const res = {
      data: [
        { name: 'original result #1'},
        { name: 'original result #2'}
      ],
      meta: {
        key: 'original meta value'
      }
    };

    const next = () => {
      t.deepEqual(req, {
        clean: {},
        errors: [],
        warnings: []
      });
      t.deepEquals(res, {
        data: [
          { name: 'replacement result #1'},
          { name: 'replacement result #2'}
        ],
        meta: {
          query_type: 'this is the query type'
        }
      });

      t.end();
    };

    controller(req, res, next);

  });

  test('undefined docs in response should not overwrite existing results', (t) => {
    t.plan(1+3); // ensures that search service was called, then req+res+logger tests

    const logger = mocklogger();

    const config = {
      indexName: 'indexName value'
    };
    const esclient = 'this is the esclient';
    const query = () => ({
      body: 'this is the query body',
      type: 'this is the query type'
    });

    // a controller that validates the esclient and cmd that was passed to the search service
    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': (esclient, cmd, callback) => {
        t.pass('search service was called');

        const meta = { key: 'new value' };

        callback(undefined, undefined, meta);
      },
      'pelias-logger': logger
    })(config, esclient, query, () => true );

    const req = { clean: { }, errors: [], warnings: [] };
    const res = {
      data: [
        { id: 1 },
        { id: 2 }
      ],
      meta: {
        key: 'value'
      }
    };

    const next = () => {
      t.deepEqual(req, {
        clean: {},
        errors: [],
        warnings: []
      });
      t.deepEquals(res, {
        data: [
          { id: 1 },
          { id: 2 }
        ],
        meta: { key: 'value' }
      });

      t.notOk(logger.isInfoMessage(/[controller:search] [queryType:this is the query type] [es_result_count:0]/));

      t.end();
    };

    controller(req, res, next);

  });

  test('empty docs in response should not overwrite existing results', (t) => {
    t.plan(4);

    const logger = mocklogger();

    const config = {
      indexName: 'indexName value'
    };
    const esclient = 'this is the esclient';
    const query = () => ({
      body: 'this is the query body',
      type: 'this is the query type'
    });

    // a controller that validates the esclient and cmd that was passed to the search service
    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': (esclient, cmd, callback) => {
        t.pass('search service was called');

        const meta = { key: 'value' };

        callback(undefined, [], meta);
      }
    })(config, esclient, query, () => true );

    const req = { clean: { }, errors: [], warnings: [] };
    const res = {
      data: [
        { name: 'pre-existing result #1' },
        { name: 'pre-existing result #2' }
      ],
      meta: {
        key: 'value'
      }
    };

    const next = () => {
      t.deepEqual(req, {
        clean: {},
        errors: [],
        warnings: []
      });
      t.deepEquals(res, {
        data: [
          { name: 'pre-existing result #1' },
          { name: 'pre-existing result #2' }
        ],
        meta: { key: 'value' }
      });

      t.notOk(logger.isInfoMessage(/[controller:search] [queryType:this is the query type] [es_result_count:0]/));

      t.end();
    };

    controller(req, res, next);

  });

  test('successful request on retry to search service should log info message', (t) => {
    t.plan(3+2+2); // 3 search service calls, 2 log messages, 1 req, 1 res

    const logger = mocklogger();

    const config = {
      indexName: 'indexName value'
    };
    const esclient = 'this is the esclient';
    const query = () => ({
      body: 'this is the query body',
      type: 'this is the query type'
    });

    let searchServiceCallCount = 0;

    const timeoutError = {
      status: 408,
      displayName: 'RequestTimeout',
      message: 'Request Timeout after 17ms'
    };

    // a controller that validates the esclient and cmd that was passed to the search service
    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': (esclient, cmd, callback) => {
        t.pass('search service was called');

        if (searchServiceCallCount < 2) {
          // note that the searchService got called
          searchServiceCallCount++;
          callback(timeoutError);
        } else {
          const docs = [
            { name: 'replacement result #1'},
            { name: 'replacement result #2'}
          ];
          const meta = { key: 'replacement meta value' };

          callback(undefined, docs, meta);
        }

      },
      'pelias-logger': logger
    })(config, esclient, query, () => true );

    const req = { clean: { }, errors: [], warnings: [] };
    const res = {
      data: [
        { name: 'original result #1'},
        { name: 'original result #2'}
      ],
      meta: {
        key: 'original meta value'
      }
    };

    const next = () => {
      t.deepEqual(req, {
        clean: {},
        errors: [],
        warnings: []
      });
      t.deepEquals(res, {
        data: [
          { name: 'replacement result #1'},
          { name: 'replacement result #2'}
        ],
        meta: {
          key: 'replacement meta value',
          query_type: 'this is the query type'
        }
      });

      t.ok(logger.isInfoMessage('[controller:search] [queryType:this is the query type] [es_result_count:2]'));
      t.ok(logger.isInfoMessage('succeeded on retry 2'));

      t.end();
    };

    controller(req, res, next);

  });

};

module.exports.tests.service_errors = (test, common) => {
  test('default # of request timeout retries should be 3', (t) => {
    // test for 1 initial search service call, 3 retries, 1 log message, 1 req, and 1 res
    t.plan(1 + 3 + 1 + 2);

    const logger = mocklogger();

    const config = {
      indexName: 'indexName value'
    };
    const esclient = 'this is the esclient';
    const query = () => ({
      body: 'this is the query body',
    });

    const timeoutError = {
      status: 408,
      displayName: 'RequestTimeout',
      message: 'Request Timeout after 17ms'
    };

    // a controller that validates that the search service was called
    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': (esclient, cmd, callback) => {
        // note that the searchService got called
        t.pass('search service was called');

        callback(timeoutError);
      },
      'pelias-logger': logger
    })(config, esclient, query, () => true );

    const req = { clean: { }, errors: [], warnings: [] };
    const res = {};

    const next = () => {
      t.ok(logger.getInfoMessages(), [
        'request timed out on attempt 1, retrying',
        'request timed out on attempt 2, retrying',
        'request timed out on attempt 3, retrying'
      ]);

      t.deepEqual(req, {
        clean: {},
        errors: [timeoutError.message],
        warnings: []
      });
      t.deepEqual(res, {});
      t.end();
    };

    controller(req, res, next);

  });

  test('explicit apiConfig.requestRetries should retry that many times', (t) => {
    t.plan(1 + 17); // test for initial search service call and 17 retries

    const config = {
      indexName: 'indexName value',
      requestRetries: 17
    };
    const esclient = 'this is the esclient';
    const query = () => ({ });

    const timeoutError = {
      status: 408,
      displayName: 'RequestTimeout',
      message: 'Request Timeout after 17ms'
    };

    // a controller that validates that the search service was called
    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': (esclient, cmd, callback) => {
        // note that the searchService got called
        t.pass('search service was called');

        callback(timeoutError);
      }
    })(config, esclient, query, () => true );

    const req = { clean: { }, errors: [], warnings: [] };
    const res = {};

    controller(req, res, () => t.end() );

  });

  test('only status code 408 should be considered a retryable request', (t) => {
    t.plan(2);

    const config = {
      indexName: 'indexName value',
      requestRetries: 17
    };
    const esclient = 'this is the esclient';
    const query = () => ({ });

    const nonTimeoutError = {
      status: 500,
      displayName: 'InternalServerError',
      message: 'an internal server error occurred'
    };

    // a controller that validates that the search service was called
    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': (esclient, cmd, callback) => {
        // note that the searchService got called
        t.pass('search service was called');

        callback(nonTimeoutError);
      }
    })(config, esclient, query, () => true );

    const req = { clean: { }, errors: [], warnings: [] };
    const res = {};

    const next = () => {
      t.deepEqual(req, {
        clean: {},
        errors: [nonTimeoutError.message],
        warnings: []
      });
      t.end();
    };

    controller(req, res, next);

  });

  test('string error should not retry and be logged as-is', (t) => {
    t.plan(2); // service call + error is in req.errors

    const config = {
      indexName: 'indexName value'
    };
    const esclient = 'this is the esclient';
    const query = () => ({ });

    // a controller that validates that the search service was called
    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': (esclient, cmd, callback) => {
        // note that the searchService got called
        t.pass('search service was called');

        callback('this is an error string');
      }
    })(config, esclient, query, () => true );

    const req = { clean: { }, errors: [], warnings: [] };
    const res = {};

    const next = () => {
      t.deepEqual(req, {
        clean: {},
        errors: ['this is an error string'],
        warnings: []
      });
      t.end();
    };

    controller(req, res, next);

  });

};

module.exports.tests.should_execute = (test, common) => {
  test('should_execute returning false and empty req.errors should call next', (t) => {
    const esclient = () => t.fail('esclient should not have been called');
    const query = () => t.fail('query should not have been called');
    const should_execute = () => false;
    const controller = setup( {}, esclient, query, should_execute );

    const req = { };
    const res = { };

    const next = () => {
      t.deepEqual(res, { });
      t.end();
    };
    controller(req, res, next);

  });

};

module.exports.tests.undefined_query = (test, common) => {
  test('query returning undefined should not call service', (t) => {
    t.plan(0, 'test will fail if search service actually gets called');

    // a query function that returns undefined
    const query = () => undefined;

    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': () => {
        t.fail('search service should not have been called');
      }
    })(undefined, undefined, query, () => true );

    const next = () => t.end();

    controller({}, {}, next);

  });
};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /search ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,27 @@

'use strict';

module.exports = class MockQuery {
  constructor() {
    this._score_functions = [];
    this._filter_functions = [];
  }

  render(vs) {
    return {
      vs: vs,
      score_functions: this._score_functions,
      filter_functions: this._filter_functions
    };
  }

  score(view) {
    this._score_functions.push(view);
    return this;
  }

  filter(view) {
    this._filter_functions.push(view);
    return this;
  }

};
@ -0,0 +1,553 @@
|
||||
const generateQuery = require('../../../query/address_search_using_ids'); |
||||
const _ = require('lodash'); |
||||
const proxyquire = require('proxyquire').noCallThru(); |
||||
const mock_logger = require('pelias-mock-logger'); |
||||
const MockQuery = require('./MockQuery'); |
||||
|
||||
module.exports.tests = {}; |
||||
|
||||
module.exports.tests.interface = (test, common) => { |
||||
test('valid interface', (t) => { |
||||
t.ok(_.isFunction(generateQuery)); |
||||
t.end(); |
||||
}); |
||||
}; |
||||
|
||||
// helper for canned views
|
||||
const views = { |
||||
focus_only_function: () => 'focus_only_function', |
||||
boundary_country: 'boundary_country view', |
||||
boundary_circle: 'boundary_circle view', |
||||
boundary_rect: 'boundary_rect view', |
||||
sources: 'sources view' |
||||
}; |
||||
|
||||
module.exports.tests.base_query = (test, common) => { |
||||
test('basic', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
} |
||||
}; |
||||
const res = { |
||||
data: [] |
||||
}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.equals(generatedQuery.type, 'fallback'); |
||||
|
||||
t.equals(generatedQuery.body.vs.var('input:housenumber').toString(), 'housenumber value'); |
||||
t.equals(generatedQuery.body.vs.var('input:street').toString(), 'street value'); |
||||
t.notOk(generatedQuery.body.vs.isset('sources')); |
||||
t.equals(generatedQuery.body.vs.var('size').toString(), 20); |
||||
|
||||
t.deepEquals(generatedQuery.body.score_functions, [ |
||||
'focus_only_function' |
||||
]); |
||||
|
||||
t.deepEquals(generatedQuery.body.filter_functions, [ |
||||
'boundary_country view', |
||||
'boundary_circle view', |
||||
'boundary_rect view', |
||||
'sources view' |
||||
]); |
||||
|
||||
t.deepEquals(logger.getInfoMessages(), ['[query:address_search_using_ids] [parser:libpostal]']); |
||||
t.end(); |
||||
|
||||
}); |
||||
}; |
||||
|
||||
module.exports.tests.other_parameters = (test, common) => { |
||||
test('explicit size set', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
}, |
||||
querySize: 'querySize value' |
||||
}; |
||||
const res = { |
||||
data: [] |
||||
}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.equals(generatedQuery.body.vs.var('size').toString(), 'querySize value'); |
||||
t.deepEquals(logger.getInfoMessages(), ['[query:address_search_using_ids] [parser:libpostal] [param:querySize]']); |
||||
t.end(); |
||||
|
||||
}); |
||||
|
||||
test('explicit sources set', (t) => { |
const logger = mock_logger(); |
|
const clean = { |
parsed_text: { |
number: 'housenumber value', |
street: 'street value' |
}, |
sources: ['source 1', 'source 2'] |
}; |
const res = { |
data: [] |
}; |
|
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
'pelias-logger': logger, |
'pelias-query': { |
layout: { |
AddressesUsingIdsQuery: MockQuery |
}, |
view: views, |
Vars: require('pelias-query').Vars |
} |
}); |
|
const generatedQuery = generateQuery(clean, res); |
|
t.deepEquals(generatedQuery.body.vs.var('sources').toString(), ['source 1', 'source 2']); |
t.deepEquals(logger.getInfoMessages(), ['[query:address_search_using_ids] [parser:libpostal] [param:sources]']); |
t.end(); |
|
}); |
|
}; |
|
module.exports.tests.granularity_bands = (test, common) => { |
test('neighbourhood/borough/locality/localadmin granularity band', (t) => { |
const logger = mock_logger(); |
|
const clean = { |
parsed_text: { |
number: 'housenumber value', |
street: 'street value' |
} |
}; |
const res = { |
data: [ |
{ |
layer: 'neighbourhood', |
source_id: 1 |
}, |
{ |
layer: 'borough', |
source_id: 2 |
}, |
{ |
layer: 'locality', |
source_id: 3 |
}, |
{ |
layer: 'localadmin', |
source_id: 4 |
}, |
{ |
layer: 'county', |
source_id: 5 |
}, |
{ |
layer: 'macrocounty', |
source_id: 6 |
}, |
{ |
layer: 'region', |
source_id: 7 |
}, |
{ |
layer: 'macroregion', |
source_id: 8 |
}, |
{ |
layer: 'dependency', |
source_id: 9 |
}, |
{ |
layer: 'country', |
source_id: 10 |
}, |
{ |
layer: 'neighbourhood', |
source_id: 11 |
}, |
{ |
layer: 'borough', |
source_id: 12 |
}, |
{ |
layer: 'locality', |
source_id: 13 |
}, |
{ |
layer: 'localadmin', |
source_id: 14 |
}, |
{ |
layer: 'county', |
source_id: 15 |
}, |
{ |
layer: 'macrocounty', |
source_id: 16 |
}, |
{ |
layer: 'region', |
source_id: 17 |
}, |
{ |
layer: 'macroregion', |
source_id: 18 |
}, |
{ |
layer: 'dependency', |
source_id: 19 |
}, |
{ |
layer: 'country', |
source_id: 20 |
} |
] |
}; |
|
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
'pelias-logger': logger, |
'pelias-query': { |
layout: { |
AddressesUsingIdsQuery: MockQuery |
}, |
view: views, |
Vars: require('pelias-query').Vars |
} |
|
}); |
|
const generatedQuery = generateQuery(clean, res); |
|
t.deepEquals(generatedQuery.body.vs.var('input:layers').$, { |
neighbourhood: [1, 11], |
borough: [2, 12], |
locality: [3, 13], |
localadmin: [4, 14], |
region: [7, 17], |
macroregion: [8, 18], |
dependency: [9, 19], |
country: [10, 20] |
}); |
|
t.end(); |
}); |
|
test('only band members with ids should be passed', (t) => { |
const logger = mock_logger(); |
|
const clean = { |
parsed_text: { |
number: 'housenumber value', |
street: 'street value' |
} |
}; |
const res = { |
data: [ |
{ |
layer: 'neighbourhood', |
source_id: 1 |
} |
] |
}; |
|
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
'pelias-logger': logger, |
'pelias-query': { |
layout: { |
AddressesUsingIdsQuery: MockQuery |
}, |
view: views, |
Vars: require('pelias-query').Vars |
} |
}); |
|
const generatedQuery = generateQuery(clean, res); |
|
t.deepEquals(generatedQuery.body.vs.var('input:layers').$, { |
neighbourhood: [1], |
borough: [], |
locality: [], |
localadmin: [], |
region: [], |
macroregion: [], |
dependency: [], |
country: [] |
}); |
|
t.end(); |
}); |
|
test('county/macrocounty granularity band', (t) => { |
const logger = mock_logger(); |
|
const clean = { |
parsed_text: { |
number: 'housenumber value', |
street: 'street value' |
} |
}; |
const res = { |
data: [ |
{ |
layer: 'county', |
source_id: 1 |
}, |
{ |
layer: 'macrocounty', |
source_id: 2 |
}, |
{ |
layer: 'county', |
source_id: 4 |
}, |
{ |
layer: 'macrocounty', |
source_id: 5 |
} |
] |
}; |
|
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
'pelias-logger': logger, |
'pelias-query': { |
layout: { |
AddressesUsingIdsQuery: MockQuery |
}, |
view: views, |
Vars: require('pelias-query').Vars |
} |
|
}); |
|
const generatedQuery = generateQuery(clean, res); |
|
t.deepEquals(generatedQuery.body.vs.var('input:layers').$, { |
county: [1, 4], |
macrocounty: [2, 5] |
}); |
|
t.end(); |
}); |
|
}; |
|
module.exports.tests.boundary_filters = (test, common) => { |
test('boundary.country available should add to query', (t) => { |
const logger = mock_logger(); |
|
const clean = { |
parsed_text: { |
number: 'housenumber value', |
street: 'street value' |
}, |
'boundary.country': 'boundary.country value' |
}; |
const res = {}; |
|
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
'pelias-logger': logger, |
'pelias-query': { |
layout: { |
AddressesUsingIdsQuery: MockQuery |
}, |
view: views, |
Vars: require('pelias-query').Vars |
} |
|
}); |
|
const generatedQuery = generateQuery(clean, res); |
|
t.equals(generatedQuery.body.vs.var('boundary:country').toString(), 'boundary.country value'); |
|
t.end(); |
|
}); |
|
test('focus.point.lat/lon w/both numbers should add to query', (t) => { |
const logger = mock_logger(); |
|
const clean = { |
parsed_text: { |
number: 'housenumber value', |
street: 'street value' |
}, |
'focus.point.lat': 12.121212, |
'focus.point.lon': 21.212121 |
}; |
const res = {}; |
|
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
'pelias-logger': logger, |
'pelias-query': { |
layout: { |
AddressesUsingIdsQuery: MockQuery |
}, |
view: views, |
Vars: require('pelias-query').Vars |
} |
|
}); |
|
const generatedQuery = generateQuery(clean, res); |
|
t.equals(generatedQuery.body.vs.var('focus:point:lat').toString(), 12.121212); |
t.equals(generatedQuery.body.vs.var('focus:point:lon').toString(), 21.212121); |
|
t.end(); |
|
}); |
|
test('boundary.rect with all numbers should add to query', (t) => { |
const logger = mock_logger(); |
|
const clean = { |
parsed_text: { |
number: 'housenumber value', |
street: 'street value' |
}, |
'boundary.rect.min_lat': 12.121212, |
'boundary.rect.max_lat': 13.131313, |
'boundary.rect.min_lon': 21.212121, |
'boundary.rect.max_lon': 31.313131 |
}; |
const res = {}; |
|
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
'pelias-logger': logger, |
'pelias-query': { |
layout: { |
AddressesUsingIdsQuery: MockQuery |
}, |
view: views, |
Vars: require('pelias-query').Vars |
} |
|
}); |
|
const generatedQuery = generateQuery(clean, res); |
|
t.equals(generatedQuery.body.vs.var('boundary:rect:top').toString(), 13.131313); |
t.equals(generatedQuery.body.vs.var('boundary:rect:right').toString(), 31.313131); |
t.equals(generatedQuery.body.vs.var('boundary:rect:bottom').toString(), 12.121212); |
t.equals(generatedQuery.body.vs.var('boundary:rect:left').toString(), 21.212121); |
|
t.end(); |
|
}); |
|
test('boundary circle without radius should set radius to default', (t) => { |
const logger = mock_logger(); |
|
const clean = { |
parsed_text: { |
number: 'housenumber value', |
street: 'street value' |
}, |
'boundary.circle.lat': 12.121212, |
'boundary.circle.lon': 21.212121 |
}; |
const res = {}; |
|
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
'pelias-logger': logger, |
'pelias-query': { |
layout: { |
AddressesUsingIdsQuery: MockQuery |
}, |
view: views, |
Vars: require('pelias-query').Vars |
} |
|
}); |
|
const generatedQuery = generateQuery(clean, res); |
|
t.equals(generatedQuery.body.vs.var('boundary:circle:lat').toString(), 12.121212); |
t.equals(generatedQuery.body.vs.var('boundary:circle:lon').toString(), 21.212121); |
t.equals(generatedQuery.body.vs.var('boundary:circle:radius').toString(), '50km'); |
|
t.end(); |
|
}); |
|
test('boundary circle with radius set radius to that value rounded', (t) => { |
const logger = mock_logger(); |
|
const clean = { |
parsed_text: { |
number: 'housenumber value', |
street: 'street value' |
}, |
'boundary.circle.lat': 12.121212, |
'boundary.circle.lon': 21.212121, |
'boundary.circle.radius': 17.6 |
}; |
const res = {}; |
|
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
'pelias-logger': logger, |
'pelias-query': { |
layout: { |
AddressesUsingIdsQuery: MockQuery |
}, |
view: views, |
Vars: require('pelias-query').Vars |
} |
|
}); |
|
const generatedQuery = generateQuery(clean, res); |
|
t.equals(generatedQuery.body.vs.var('boundary:circle:lat').toString(), 12.121212); |
t.equals(generatedQuery.body.vs.var('boundary:circle:lon').toString(), 21.212121); |
t.equals(generatedQuery.body.vs.var('boundary:circle:radius').toString(), '18km'); |
|
t.end(); |
|
}); |
|
}; |
|
module.exports.all = (tape, common) => { |
function test(name, testFunction) { |
return tape(`address_search_using_ids query ${name}`, testFunction); |
} |
|
for( var testCase in module.exports.tests ){ |
module.exports.tests[testCase](test, common); |
} |
}; |
@ -0,0 +1,132 @@
|
const proxyquire = require('proxyquire').noCallThru(); |
const mock_logger = require('pelias-mock-logger'); |
|
module.exports.tests = {}; |
|
module.exports.tests.sanitize = (test, common) => { |
test('verify that no sanitizers were called when should_execute returns false', (t) => { |
t.plan(1); |
|
const logger = mock_logger(); |
|
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
// were all called correctly
|
const defer_to_addressit = proxyquire('../../../sanitizer/defer_to_addressit', { |
'../sanitizer/_text_addressit': () => { |
t.fail('_text_addressit should not have been called'); |
}, |
'pelias-logger': logger |
})(() => false); |
|
defer_to_addressit({}, {}, () => { |
t.equals(logger.getInfoMessages().length, 0); |
t.end(); |
}); |
|
}); |
|
test('verify that _text_addressit sanitizer was called when should_execute returns true', (t) => { |
t.plan(2); |
|
const logger = mock_logger(); |
|
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
// were all called correctly
|
const defer_to_addressit = proxyquire('../../../sanitizer/defer_to_addressit', { |
'../sanitizer/_text_addressit': () => { |
t.pass('_text_addressit should have been called'); |
return { errors: [], warnings: [] }; |
}, |
'pelias-logger': logger, |
'../helper/logging': { |
isDNT: () => false |
} |
})(() => true); |
|
const req = { |
path: '/v1/search', |
clean: { |
text: 'this is the query text' |
} |
}; |
|
defer_to_addressit(req, {}, () => { |
t.deepEquals(logger.getInfoMessages(), ['fallback queryText: this is the query text']); |
t.end(); |
}); |
|
}); |
|
test('query should not be logged if path != \'/v1/search\'', (t) => { |
t.plan(2); |
|
const logger = mock_logger(); |
|
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
// were all called correctly
|
const defer_to_addressit = proxyquire('../../../sanitizer/defer_to_addressit', { |
'../sanitizer/_text_addressit': () => { |
t.pass('_text_addressit should have been called'); |
return { errors: [], warnings: [] }; |
}, |
'pelias-logger': logger |
})(() => true); |
|
const req = { |
path: 'not /v1/search', |
clean: { |
text: 'this is the query text' |
} |
}; |
|
defer_to_addressit(req, {}, () => { |
t.deepEquals(logger.getInfoMessages(), []); |
t.end(); |
}); |
|
}); |
|
test('query should be logged as [text removed] if private', (t) => { |
t.plan(2); |
|
const logger = mock_logger(); |
|
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
// were all called correctly
|
const defer_to_addressit = proxyquire('../../../sanitizer/defer_to_addressit', { |
'../sanitizer/_text_addressit': () => { |
t.pass('_text_addressit should have been called'); |
return { errors: [], warnings: [] }; |
}, |
'pelias-logger': logger, |
'../helper/logging': { |
isDNT: () => true |
} |
})(() => true); |
|
const req = { |
path: '/v1/search', |
clean: { |
text: 'this is the query text' |
} |
}; |
|
defer_to_addressit(req, {}, () => { |
t.deepEquals(logger.getInfoMessages(), ['fallback queryText: [text removed]']); |
t.end(); |
}); |
|
}); |
|
}; |
|
module.exports.all = function (tape, common) { |
|
function test(name, testFunction) { |
return tape(`SANITIZE /defer_to_addressit ${name}`, testFunction); |
} |
|
for( var testCase in module.exports.tests ){ |
module.exports.tests[testCase](test, common); |
} |
}; |
@ -1,185 +0,0 @@
|
var proxyquire = require('proxyquire').noCallThru(); |
|
module.exports.tests = {}; |
|
module.exports.tests.sanitize = function(test, common) { |
test('verify that all sanitizers were called as expected when `res` is undefined', function(t) { |
var called_sanitizers = []; |
|
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
// were all called correctly
|
var search = proxyquire('../../../sanitizer/search_fallback', { |
'../sanitizer/_text_addressit': function() { |
called_sanitizers.push('_text_addressit'); |
return { errors: [], warnings: [] }; |
} |
}); |
|
var expected_sanitizers = [ |
'_text_addressit' |
]; |
|
var req = {}; |
|
search.middleware(req, undefined, function(){ |
t.deepEquals(called_sanitizers, expected_sanitizers); |
t.end(); |
}); |
|
}); |
|
test('verify that all sanitizers were called as expected when `res` has no `data` property', function(t) { |
var called_sanitizers = []; |
|
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
// were all called correctly
|
var search = proxyquire('../../../sanitizer/search_fallback', { |
'../sanitizer/_text_addressit': function() { |
called_sanitizers.push('_text_addressit'); |
return { errors: [], warnings: [] }; |
} |
}); |
|
var expected_sanitizers = [ |
'_text_addressit' |
]; |
|
var req = {}; |
var res = {}; |
|
search.middleware(req, res, function(){ |
t.deepEquals(called_sanitizers, expected_sanitizers); |
t.end(); |
}); |
|
}); |
|
test('verify that all sanitizers were called as expected when res.data is empty', function(t) { |
var called_sanitizers = []; |
|
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
// were all called correctly
|
var search = proxyquire('../../../sanitizer/search_fallback', { |
'../sanitizer/_text_addressit': function() { |
called_sanitizers.push('_text_addressit'); |
return { errors: [], warnings: [] }; |
} |
}); |
|
var expected_sanitizers = [ |
'_text_addressit' |
]; |
|
var req = {}; |
var res = { |
data: [] |
}; |
|
search.middleware(req, res, function(){ |
t.deepEquals(called_sanitizers, expected_sanitizers); |
t.end(); |
}); |
|
}); |
|
test('non-empty res.data should not call the _text_autocomplete sanitizer', function(t) { |
var called_sanitizers = []; |
|
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
// were all called correctly
|
var search = proxyquire('../../../sanitizer/search_fallback', { |
'../sanitizer/_text_autocomplete': function() { |
throw new Error('_text_autocomplete sanitizer should not have been called'); |
} |
}); |
|
var expected_sanitizers = []; |
|
var req = {}; |
var res = { |
data: [{}] |
}; |
|
search.middleware(req, res, function(){ |
t.deepEquals(called_sanitizers, expected_sanitizers); |
t.end(); |
}); |
|
}); |
|
test('req.clean.text should be logged when isDNT=false', (t) => { |
const infoLog = []; |
|
const search = proxyquire('../../../sanitizer/search_fallback', { |
'pelias-logger': { |
get: () => { |
return { |
info: (msg) => { |
infoLog.push(msg); |
} |
}; |
} |
}, |
'../helper/logging': { |
isDNT: () => { return false; } |
} |
}); |
|
const req = { |
path: '/v1/search', |
clean: { |
text: 'this is the query text' |
} |
}; |
|
search.middleware(req, undefined, () => { |
t.deepEquals(infoLog, [`fallback queryText: ${req.clean.text}`]); |
t.end(); |
}); |
|
}); |
|
test('req.clean.text should not be logged when isDNT=true', (t) => { |
const infoLog = []; |
|
const search = proxyquire('../../../sanitizer/search_fallback', { |
'pelias-logger': { |
get: () => { |
return { |
info: (msg) => { |
infoLog.push(msg); |
} |
}; |
} |
}, |
'../helper/logging': { |
isDNT: () => { return true; } |
} |
}); |
|
const req = { |
path: '/v1/search', |
clean: { |
text: 'this is the query text' |
} |
}; |
|
search.middleware(req, undefined, () => { |
t.deepEquals(infoLog, ['fallback queryText: [text removed]']); |
t.end(); |
}); |
|
}); |
|
}; |
|
module.exports.all = function (tape, common) { |
|
function test(name, testFunction) { |
return tape('SANITIZE /search_fallback ' + name, testFunction); |
} |
|
for( var testCase in module.exports.tests ){ |
module.exports.tests[testCase](test, common); |
} |
}; |