mirror of https://github.com/pelias/api.git
Julian Simioni authored 7 years ago, committed by GitHub
110 changed files with 6604 additions and 2847 deletions
@@ -0,0 +1,31 @@
const text_analyzer = require('pelias-text-analyzer');
const _ = require('lodash');
const iso3166 = require('iso3166-1');

function setup(should_execute) {
  function controller( req, res, next ){
    // bail early if req/res don't pass conditions for execution
    if (!should_execute(req, res)) {
      return next();
    }

    // parse text with query parser
    const parsed_text = text_analyzer.parse(req.clean.text);

    if (parsed_text !== undefined) {
      // if a known ISO2 country was parsed, convert it to ISO3
      if (_.has(parsed_text, 'country') && iso3166.is2(_.toUpper(parsed_text.country))) {
        parsed_text.country = iso3166.to3(_.toUpper(parsed_text.country));
      }

      req.clean.parsed_text = parsed_text;
    }

    return next();

  }

  return controller;
}

module.exports = setup;
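For orientation, a minimal wiring sketch of this setup function; the Express app, route, and predicate below are assumptions for illustration, not part of this diff:

const express = require('express');
const libpostal = require('./controller/libpostal');

// hypothetical predicate: only run libpostal parsing when no earlier controller produced results
const hasNoResponseData = (req, res) => (res.data || []).length === 0;

const app = express();
app.use('/v1/search', libpostal(hasNoResponseData));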
@@ -0,0 +1,33 @@
const _ = require('lodash');

// "arguments" is only available in long-form function declarations, cannot be shortened to fat arrow syntax
// potential improvement: inject set operator to allow for any/all functionality
module.exports = {
  all: function() {
    // save off property names for future reference
    const properties = _.values(arguments);

    // return true if ALL of the supplied properties are in clean.parsed_text
    return request => _.isEmpty(
      _.difference(
        _.values(properties),
        _.keys(_.get(request, ['clean', 'parsed_text'], {}))
      )
    );

  },
  any: function() {
    // save off property names for future reference
    const properties = _.values(arguments);

    // return true if ANY of the supplied properties are in clean.parsed_text
    return request => !_.isEmpty(
      _.intersection(
        _.values(properties),
        _.keys(_.get(request, ['clean', 'parsed_text'], {}))
      )
    );

  }

};
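A brief usage sketch of the two predicates above; the request shape is assumed for illustration:

const predicates = require('./controller/predicates/has_parsed_text_properties');

const req = { clean: { parsed_text: { street: 'Main St', number: '30' } } };

predicates.all('street', 'number')(req); // true -- both keys present
predicates.all('street', 'city')(req);   // false -- 'city' is missing
predicates.any('city', 'number')(req);   // true -- at least one key present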
@@ -0,0 +1,4 @@
const _ = require('lodash');

// returns true IFF req.clean has a key with the supplied name
module.exports = (parameter) => (req, res) => (_.has(req, ['clean', parameter]));
@@ -0,0 +1,4 @@
const _ = require('lodash');

// returns true IFF req.clean.parser is addressit
module.exports = (req, res) => (_.get(req, 'clean.parser') === 'addressit');
@@ -1,13 +1,18 @@
const _ = require('lodash');
const Debug = require('../../helper/debug');
const debugLog = new Debug('controller:predicates:is_admin_only_analysis');

module.exports = (request, response) => {
  if (!request.clean.hasOwnProperty('parsed_text')) {
    debugLog.push(request, false + '(no parsed_text)');
    return false;
  }

  // return true only if all non-admin properties of parsed_text are empty
  return ['number', 'street', 'query', 'category'].every((prop) => {
  const is_admin_only_analysis = ['number', 'street', 'query', 'category', 'postalcode'].every((prop) => {
    return _.isEmpty(request.clean.parsed_text[prop]);
  });

  debugLog.push(request, is_admin_only_analysis);
  return is_admin_only_analysis;
};
@@ -1,9 +1,13 @@
const _ = require('lodash');

const Debug = require('../../helper/debug');
const debugLog = new Debug('controller:predicates:is_coarse_reverse');
const non_coarse_layers = ['address', 'street', 'venue'];

module.exports = (req, res) => {
  // returns true if layers is undefined, empty, or contains 'address', 'street', or 'venue'
  return !_.isEmpty(req.clean.layers) &&
  const is_coarse_reverse = !_.isEmpty(req.clean.layers) &&
    _.isEmpty(_.intersection(req.clean.layers, non_coarse_layers));

  debugLog.push(req, is_coarse_reverse);
  return is_coarse_reverse;
};
@@ -0,0 +1,7 @@
const _ = require('lodash');

// returns true IFF req.clean.layers is non-empty AND contains only venue/address/street layers
module.exports = (req, res) => (
  !_.isEmpty(_.get(req, 'clean.layers', [])) &&
  _.isEmpty(_.difference(req.clean.layers, ['venue', 'address', 'street']))
);
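A short behaviour sketch of this predicate, consistent with the unit tests later in this diff:

const is_only_non_admin_layers = require('./controller/predicates/is_only_non_admin_layers');

is_only_non_admin_layers({ clean: { layers: ['address', 'street'] } });  // true
is_only_non_admin_layers({ clean: { layers: [] } });                     // false -- empty layers
is_only_non_admin_layers({ clean: { layers: ['locality', 'venue'] } });  // false -- contains an admin layer
is_only_non_admin_layers({ clean: {} });                                 // false -- no layers parameter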
@@ -0,0 +1,9 @@
const _ = require('lodash');

// returns true IFF 'whosonfirst' is the only requested source
module.exports = (req, res) => (
  _.isEqual(
    _.get(req, 'clean.sources', []),
    ['whosonfirst']
  )
);
@@ -1,5 +0,0 @@
module.exports = (uri) => {
  return (request, response) => {
    return uri !== undefined;
  };
};
@@ -0,0 +1,113 @@
const _ = require('lodash');

const searchService = require('../service/search');
const logger = require('pelias-logger').get('api');
const logging = require( '../helper/logging' );
const retry = require('retry');

function isRequestTimeout(err) {
  return _.get(err, 'status') === 408;
}

function setup( apiConfig, esclient, query, should_execute ){
  function controller( req, res, next ){
    if (!should_execute(req, res)) {
      return next();
    }

    const cleanOutput = _.cloneDeep(req.clean);
    if (logging.isDNT(req)) {
      logging.removeFields(cleanOutput);
    }
    // log clean parameters for stats
    logger.info('[req]', `endpoint=${req.path}`, cleanOutput);

    const renderedQuery = query(req.clean, res);

    // if there's no query to call ES with, skip the service
    if (_.isUndefined(renderedQuery)) {
      return next();
    }

    // options for retry
    // maxRetries is from the API config with default of 3
    // factor of 1 means that each retry attempt will wait the esclient requestTimeout
    const operationOptions = {
      retries: _.get(apiConfig, 'requestRetries', 3),
      factor: 1,
      minTimeout: _.get(esclient, 'transport.requestTimeout')
    };

    // setup a new operation
    const operation = retry.operation(operationOptions);

    // elasticsearch command
    const cmd = {
      index: apiConfig.indexName,
      searchType: 'dfs_query_then_fetch',
      body: renderedQuery.body
    };

    logger.debug( '[ES req]', cmd );

    operation.attempt((currentAttempt) => {
      // query elasticsearch
      searchService( esclient, cmd, function( err, docs, meta ){
        // operation.retry returns true if the operation should be attempted again
        // (handles bookkeeping of maxRetries)
        // only consider retrying for status 408 (request timeout)
        if (isRequestTimeout(err) && operation.retry(err)) {
          logger.info(`request timed out on attempt ${currentAttempt}, retrying`);
          return;
        }

        // if execution has gotten this far then one of three things happened:
        // - the request didn't time out
        // - maxRetries has been hit so we're giving up
        // - another error occurred
        // in any case, handle the error or results

        // error handler
        if( err ){
          // push err.message or err onto req.errors
          req.errors.push( _.get(err, 'message', err));
        }
        else {
          // log that a retry was successful
          // most requests succeed on first attempt so this declutters log files
          if (currentAttempt > 1) {
            logger.info(`succeeded on retry ${currentAttempt-1}`);
          }

          // because this is used in response to placeholder, there may already
          // be results.  if there are no results from this ES call, don't overwrite
          // what's already there from placeholder.
          if (!_.isEmpty(docs)) {
            res.data = docs;
            res.meta = meta || {};
            // store the query_type for subsequent middleware
            res.meta.query_type = renderedQuery.type;

            const messageParts = [
              '[controller:search]',
              `[queryType:${renderedQuery.type}]`,
              `[es_result_count:${docs.length}]`
            ];

            logger.info(messageParts.join(' '));

          }

        }
        logger.debug('[ES response]', docs);
        next();
      });

    });

  }

  return controller;
}

module.exports = setup;
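The retry options above translate to behaviour like the following standalone sketch; the fake service, timeout value, and log lines are assumptions for illustration:

const retry = require('retry');

// hypothetical stand-in for the real search service: always reports a request timeout
const fakeSearch = (cb) => setTimeout(() => cb({ status: 408 }), 100);

// retries: 3 with factor: 1 and minTimeout: 2000 means up to three retries, each after ~2s
const operation = retry.operation({ retries: 3, factor: 1, minTimeout: 2000 });

operation.attempt((currentAttempt) => {
  fakeSearch((err) => {
    // only status-408 errors are retried; anything else falls through immediately
    if (err && err.status === 408 && operation.retry(err)) {
      console.log(`timeout on attempt ${currentAttempt}, retrying in ~2s`);
      return;
    }
    console.log('done: success, non-timeout error, or retries exhausted');
  });
});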
@@ -0,0 +1,43 @@
'use strict';
const _ = require('lodash');

class Debug {
  constructor(moduleName){
    this.name = moduleName || 'unnamed module';
  }

  push(req, debugMsg){
    if (!_.isEmpty(req.clean) && req.clean.enableDebug){
      req.debug = req.debug || [];
      // remove the extra space character
      req.debug.push({[this.name]: debugMsg});
      // req.debug.push(`[${this.name}] ${debugMsg}`);
    }
  }
  // optional debugMsg passed to timer
  beginTimer(req, debugMsg){
    if (!_.isEmpty(req.clean) && req.clean.enableDebug){
      // internal object debugTimers. Doesn't get displayed in geocodeJSON
      req.debugTimers = req.debugTimers || {};
      req.debugTimers[this.name] = Date.now();
      if (debugMsg){
        this.push(req, `Timer Began: ${debugMsg}`);
      } else {
        this.push(req, `Timer Began`);
      }
    }
  }

  stopTimer(req, debugMsg){
    if (!_.isEmpty(req.clean) && req.clean.enableDebug){
      let timeElapsed = Date.now() - req.debugTimers[this.name];
      if (debugMsg){
        this.push(req, `Timer Stopped: ${timeElapsed} ms: ${debugMsg}`);
      } else {
        this.push(req, `Timer Stopped: ${timeElapsed} ms`);
      }
    }
  }
}

module.exports = Debug;
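A usage sketch of the Debug helper; the module name and messages are made up for illustration:

const Debug = require('./helper/debug');
const debugLog = new Debug('controller:example'); // hypothetical module name

const req = { clean: { enableDebug: true } };

debugLog.beginTimer(req, 'expensive lookup');
// ... do some work ...
debugLog.push(req, 'found 3 candidates');
debugLog.stopTimer(req);

// req.debug now holds an array of { 'controller:example': message } entries,
// and nothing is recorded when clean.enableDebug is falsy
console.log(req.debug);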
@@ -0,0 +1,192 @@
const peliasQuery = require('pelias-query');
const defaults = require('./search_defaults');
const logger = require('pelias-logger').get('api');
const _ = require('lodash');
const check = require('check-types');

//------------------------------
// general-purpose search query
//------------------------------
const addressUsingIdsQuery = new peliasQuery.layout.AddressesUsingIdsQuery();

// scoring boost
addressUsingIdsQuery.score( peliasQuery.view.focus_only_function( peliasQuery.view.phrase ) );
// --------------------------------

// non-scoring hard filters
addressUsingIdsQuery.filter( peliasQuery.view.boundary_country );
addressUsingIdsQuery.filter( peliasQuery.view.boundary_circle );
addressUsingIdsQuery.filter( peliasQuery.view.boundary_rect );
addressUsingIdsQuery.filter( peliasQuery.view.sources );
// --------------------------------

// This query is a departure from traditional Pelias queries where textual
// names of admin areas were looked up.  This query uses the ids returned by
// placeholder for lookups, which dramatically reduces the amount of information
// that ES has to store and allows us to have placeholder handle altnames on
// behalf of Pelias.
//
// For the happy path, an input like '30 West 26th Street, Manhattan' would result in:
// neighbourhood_id in []
// borough_id in [421205771]
// locality_id in [85945171, 85940551, 85972655]
// localadmin_id in [404502889, 404499147, 404502891, 85972655]
//
// Where the ids are for all the various Manhattans.  Each of those could
// conceivably be the Manhattan that the user was referring to, so all must be
// queried for at the same time.
//
// A counter example for this is '1 West Market Street, York, PA' where York, PA
// can be interpreted as a locality OR county.  From experience, when there's
// ambiguity between locality and county for an input, the user is, with complete
// metaphysical certitude, referring to the city.  If they were referring to the
// county, they would have entered 'York County, PA'.  The point is that it's
// insufficient to just query for all ids because, in this case, '1 West Market Street'
// in other cities in York County, PA would be returned, which would be both jarring
// to the user and almost certainly lead to incorrect results.  For example,
// the following could be returned (all are towns in York County, PA):
// - 1 West Market Street, Dallastown, PA
// - 1 West Market Street, Fawn Grove, PA
// - 1 West Market Street, Shrewsbury, PA
// etc.
//
// To avoid this calamitous response, this query takes the approach of
// "granularity bands".  That is, if there are any ids in the first set of any
// of these granularities:
// - neighbourhood
// - borough
// - locality
// - localadmin
// - region
// - macroregion
// - dependency
// - country
//
// then query for all ids in only those layers.  Falling back, if there are
// no ids in those layers, query for the county/macrocounty layers.
//
// This methodology ensures that no happened-to-match-on-county results are returned.
//
// The decision was made to include all other layers in one band to solve the issue
// where a country and city share a name, such as Mexico, which could be
// interpreted as a country AND city (in Missouri).  The data itself will sort
// out which is correct.  That is, it's unlikely that "11 Rock Springs Dr" exists
// in Mexico the country due to naming conventions and would be filtered out
// (though it could, but that's good because it's legitimate)

const granularity_bands = [
  ['neighbourhood', 'borough', 'locality', 'localadmin', 'region', 'macroregion', 'dependency', 'country'],
  ['county', 'macrocounty']
];

// returns true IFF there are *any* results in the granularity band
function anyResultsAtGranularityBand(results, band) {
  return results.some(result => _.includes(band, result.layer));
}

// returns the ids of results at the requested layer
function getIdsAtLayer(results, layer) {
  return results.filter(result => result.layer === layer).map(_.property('source_id'));
}

/**
  map request variables to query variables for all inputs
  provided by this HTTP request.  This function operates on res.data which is the
  Document-ified placeholder response.
**/
function generateQuery( clean, res ){
  const vs = new peliasQuery.Vars( defaults );
  const results = _.defaultTo(res.data, []);

  const logParts = ['query:address_search_using_ids', 'parser:libpostal'];

  // sources
  if( !_.isEmpty(clean.sources) ) {
    vs.var( 'sources', clean.sources);
    logParts.push('param:sources');
  }

  // size
  if( clean.querySize ) {
    vs.var( 'size', clean.querySize );
    logParts.push('param:querySize');
  }

  if( ! _.isEmpty(clean.parsed_text.number) ){
    vs.var( 'input:housenumber', clean.parsed_text.number );
  }
  vs.var( 'input:street', clean.parsed_text.street );

  // find the first granularity band for which there are results
  const granularity_band = granularity_bands.find(band => anyResultsAtGranularityBand(results, band));

  // if there's a granularity band, accumulate the ids from each layer in the band
  // into an object mapping layer->ids of those layers
  if (granularity_band) {
    const layers_to_ids = granularity_band.reduce((acc, layer) => {
      acc[layer] = getIdsAtLayer(res.data, layer);
      return acc;
    }, {});

    // use an object here instead of calling `set` since that flattens out an
    // object into key/value pairs and makes identifying layers harder in query module
    vs.var('input:layers', layers_to_ids);

  }

  // focus point
  if( check.number(clean['focus.point.lat']) &&
      check.number(clean['focus.point.lon']) ){
    vs.set({
      'focus:point:lat': clean['focus.point.lat'],
      'focus:point:lon': clean['focus.point.lon']
    });
  }

  // boundary rect
  if( check.number(clean['boundary.rect.min_lat']) &&
      check.number(clean['boundary.rect.max_lat']) &&
      check.number(clean['boundary.rect.min_lon']) &&
      check.number(clean['boundary.rect.max_lon']) ){
    vs.set({
      'boundary:rect:top': clean['boundary.rect.max_lat'],
      'boundary:rect:right': clean['boundary.rect.max_lon'],
      'boundary:rect:bottom': clean['boundary.rect.min_lat'],
      'boundary:rect:left': clean['boundary.rect.min_lon']
    });
  }

  // boundary circle
  if( check.number(clean['boundary.circle.lat']) &&
      check.number(clean['boundary.circle.lon']) ){
    vs.set({
      'boundary:circle:lat': clean['boundary.circle.lat'],
      'boundary:circle:lon': clean['boundary.circle.lon']
    });

    if( check.number(clean['boundary.circle.radius']) ){
      vs.set({
        'boundary:circle:radius': Math.round( clean['boundary.circle.radius'] ) + 'km'
      });
    }
  }

  // boundary country
  if( check.string(clean['boundary.country']) ){
    vs.set({
      'boundary:country': clean['boundary.country']
    });
  }

  // format the log parts into a single coherent string
  logger.info(logParts.map(part => `[${part}]`).join(' '));

  return {
    type: 'fallback',
    body: addressUsingIdsQuery.render(vs)
  };

}

module.exports = generateQuery;
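To illustrate the granularity-band selection above, a standalone sketch with made-up placeholder results (the ids and layers are assumptions):

// hypothetical placeholder results
const results = [
  { layer: 'locality',   source_id: '85945171' },
  { layer: 'localadmin', source_id: '404502889' },
  { layer: 'county',     source_id: '102081377' }
];

const granularity_bands = [
  ['neighbourhood', 'borough', 'locality', 'localadmin', 'region', 'macroregion', 'dependency', 'country'],
  ['county', 'macrocounty']
];

// the first band matches because there are locality/localadmin results,
// so the county id is never used to constrain the query
const band = granularity_bands.find(b => results.some(r => b.includes(r.layer)));

const layers_to_ids = band.reduce((acc, layer) => {
  acc[layer] = results.filter(r => r.layer === layer).map(r => r.source_id);
  return acc;
}, {});
// layers_to_ids => { neighbourhood: [], borough: [], locality: ['85945171'], localadmin: ['404502889'], region: [], ... }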
@@ -0,0 +1,23 @@
var _ = require('lodash');

function _sanitize(raw, clean){
  const messages = {errors: [], warnings: []};

  if(!_.isUndefined(raw.debug) ){
    clean.enableDebug = (typeof raw.debug === 'string') ? isTruthy(raw.debug.toLowerCase()) : isTruthy( raw.debug );
  }
  return messages;
}

function _expected() {
  return [{ name: 'debug' }];
}

function isTruthy(val) {
  return _.includes( ['true', '1', 1, true], val );
}

module.exports = () => ({
  sanitize: _sanitize,
  expected: _expected
});
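A quick sketch of how this sanitizer maps the raw debug parameter onto clean.enableDebug:

const debugSanitizer = require('./sanitizer/_debug')();

const clean = {};
debugSanitizer.sanitize({ debug: 'TRUE' }, clean); // string values are lower-cased first
console.log(clean.enableDebug); // true

debugSanitizer.sanitize({ debug: '0' }, clean);
console.log(clean.enableDebug); // false -- only 'true', '1', 1 and true are accepted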
@@ -1,33 +1,31 @@
var check = require('check-types'),
    text_analyzer = require('pelias-text-analyzer');
const check = require('check-types');
const _ = require('lodash');

// validate texts, convert types and apply defaults
function sanitize( raw, clean ){
function _sanitize( raw, clean ){

  // error & warning messages
  var messages = { errors: [], warnings: [] };
  const messages = { errors: [], warnings: [] };

  // invalid input 'text'
  // must call `!check.nonEmptyString` since `check.emptyString` returns
  // `false` for `undefined` and `null`
  if( !check.nonEmptyString( raw.text ) ){
    messages.errors.push('invalid param \'text\': text length, must be >0');
  }

  // valid input 'text'
  else {
  // valid text
  } else {
    clean.text = raw.text;

    // parse text with query parser
    var parsed_text = text_analyzer.parse(clean.text);
    if (check.assigned(parsed_text)) {
      clean.parsed_text = parsed_text;
    }
  }

  return messages;
}

function _expected(){
  return [{ name: 'text' }];
}
// export function
module.exports = sanitize;
module.exports = () => ({
  sanitize: _sanitize,
  expected: _expected
});
@@ -0,0 +1,31 @@
const sanitizeAll = require('../sanitizer/sanitizeAll'),
  sanitizers = {
    debug: require('../sanitizer/_debug')(),
    text: require('../sanitizer/_text_addressit')()
  };

const logger = require('pelias-logger').get('api');
const logging = require( '../helper/logging' );

// middleware
module.exports = (should_execute) => {
  return function(req, res, next) {
    // if res.data already has results then don't call the _text_autocomplete sanitizer
    // this has been put into place for when the libpostal integration way of querying
    // ES doesn't return anything and we want to fallback to the old logic
    if (!should_execute(req, res)) {
      return next();
    }

    // log the query that caused a fallback since libpostal+new-queries didn't return anything
    if (req.path === '/v1/search') {
      const queryText = logging.isDNT(req) ? '[text removed]' : req.clean.text;
      logger.info(`fallback queryText: ${queryText}`);
    }

    sanitizeAll.sanitize(req, sanitizers);
    next();

  };

};
@@ -1,21 +1,25 @@
var _ = require('lodash');
var sanitizeAll = require('../sanitizer/sanitizeAll');
var reverseSanitizers = require('./reverse').sanitizer_list;
var type_mapping = require('../helper/type_mapping');

// add categories to the sanitizer list
var sanitizers = _.merge({}, reverseSanitizers, {
  categories: require('../sanitizer/_categories')
});

var sanitize = function(req, cb) { sanitizeAll(req, sanitizers, cb); };

// export sanitize for testing
module.exports.sanitize = sanitize;
module.exports.sanitizer_list = sanitizers;
var sanitizers = {
  singleScalarParameters: require('../sanitizer/_single_scalar_parameters')(),
  debug: require('../sanitizer/_debug')(),
  quattroshapes_deprecation: require('../sanitizer/_deprecate_quattroshapes')(),
  layers: require('../sanitizer/_targets')('layers', type_mapping.layer_mapping),
  sources: require('../sanitizer/_targets')('sources', type_mapping.source_mapping),
  // depends on the layers and sources sanitizers, must be run after them
  sources_and_layers: require('../sanitizer/_sources_and_layers')(),
  geonames_deprecation: require('../sanitizer/_geonames_deprecation')(),
  size: require('../sanitizer/_size')(/* use defaults*/),
  private: require('../sanitizer/_flag_bool')('private', false),
  geo_reverse: require('../sanitizer/_geo_reverse')(),
  boundary_country: require('../sanitizer/_boundary_country')(),
  categories: require('../sanitizer/_categories')()
};

// middleware
module.exports.middleware = function( req, res, next ){
  sanitize( req, function( err, clean ){
    next();
  });
  sanitizeAll.runAllChecks(req, sanitizers);
  next();
};
@@ -1,20 +1,14 @@

var sanitizeAll = require('../sanitizer/sanitizeAll'),
    sanitizers = {
      singleScalarParameters: require('../sanitizer/_single_scalar_parameters'),
      ids: require('../sanitizer/_ids'),
      singleScalarParameters: require('../sanitizer/_single_scalar_parameters')(),
      debug: require('../sanitizer/_debug')(),
      ids: require('../sanitizer/_ids')(),
      private: require('../sanitizer/_flag_bool')('private', false)
    };

var sanitize = function(req, cb) { sanitizeAll(req, sanitizers, cb); };

// export sanitize for testing
module.exports.sanitize = sanitize;
module.exports.sanitizer_list = sanitizers;

// middleware
module.exports.middleware = function( req, res, next ){
  sanitize( req, function( err, clean ){
    next();
  });
module.exports.middleware = function(req, res, next){
  sanitizeAll.runAllChecks(req, sanitizers);
  next();
};
@@ -1,30 +0,0 @@
var sanitizeAll = require('../sanitizer/sanitizeAll'),
    sanitizers = {
      text: require('../sanitizer/_text_addressit')
    };

var sanitize = function(req, cb) { sanitizeAll(req, sanitizers, cb); };
var logger = require('pelias-logger').get('api');
var logging = require( '../helper/logging' );
var _ = require('lodash');

// middleware
module.exports.middleware = function( req, res, next ){
  // if res.data already has results then don't call the _text_autocomplete sanitizer
  // this has been put into place for when the libpostal integration way of querying
  // ES doesn't return anything and we want to fallback to the old logic
  if (_.get(res, 'data', []).length > 0) {
    return next();
  }

  // log the query that caused a fallback since libpostal+new-queries didn't return anything
  if (req.path === '/v1/search') {
    const queryText = logging.isDNT(req) ? '[text removed]' : req.clean.text;
    logger.info(`fallback queryText: ${queryText}`);
  }

  sanitize( req, function( err, clean ){
    next();
  });

};
@@ -0,0 +1,30 @@
'use strict';

const url = require('url');

const _ = require('lodash');

const ServiceConfiguration = require('pelias-microservice-wrapper').ServiceConfiguration;

class Interpolation extends ServiceConfiguration {
  constructor(o) {
    super('interpolation', o);
  }

  getParameters(req, hit) {
    return {
      number: req.clean.parsed_text.number,
      street: hit.address_parts.street || req.clean.parsed_text.street,
      lat: hit.center_point.lat,
      lon: hit.center_point.lon
    };

  }

  getUrl(req) {
    return url.resolve(this.baseUrl, 'search/geojson');
  }

}

module.exports = Interpolation;
@@ -0,0 +1,34 @@
'use strict';

const url = require('url');

const _ = require('lodash');

const ServiceConfiguration = require('pelias-microservice-wrapper').ServiceConfiguration;

class Language extends ServiceConfiguration {
  constructor(o) {
    super('language', o);
  }

  getParameters(req, res) {
    // find all the values for all keys with names that end with '_id'
    const ids = _.get(res, 'data', []).reduce((acc, doc) => {
      Array.prototype.push.apply(acc, _.values(_.pickBy(doc.parent, (v, k) => _.endsWith(k, '_id') ) ) );
      return acc;
    }, []);

    return {
      // arrays will be nested, so flatten first, then uniqify, and finally join elements with comma
      ids: _.uniq(_.flattenDeep(ids)).join(',')
    };

  }

  getUrl(req) {
    return url.resolve(this.baseUrl, 'parser/findbyid');
  }

}

module.exports = Language;
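For illustration, a sketch of the id extraction performed by getParameters above; the res.data shape and ids are assumptions:

const _ = require('lodash');

// assumed document shape -- parent ids are made up
const res = {
  data: [
    { parent: { country_id: ['85633793'], region_id: ['85688543'], name: 'ignored' } },
    { parent: { country_id: ['85633793'], locality_id: ['85977539'] } }
  ]
};

// collect every value whose key ends in '_id'
const ids = res.data.reduce((acc, doc) => {
  Array.prototype.push.apply(acc, _.values(_.pickBy(doc.parent, (v, k) => _.endsWith(k, '_id'))));
  return acc;
}, []);

// nested arrays flattened, duplicates removed, joined for the query string
console.log(_.uniq(_.flattenDeep(ids)).join(',')); // '85633793,85688543,85977539'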
@@ -1,118 +0,0 @@

var logger = require( 'pelias-logger' ).get( 'api' ),
    request = require( 'superagent' ),
    peliasConfig = require( 'pelias-config' );

/**

  street address interpolation service client

  this file provides several different 'transports' which can be used to access the interpolation
  service, either directly from disk or via a network connection.

  the exported method for this module checks pelias-config for a configuration block such as:

  "interpolation": {
    "client": {
      "adapter": "http",
      "host": "http://localhost:4444"
    }
  }

  for more info on running the service see: https://github.com/pelias/interpolation

**/

/**
  NullTransport

  disables the service completely
**/
function NullTransport(){}
NullTransport.prototype.query = function( coord, number, street, cb ){
  cb(); // no-op
};

/**
  RequireTransport

  allows the api to be used by simply requiring the module
**/
function RequireTransport( addressDbPath, streetDbPath ){
  try {
    var lib = require('pelias-interpolation'); // lazy load dependency
    this.query = lib.api.search( addressDbPath, streetDbPath );
  } catch( e ){
    logger.error( 'RequireTransport: failed to connect to interpolation service' );
  }
}
RequireTransport.prototype.query = function( coord, number, street, cb ){
  throw new Error( 'interpolation: transport not connected' );
};

/**
  HttpTransport

  allows the api to be used via a remote web service
**/
function HttpTransport( host, settings ){
  this.query = function( coord, number, street, cb ){
    request
      .get( host + '/search/geojson' )
      .set( 'Accept', 'application/json' )
      .query({ lat: coord.lat, lon: coord.lon, number: number, street: street })
      .timeout( settings && settings.timeout || 1000 )
      .end( function( err, res ){
        if( err || !res ){ return cb( err ); }
        if( 200 !== res.status ){ return cb( 'non 200 status' ); }
        return cb( null, res.body );
      });
  };
}
HttpTransport.prototype.query = function( coord, number, street, cb ){
  throw new Error( 'interpolation: transport not connected' );
};

/**
  Setup

  allows instantiation of transport depending on configuration and preference
**/
module.exports.search = function setup(){

  // user config
  var config = peliasConfig.generate();

  // ensure config variables set correctly
  if( !config.hasOwnProperty('interpolation') || !config.interpolation.hasOwnProperty('client') ){
    logger.warn( 'interpolation: configuration not found' );
  }

  // valid configuration found
  else {

    // get adapter settings from config
    var settings = config.interpolation.client;

    // http adapter
    if( 'http' === settings.adapter && settings.hasOwnProperty('host') ){
      logger.info( 'interpolation: using http transport:', settings.host );
      if( settings.hasOwnProperty('timeout') ){
        return new HttpTransport( settings.host, { timeout: parseInt( settings.timeout, 10 ) } );
      }
      return new HttpTransport( settings.host );
    }

    // require adapter
    else if( 'require' === settings.adapter ){
      if( settings.hasOwnProperty('streetdb') && settings.hasOwnProperty('addressdb') ){
        logger.info( 'interpolation: using require transport' );
        return new RequireTransport( settings.addressdb, settings.streetdb );
      }
    }
  }

  // default adapter
  logger.info( 'interpolation: using null transport' );
  return new NullTransport();
};
@@ -1,93 +0,0 @@

var logger = require( 'pelias-logger' ).get( 'api' ),
    request = require( 'superagent' ),
    peliasConfig = require( 'pelias-config' );

/**

  language substitution service client

  this file provides a 'transport' which can be used to access the language
  service via a network connection.

  the exported method for this module checks pelias-config for a configuration block such as:

  "language": {
    "client": {
      "adapter": "http",
      "host": "http://localhost:6100"
    }
  }

  for more info on running the service see: https://github.com/pelias/placeholder

**/

/**
  NullTransport

  disables the service completely
**/
function NullTransport(){}
NullTransport.prototype.query = function( ids, cb ){
  cb(); // no-op
};

/**
  HttpTransport

  allows the api to be used via a remote web service
**/
function HttpTransport( host, settings ){
  this.query = function( ids, cb ){
    request
      .get( host + '/parser/findbyid' )
      .set( 'Accept', 'application/json' )
      .query({ ids: Array.isArray( ids ) ? ids.join(',') : '' })
      .timeout( settings && settings.timeout || 1000 )
      .end( function( err, res ){
        if( err || !res ){ return cb( err ); }
        if( 200 !== res.status ){ return cb( 'non 200 status' ); }
        return cb( null, res.body );
      });
  };
}
HttpTransport.prototype.query = function( coord, number, street, cb ){
  throw new Error( 'language: transport not connected' );
};

/**
  Setup

  allows instantiation of transport depending on configuration and preference
**/
module.exports.findById = function setup(){

  // user config
  var config = peliasConfig.generate();

  // ensure config variables set correctly
  if( !config.hasOwnProperty('language') || !config.language.hasOwnProperty('client') ){
    logger.warn( 'language: configuration not found' );
  }

  // valid configuration found
  else {

    // get adapter settings from config
    var settings = config.language.client;

    // http adapter
    if( 'http' === settings.adapter && settings.hasOwnProperty('host') ){
      logger.info( 'language: using http transport:', settings.host );
      if( settings.hasOwnProperty('timeout') ){
        return new HttpTransport( settings.host, { timeout: parseInt( settings.timeout, 10 ) } );
      }
      return new HttpTransport( settings.host );
    }
  }

  // default adapter
  logger.info( 'language: using null transport' );
  return new NullTransport();
};
@@ -0,0 +1,290 @@
'use strict';

const proxyquire = require('proxyquire').noCallThru();
const _ = require('lodash');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', t => {
    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => undefined
      }
    });

    t.equal(typeof controller, 'function', 'libpostal is a function');
    t.equal(typeof controller(), 'function', 'libpostal returns a controller');
    t.end();

  });

};

module.exports.tests.should_execute = (test, common) => {
  test('should_execute returning false should not call text-analyzer', t => {
    const should_execute = (req, res) => {
      // req and res should be passed to should_execute
      t.deepEquals(req, {
        clean: {
          text: 'original query'
        }
      });
      t.deepEquals(res, { b: 2 });
      return false;
    };

    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => {
          t.fail('parse should not have been called');
        }
      }
    })(should_execute);

    const req = {
      clean: {
        text: 'original query'
      }
    };
    const res = { b: 2 };

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          text: 'original query'
        }
      }, 'req should not have been modified');
      t.deepEquals(res, { b: 2 });
      t.end();
    });

  });

  test('should_execute returning true should call text-analyzer', t => {
    t.plan(5);

    const should_execute = (req, res) => {
      // req and res should be passed to should_execute
      t.deepEquals(req, {
        clean: {
          text: 'original query'
        }
      });
      t.deepEquals(res, { b: 2 });
      return true;
    };

    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: (query) => {
          t.equals(query, 'original query');
          return undefined;
        }
      }
    })(should_execute);

    const req = {
      clean: {
        text: 'original query'
      }
    };
    const res = { b: 2 };

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          text: 'original query'
        }
      }, 'req should not have been modified');
      t.deepEquals(res, { b: 2 });
      t.end();
    });

  });

};

module.exports.tests.parse_is_called = (test, common) => {
  test('parse returning undefined should not overwrite clean.parsed_text', t => {
    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => undefined
      }
    })(() => true);

    const req = {
      clean: {
        parsed_text: 'original parsed_text'
      }
    };
    const res = 'this is the response';

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          parsed_text: 'original parsed_text'
        }
      });
      t.deepEquals(res, 'this is the response');
      t.end();
    });

  });

  test('parse returning something should overwrite clean.parsed_text', t => {
    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => 'replacement parsed_text'
      }
    })(() => true);

    const req = {
      clean: {
        parsed_text: 'original parsed_text'
      }
    };
    const res = 'this is the response';

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          parsed_text: 'replacement parsed_text'
        }
      });
      t.deepEquals(res, 'this is the response');
      t.end();
    });

  });

};

module.exports.tests.iso2_conversion = (test, common) => {
  test('no country in parse response should leave country unset', t => {
    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => ({
          locality: 'this is the locality'
        })
      },
      'iso3166-1': {
        is2: () => t.fail('should not have been called'),
        to3: () => t.fail('should not have been called')
      }
    })(() => true);

    const req = {
      clean: {
        parsed_text: 'original parsed_text'
      }
    };
    const res = 'this is the response';

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          parsed_text: {
            locality: 'this is the locality'
          }
        }
      });
      t.deepEquals(res, 'this is the response');
      t.end();
    });

  });

  test('unknown country should not be converted', t => {
    t.plan(3);

    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => ({
          country: 'unknown country code'
        })
      },
      'iso3166-1': {
        is2: country => {
          t.equals(country, 'UNKNOWN COUNTRY CODE');
          return false;
        },
        to3: () => t.fail('should not have been called')
      }
    })(() => true);

    const req = {
      clean: {
        parsed_text: 'original parsed_text'
      }
    };
    const res = 'this is the response';

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          parsed_text: {
            country: 'unknown country code'
          }
        }
      });
      t.deepEquals(res, 'this is the response');
      t.end();
    });

  });

  test('ISO2 country should be converted to ISO3', t => {
    t.plan(4);

    const controller = proxyquire('../../../controller/libpostal', {
      'pelias-text-analyzer': {
        parse: () => ({
          country: 'ISO2 COUNTRY CODE'
        })
      },
      'iso3166-1': {
        is2: country => {
          t.equals(country, 'ISO2 COUNTRY CODE');
          return true;
        },
        to3: country => {
          t.equals(country, 'ISO2 COUNTRY CODE');
          return 'ISO3 COUNTRY CODE';
        }
      }
    })(() => true);

    const req = {
      clean: {
        parsed_text: 'original parsed_text'
      }
    };
    const res = 'this is the response';

    controller(req, res, () => {
      t.deepEquals(req, {
        clean: {
          parsed_text: {
            country: 'ISO3 COUNTRY CODE'
          }
        }
      });
      t.deepEquals(res, 'this is the response');
      t.end();
    });

  });

};

module.exports.all = (tape, common) => {

  function test(name, testFunction) {
    return tape(`GET /libpostal ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,163 @@
'use strict';

const _ = require('lodash');
const has_parsed_text_properties = require('../../../../controller/predicates/has_parsed_text_properties');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', (t) => {
    t.ok(_.isFunction(has_parsed_text_properties.all), 'has_parsed_text_properties.all is a function');
    t.ok(_.isFunction(has_parsed_text_properties.any), 'has_parsed_text_properties.any is a function');
    t.end();
  });

};

module.exports.tests.true_conditions = (test, common) => {
  test('all: defined request.clean.parsed_text.property should return true', (t) => {
    const req = {
      clean: {
        parsed_text: {
          property: 'value'
        }
      }
    };

    t.ok(has_parsed_text_properties.all('property')(req));
    t.end();

  });

  test('all: clean.parsed_text with any property should return true', (t) => {
    const req = {
      clean: {
        parsed_text: {
          property1: 'value1',
          property2: 'value2'
        }
      }
    };

    t.ok(has_parsed_text_properties.all('property2', 'property1')(req));
    t.end();

  });

  test('any: defined request.clean.parsed_text.property should return true', (t) => {
    const req = {
      clean: {
        parsed_text: {
          property: 'value'
        }
      }
    };

    t.ok(has_parsed_text_properties.any('property')(req));
    t.end();

  });

  test('any: clean.parsed_text with any property should return true', (t) => {
    const req = {
      clean: {
        parsed_text: {
          property2: 'value2',
          property3: 'value3'
        }
      }
    };

    t.ok(has_parsed_text_properties.any('property1', 'property3')(req));
    t.end();

  });

};

module.exports.tests.false_conditions = (test, common) => {
  test('all: undefined request should return false', (t) => {
    t.notOk(has_parsed_text_properties.all('property')());
    t.end();

  });

  test('all: undefined request.clean should return false', (t) => {
    const req = {};

    t.notOk(has_parsed_text_properties.all('property')(req));
    t.end();

  });

  test('all: undefined request.clean.parsed_text should return false', (t) => {
    const req = {
      clean: {}
    };

    t.notOk(has_parsed_text_properties.all('property')(req));
    t.end();

  });

  test('all: request.clean.parsed_text with none of the supplied properties should return false', (t) => {
    const req = {
      clean: {
        parsed_text: {
          property1: 'value1'
        }
      }
    };

    t.notOk(has_parsed_text_properties.all('property1', 'property2')(req));
    t.end();

  });

  test('any: undefined request should return false', (t) => {
    t.notOk(has_parsed_text_properties.any('property')());
    t.end();

  });

  test('any: undefined request.clean should return false', (t) => {
    const req = {};

    t.notOk(has_parsed_text_properties.any('property')(req));
    t.end();

  });

  test('any: undefined request.clean.parsed_text should return false', (t) => {
    const req = {
      clean: {}
    };

    t.notOk(has_parsed_text_properties.any('property')(req));
    t.end();

  });

  test('any: request.clean.parsed_text with none of the supplied properties should return false', (t) => {
    const req = {
      clean: {
        parsed_text: {}
      }
    };

    t.notOk(has_parsed_text_properties.any('property1', 'property2')(req));
    t.end();

  });

};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /has_parsed_text_properties ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,63 @@
'use strict';

const _ = require('lodash');
const has_request_parameter = require('../../../../controller/predicates/has_request_parameter');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', t => {
    t.equal(typeof has_request_parameter, 'function', 'has_request_parameter is a function');
    t.end();
  });
};

module.exports.tests.true_conditions = (test, common) => {
  test('request with specified parameter should return true', t => {
    [[], {}, 'string value', 17].forEach(val => {
      const req = {
        clean: {
          'parameter name': val
        }
      };

      t.ok(has_request_parameter('parameter name')(req));

    });

    t.end();

  });

};

module.exports.tests.false_conditions = (test, common) => {
  test('request with undefined clean should return false', t => {
    const req = {};

    t.notOk(has_request_parameter('parameter name')(req));
    t.end();

  });

  test('request.clean without specified parameter should return false', t => {
    const req = {
      clean: {}
    };

    t.notOk(has_request_parameter('parameter name')(req));
    t.end();

  });

};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /has_request_parameter ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,73 @@
'use strict';

const _ = require('lodash');
const is_addressit_parse = require('../../../../controller/predicates/is_addressit_parse');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', t => {
    t.ok(_.isFunction(is_addressit_parse), 'is_addressit_parse is a function');
    t.end();
  });
};

module.exports.tests.true_conditions = (test, common) => {
  test('request.clean.parser=addressit should return true', t => {
    const req = {
      clean: {
        parser: 'addressit'
      }
    };

    t.ok(is_addressit_parse(req));
    t.end();

  });

};

module.exports.tests.false_conditions = (test, common) => {
  test('undefined request should return false', t => {
    t.notOk(is_addressit_parse(undefined));
    t.end();
  });

  test('undefined request.clean should return false', t => {
    const req = {};

    t.notOk(is_addressit_parse(req));
    t.end();
  });

  test('undefined request.clean.parser should return false', t => {
    const req = {
      clean: {}
    };

    t.notOk(is_addressit_parse(req));
    t.end();
  });

  test('non-\'addressit\' request.clean.parser should return false', t => {
    const req = {
      clean: {
        parser: 'not addressit'
      }
    };

    t.notOk(is_addressit_parse(req));
    t.end();
  });

};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /is_addressit_parse ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,111 @@
'use strict';

const _ = require('lodash');
const is_only_non_admin_layers = require('../../../../controller/predicates/is_only_non_admin_layers');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', t => {
    t.equal(typeof is_only_non_admin_layers, 'function', 'is_only_non_admin_layers is a function');
    t.end();
  });
};

module.exports.tests.true_conditions = (test, common) => {
  test('request with specified parameter should return true', t => {
    [
      ['venue', 'address', 'street'],
      ['venue', 'address'],
      ['venue', 'street'],
      ['address', 'street'],
      ['venue'],
      ['address'],
      ['street']
    ].forEach(layers => {
      const req = {
        clean: {
          layers: layers
        }
      };

      t.ok(is_only_non_admin_layers(req));

    });

    t.end();

  });

};

module.exports.tests.false_conditions = (test, common) => {
  test('request with undefined clean should return false', t => {
    const req = {};

    t.notOk(is_only_non_admin_layers(req));
    t.end();

  });

  test('request.clean without layers parameter should return false', t => {
    const req = {
      clean: {}
    };

    t.notOk(is_only_non_admin_layers(req));
    t.end();

  });

  test('request with empty layers should return false', t => {
    const req = {
      clean: {
        layers: []
      }
    };

    t.notOk(is_only_non_admin_layers(req));
    t.end();

  });

  test('request.clean.layers without venue, address, or street should return false', t => {
    const req = {
      clean: {
        layers: ['locality']
      }
    };

    t.notOk(is_only_non_admin_layers(req));
    t.end();

  });

  test('request.clean.layers with other layers besides venue, address, or street should return false', t => {
    ['venue', 'address', 'street'].forEach(non_admin_layer => {
      const req = {
        clean: {
          layers: ['locality', non_admin_layer]
        }
      };

      t.notOk(is_only_non_admin_layers(req));

    });

    t.end();

  });

};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /is_only_non_admin_layers ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,107 @@
'use strict';

const _ = require('lodash');
const is_request_sources_only_whosonfirst = require('../../../../controller/predicates/is_request_sources_only_whosonfirst');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', (t) => {
    t.ok(_.isFunction(is_request_sources_only_whosonfirst), 'is_request_sources_only_whosonfirst is a function');
    t.end();
  });
};

module.exports.tests.true_conditions = (test, common) => {
  test('sources only \'whosonfirst\' should return true', (t) => {
    const req = {
      clean: {
        sources: [
          'whosonfirst'
        ]
      }
    };

    t.ok(is_request_sources_only_whosonfirst(req));
    t.end();

  });

};

module.exports.tests.false_conditions = (test, common) => {
  test('undefined req should return false', (t) => {
    t.notOk(is_request_sources_only_whosonfirst(undefined));
    t.end();

  });

  test('undefined req.clean should return false', (t) => {
    const req = {};

    t.notOk(is_request_sources_only_whosonfirst(req));
    t.end();

  });

  test('undefined req.clean.sources should return false', (t) => {
    const req = {
      clean: {}
    };

    t.notOk(is_request_sources_only_whosonfirst(req));
    t.end();

  });

  test('empty req.clean.sources should return false', (t) => {
    const req = {
      clean: {
        sources: []
      }
    };

    t.notOk(is_request_sources_only_whosonfirst(req));
    t.end();

  });

  test('sources not \'whosonfirst\' should return false', (t) => {
    const req = {
      clean: {
        sources: [
          'not whosonfirst'
        ]
      }
    };

    t.notOk(is_request_sources_only_whosonfirst(req));
    t.end();

  });

  test('sources other than \'whosonfirst\' should return false', (t) => {
    const req = {
      clean: {
        sources: [
          'whosonfirst', 'not whosonfirst'
        ]
      }
    };

    t.notOk(is_request_sources_only_whosonfirst(req));
    t.end();

  });

};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /is_request_sources_only_whosonfirst ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -1,42 +0,0 @@
'use strict';

const _ = require('lodash');
const is_service_enabled = require('../../../../controller/predicates/is_service_enabled');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', (t) => {
    t.equal(typeof is_service_enabled, 'function', 'is_service_enabled is a function');
    t.equal(typeof is_service_enabled(), 'function', 'is_service_enabled() is a function');
    t.end();
  });
};

module.exports.tests.true_conditions = (test, common) => {
  test('string uri should return true', (t) => {
    t.ok(is_service_enabled('pip uri')());
    t.end();

  });

};

module.exports.tests.false_conditions = (test, common) => {
  test('undefined uri should return false', (t) => {
    t.notOk(is_service_enabled()());
    t.end();

  });

};

module.exports.all = (tape, common) => {
  function test(name, testFunction) {
    return tape(`GET /is_service_enabled ${name}`, testFunction);
  }

  for( const testCase in module.exports.tests ){
    module.exports.tests[testCase](test, common);
  }
};
@@ -0,0 +1,570 @@
'use strict';

const setup = require('../../../controller/search_with_ids');
const proxyquire = require('proxyquire').noCallThru();
const mocklogger = require('pelias-mock-logger');
const _ = require('lodash');

module.exports.tests = {};

module.exports.tests.interface = (test, common) => {
  test('valid interface', (t) => {
    t.ok(_.isFunction(setup), 'setup is a function');
    t.ok(_.isFunction(setup()), 'setup returns a controller');
    t.end();
  });
};

module.exports.tests.success = (test, common) => {
  test('successful request to search service should replace data and meta', (t) => {
    t.plan(5);

    const logger = mocklogger();

    const config = {
      indexName: 'indexName value'
    };
    const esclient = 'this is the esclient';
    const query = () => ({
      body: 'this is the query body',
      type: 'this is the query type'
    });

    // a controller that validates the esclient and cmd that was passed to the search service
    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': (esclient, cmd, callback) => {
        t.equal(esclient, 'this is the esclient');
        t.deepEqual(cmd, {
          index: 'indexName value',
          searchType: 'dfs_query_then_fetch',
          body: 'this is the query body'
        });

        const docs = [
          { name: 'replacement result #1'},
          { name: 'replacement result #2'}
        ];
        const meta = { key: 'replacement meta value' };

        callback(undefined, docs, meta);
      },
      'pelias-logger': logger
    })(config, esclient, query, () => true );

    const req = { clean: { }, errors: [], warnings: [] };
    const res = {
      data: [
        { name: 'original result #1'},
        { name: 'original result #2'}
      ],
      meta: {
        key: 'original meta value'
      }
    };

    const next = () => {
      t.deepEqual(req, {
        clean: {},
        errors: [],
        warnings: []
      });
      t.deepEquals(res, {
        data: [
          { name: 'replacement result #1'},
          { name: 'replacement result #2'}
        ],
        meta: {
          key: 'replacement meta value',
          query_type: 'this is the query type'
        }
      });

      t.ok(logger.isInfoMessage('[controller:search] [queryType:this is the query type] [es_result_count:2]'));

      t.end();
    };

    controller(req, res, next);

  });

  test('undefined meta should set empty object into res', (t) => {
    const logger = mocklogger();

    const config = {
      indexName: 'indexName value'
    };
    const esclient = 'this is the esclient';
    const query = () => ({
      body: 'this is the query body',
      type: 'this is the query type'
    });

    // a controller that validates the esclient and cmd that was passed to the search service
    const controller = proxyquire('../../../controller/search_with_ids', {
      '../service/search': (esclient, cmd, callback) => {
        const docs = [
          { name: 'replacement result #1'},
          { name: 'replacement result #2'}
        ];

        callback(undefined, docs, undefined);
      },
      'pelias-logger': logger
    })(config, esclient, query, () => true );

    const req = { clean: { }, errors: [], warnings: [] };
    const res = {
      data: [
        { name: 'original result #1'},
        { name: 'original result #2'}
      ],
      meta: {
        key: 'original meta value'
      }
    };

    const next = () => {
      t.deepEqual(req, {
        clean: {},
        errors: [],
        warnings: []
      });
      t.deepEquals(res, {
        data: [
          { name: 'replacement result #1'},
          { name: 'replacement result #2'}
        ],
        meta: {
          query_type: 'this is the query type'
        }
      });

      t.end();
    };

    controller(req, res, next);

  });

  test('undefined docs in response should not overwrite existing results', (t) => {
    t.plan(1+3); // ensures that search service was called, then req+res+logger tests
|
||||
|
||||
const logger = mocklogger(); |
||||
|
||||
const config = { |
||||
indexName: 'indexName value' |
||||
}; |
||||
const esclient = 'this is the esclient'; |
||||
const query = () => ({ |
||||
body: 'this is the query body', |
||||
type: 'this is the query type' |
||||
}); |
||||
|
||||
// a controller that validates the esclient and cmd that was passed to the search service
|
||||
const controller = proxyquire('../../../controller/search_with_ids', { |
||||
'../service/search': (esclient, cmd, callback) => { |
||||
t.pass('search service was called'); |
||||
|
||||
const meta = { key: 'new value' }; |
||||
|
||||
callback(undefined, undefined, meta); |
||||
}, |
||||
'pelias-logger': logger |
||||
})(config, esclient, query, () => true ); |
||||
|
||||
const req = { clean: { }, errors: [], warnings: [] }; |
||||
const res = { |
||||
data: [ |
||||
{ id: 1 }, |
||||
{ id: 2 } |
||||
], |
||||
meta: { |
||||
key: 'value' |
||||
} |
||||
}; |
||||
|
||||
const next = () => { |
||||
t.deepEqual(req, { |
||||
clean: {}, |
||||
errors: [], |
||||
warnings: [] |
||||
}); |
||||
t.deepEquals(res, { |
||||
data: [ |
||||
{ id: 1 }, |
||||
{ id: 2 } |
||||
], |
||||
meta: { key: 'value' } |
||||
}); |
||||
|
||||
t.notOk(logger.isInfoMessage(/\[controller:search\] \[queryType:this is the query type\] \[es_result_count:0\]/)); |
||||
|
||||
t.end(); |
||||
}; |
||||
|
||||
controller(req, res, next); |
||||
|
||||
}); |
||||
|
||||
test('empty docs in response should not overwrite existing results', (t) => { |
||||
t.plan(4); |
||||
|
||||
const logger = mocklogger(); |
||||
|
||||
const config = { |
||||
indexName: 'indexName value' |
||||
}; |
||||
const esclient = 'this is the esclient'; |
||||
const query = () => ({ |
||||
body: 'this is the query body', |
||||
type: 'this is the query type' |
||||
}); |
||||
|
||||
// a controller that validates the esclient and cmd that was passed to the search service
|
||||
const controller = proxyquire('../../../controller/search_with_ids', { |
||||
'../service/search': (esclient, cmd, callback) => { |
||||
t.pass('search service was called'); |
||||
|
||||
const meta = { key: 'value' }; |
||||
|
||||
callback(undefined, [], meta); |
||||
} |
||||
})(config, esclient, query, () => true ); |
||||
|
||||
const req = { clean: { }, errors: [], warnings: [] }; |
||||
const res = { |
||||
data: [ |
||||
{ name: 'pre-existing result #1' }, |
||||
{ name: 'pre-existing result #2' } |
||||
], |
||||
meta: { |
||||
key: 'value' |
||||
} |
||||
}; |
||||
|
||||
const next = () => { |
||||
t.deepEqual(req, { |
||||
clean: {}, |
||||
errors: [], |
||||
warnings: [] |
||||
}); |
||||
t.deepEquals(res, { |
||||
data: [ |
||||
{ name: 'pre-existing result #1' }, |
||||
{ name: 'pre-existing result #2' } |
||||
], |
||||
meta: { key: 'value' } |
||||
}); |
||||
|
||||
t.notOk(logger.isInfoMessage(/\[controller:search\] \[queryType:this is the query type\] \[es_result_count:0\]/)); |
||||
|
||||
t.end(); |
||||
}; |
||||
|
||||
controller(req, res, next); |
||||
|
||||
}); |
||||
|
||||
test('successful request on retry to search service should log info message', (t) => { |
||||
t.plan(3+2+2); // 3 search service calls, 2 log messages, 1 req, 1 res
|
||||
|
||||
const logger = mocklogger(); |
||||
|
||||
const config = { |
||||
indexName: 'indexName value' |
||||
}; |
||||
const esclient = 'this is the esclient'; |
||||
const query = () => ({ |
||||
body: 'this is the query body', |
||||
type: 'this is the query type' |
||||
}); |
||||
|
||||
let searchServiceCallCount = 0; |
||||
|
||||
const timeoutError = { |
||||
status: 408, |
||||
displayName: 'RequestTimeout', |
||||
message: 'Request Timeout after 17ms' |
||||
}; |
||||
|
||||
// a controller that validates the esclient and cmd that was passed to the search service
|
||||
const controller = proxyquire('../../../controller/search_with_ids', { |
||||
'../service/search': (esclient, cmd, callback) => { |
||||
t.pass('search service was called'); |
||||
|
||||
if (searchServiceCallCount < 2) { |
||||
// note that the searchService got called
|
||||
searchServiceCallCount++; |
||||
callback(timeoutError); |
||||
} else { |
||||
const docs = [ |
||||
{ name: 'replacement result #1'}, |
||||
{ name: 'replacement result #2'} |
||||
]; |
||||
const meta = { key: 'replacement meta value' }; |
||||
|
||||
callback(undefined, docs, meta); |
||||
} |
||||
|
||||
}, |
||||
'pelias-logger': logger |
||||
})(config, esclient, query, () => true ); |
||||
|
||||
const req = { clean: { }, errors: [], warnings: [] }; |
||||
const res = { |
||||
data: [ |
||||
{ name: 'original result #1'}, |
||||
{ name: 'original result #2'} |
||||
], |
||||
meta: { |
||||
key: 'original meta value' |
||||
} |
||||
}; |
||||
|
||||
const next = () => { |
||||
t.deepEqual(req, { |
||||
clean: {}, |
||||
errors: [], |
||||
warnings: [] |
||||
}); |
||||
t.deepEquals(res, { |
||||
data: [ |
||||
{ name: 'replacement result #1'}, |
||||
{ name: 'replacement result #2'} |
||||
], |
||||
meta: { |
||||
key: 'replacement meta value', |
||||
query_type: 'this is the query type' |
||||
} |
||||
}); |
||||
|
||||
t.ok(logger.isInfoMessage('[controller:search] [queryType:this is the query type] [es_result_count:2]')); |
||||
t.ok(logger.isInfoMessage('succeeded on retry 2')); |
||||
|
||||
t.end(); |
||||
}; |
||||
|
||||
controller(req, res, next); |
||||
|
||||
}); |
||||
|
||||
}; |
||||
|
||||
module.exports.tests.service_errors = (test, common) => { |
||||
test('default # of request timeout retries should be 3', (t) => { |
||||
// test for 1 initial search service, 3 retries, 1 log messages, 1 req, and 1 res
|
||||
t.plan(1 + 3 + 1 + 2); |
||||
|
||||
const logger = mocklogger(); |
||||
|
||||
const config = { |
||||
indexName: 'indexName value' |
||||
}; |
||||
const esclient = 'this is the esclient'; |
||||
const query = () => ({ |
||||
body: 'this is the query body', |
||||
}); |
||||
|
||||
const timeoutError = { |
||||
status: 408, |
||||
displayName: 'RequestTimeout', |
||||
message: 'Request Timeout after 17ms' |
||||
}; |
||||
|
||||
// a controller that validates that the search service was called
|
||||
const controller = proxyquire('../../../controller/search_with_ids', { |
||||
'../service/search': (esclient, cmd, callback) => { |
||||
// note that the searchService got called
|
||||
t.pass('search service was called'); |
||||
|
||||
callback(timeoutError); |
||||
}, |
||||
'pelias-logger': logger |
||||
})(config, esclient, query, () => true ); |
||||
|
||||
const req = { clean: { }, errors: [], warnings: [] }; |
||||
const res = {}; |
||||
|
||||
const next = () => { |
||||
t.deepEqual(logger.getInfoMessages(), [ |
||||
'[req]', |
||||
'request timed out on attempt 1, retrying', |
||||
'request timed out on attempt 2, retrying', |
||||
'request timed out on attempt 3, retrying' |
||||
]); |
||||
|
||||
t.deepEqual(req, { |
||||
clean: {}, |
||||
errors: [timeoutError.message], |
||||
warnings: [] |
||||
}); |
||||
t.deepEqual(res, {}); |
||||
t.end(); |
||||
}; |
||||
|
||||
controller(req, res, next); |
||||
|
||||
}); |
||||
|
||||
test('explicit apiConfig.requestRetries should retry that many times', (t) => { |
||||
t.plan(1 + 17); // test for initial search service call and 17 retries
|
||||
|
||||
const config = { |
||||
indexName: 'indexName value', |
||||
requestRetries: 17 |
||||
}; |
||||
const esclient = 'this is the esclient'; |
||||
const query = () => ({ }); |
||||
|
||||
const timeoutError = { |
||||
status: 408, |
||||
displayName: 'RequestTimeout', |
||||
message: 'Request Timeout after 17ms' |
||||
}; |
||||
|
||||
// a controller that validates that the search service was called
|
||||
const controller = proxyquire('../../../controller/search_with_ids', { |
||||
'../service/search': (esclient, cmd, callback) => { |
||||
// note that the searchService got called
|
||||
t.pass('search service was called'); |
||||
|
||||
callback(timeoutError); |
||||
} |
||||
})(config, esclient, query, () => true ); |
||||
|
||||
const req = { clean: { }, errors: [], warnings: [] }; |
||||
const res = {}; |
||||
|
||||
controller(req, res, () => t.end() ); |
||||
|
||||
}); |
||||
|
||||
test('only status code 408 should be considered a retryable request', (t) => { |
||||
t.plan(2); |
||||
|
||||
const config = { |
||||
indexName: 'indexName value', |
||||
requestRetries: 17 |
||||
}; |
||||
const esclient = 'this is the esclient'; |
||||
const query = () => ({ }); |
||||
|
||||
const nonTimeoutError = { |
||||
status: 500, |
||||
displayName: 'InternalServerError', |
||||
message: 'an internal server error occurred' |
||||
}; |
||||
|
||||
// a controller that validates that the search service was called
|
||||
const controller = proxyquire('../../../controller/search_with_ids', { |
||||
'../service/search': (esclient, cmd, callback) => { |
||||
// note that the searchService got called
|
||||
t.pass('search service was called'); |
||||
|
||||
callback(nonTimeoutError); |
||||
} |
||||
})(config, esclient, query, () => true ); |
||||
|
||||
const req = { clean: { }, errors: [], warnings: [] }; |
||||
const res = {}; |
||||
|
||||
const next = () => { |
||||
t.deepEqual(req, { |
||||
clean: {}, |
||||
errors: [nonTimeoutError.message], |
||||
warnings: [] |
||||
}); |
||||
t.end(); |
||||
}; |
||||
|
||||
controller(req, res, next); |
||||
|
||||
}); |
||||
|
||||
test('string error should not retry and be logged as-is', (t) => { |
||||
t.plan(2); // service call + error is in req.errors
|
||||
|
||||
const config = { |
||||
indexName: 'indexName value' |
||||
}; |
||||
const esclient = 'this is the esclient'; |
||||
const query = () => ({ }); |
||||
|
||||
// a controller that validates that the search service was called
|
||||
const controller = proxyquire('../../../controller/search_with_ids', { |
||||
'../service/search': (esclient, cmd, callback) => { |
||||
// note that the searchService got called
|
||||
t.pass('search service was called'); |
||||
|
||||
callback('this is an error string'); |
||||
} |
||||
})(config, esclient, query, () => true ); |
||||
|
||||
const req = { clean: { }, errors: [], warnings: [] }; |
||||
const res = {}; |
||||
|
||||
const next = () => { |
||||
t.deepEqual(req, { |
||||
clean: {}, |
||||
errors: ['this is an error string'], |
||||
warnings: [] |
||||
}); |
||||
t.end(); |
||||
}; |
||||
|
||||
controller(req, res, next); |
||||
|
||||
}); |
||||
|
||||
}; |
||||
|
||||
module.exports.tests.should_execute = (test, common) => { |
||||
test('should_execute returning false and empty req.errors should call next', (t) => { |
||||
const esclient = () => t.fail('esclient should not have been called'); |
||||
const query = () => t.fail('query should not have been called'); |
||||
const should_execute = () => false; |
||||
const controller = setup( {}, esclient, query, should_execute ); |
||||
|
||||
const req = { }; |
||||
const res = { }; |
||||
|
||||
const next = () => { |
||||
t.deepEqual(res, { }); |
||||
t.end(); |
||||
}; |
||||
controller(req, res, next); |
||||
|
||||
}); |
||||
|
||||
}; |
||||
|
||||
module.exports.tests.undefined_query = (test, common) => { |
||||
test('query returning undefined should not call service', (t) => { |
||||
t.plan(0, 'test will fail if search service actually gets called'); |
||||
|
||||
// a function that returns undefined
|
||||
const query = () => undefined; |
||||
|
||||
const controller = proxyquire('../../../controller/search_with_ids', { |
||||
'../service/search': () => { |
||||
t.fail('search service should not have been called'); |
||||
} |
||||
})(undefined, undefined, query, () => true ); |
||||
|
||||
const next = () => t.end(); |
||||
|
||||
controller({}, {}, next); |
||||
|
||||
}); |
||||
}; |
||||
|
||||
module.exports.all = (tape, common) => { |
||||
function test(name, testFunction) { |
||||
return tape(`GET /search ${name}`, testFunction); |
||||
} |
||||
|
||||
for( const testCase in module.exports.tests ){ |
||||
module.exports.tests[testCase](test, common); |
||||
} |
||||
}; |
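The retry behaviour exercised by these tests can be summarised in a short sketch; the function names below are hypothetical and chosen for illustration only, the real logic lives inside controller/search_with_ids.js.

const _ = require('lodash');

// only HTTP 408 (Request Timeout) errors are considered retryable
function isRequestTimeout(err) {
  return _.get(err, 'status') === 408;
}

// retry up to apiConfig.requestRetries times, falling back to 3 by default
function maxRetries(apiConfig) {
  return _.get(apiConfig, 'requestRetries', 3);
}

Non-408 errors and plain string errors are pushed onto req.errors without any retry, which is exactly what the service_errors tests assert.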
@ -0,0 +1,95 @@
|
||||
const Debug = require('../../../helper/debug'); |
||||
|
||||
module.exports.tests = {}; |
||||
|
||||
module.exports.tests.debug = function(test, common) { |
||||
test('initialize the debugger with a name', (t) => { |
||||
const debugLog = new Debug('debugger'); |
||||
t.deepEquals(debugLog.name, 'debugger'); |
||||
t.end(); |
||||
}); |
||||
|
||||
test('don\'t push debug message if enableDebug is false', (t) => { |
||||
const debugLog = new Debug('debugger'); |
||||
const req = { |
||||
clean: { |
||||
enableDebug: false |
||||
} |
||||
}; |
||||
debugLog.push(req, 'This should not be pushed'); |
||||
t.deepEquals(req.debug, undefined); |
||||
t.end(); |
||||
}); |
||||
|
||||
test('don\'t start timer if enableDebug is false', (t) => { |
||||
const debugLog = new Debug('debugger'); |
||||
const req = { |
||||
clean: { |
||||
enableDebug: false |
||||
} |
||||
}; |
||||
debugLog.beginTimer(req, 'This should not be pushed'); |
||||
t.deepEquals(req.debug, undefined); |
||||
t.end(); |
||||
}); |
||||
|
||||
test('don\'t push debug message if req.clean is empty', (t) => { |
||||
const debugLog = new Debug('debugger'); |
||||
const req = { |
||||
clean: {} |
||||
}; |
||||
debugLog.push(req, 'This should not be pushed'); |
||||
t.deepEquals(req.debug, undefined); |
||||
t.end(); |
||||
}); |
||||
|
||||
test('Push messages if enableDebug is true', (t) => { |
||||
const debugLog = new Debug('debugger'); |
||||
const req = { |
||||
clean: { |
||||
enableDebug: true |
||||
} |
||||
}; |
||||
const expected_req = { |
||||
debug: [ |
||||
{ |
||||
debugger: 'This should be pushed' |
||||
}, |
||||
{ |
||||
debugger: 'Timer Began: Timer 1' |
||||
} |
||||
] |
||||
}; |
||||
debugLog.push(req, 'This should be pushed'); |
||||
debugLog.beginTimer(req, 'Timer 1'); |
||||
t.deepEquals(req.debug, expected_req.debug); |
t.end(); |
||||
}); |
||||
|
||||
test('Timer should return positive number of milliseconds', (t) => { |
||||
const debugLog = new Debug('debugger'); |
||||
const req = { |
||||
clean: { |
||||
enableDebug: true |
||||
} |
||||
}; |
||||
debugLog.beginTimer(req); |
||||
setTimeout(() => { |
||||
debugLog.stopTimer(req); |
||||
t.ok(parseInt(req.debug[1].debugger.slice(15, -3)) > 0, 'timer should return a positive number of milliseconds'); |
||||
t.end(); |
||||
}, 2); |
||||
|
||||
}); |
||||
|
||||
}; |
||||
|
||||
module.exports.all = function (tape, common) { |
||||
|
||||
function test(name, testFunction) { |
||||
return tape('[helper] debug: ' + name, testFunction); |
||||
} |
||||
|
||||
for( var testCase in module.exports.tests ){ |
||||
module.exports.tests[testCase](test, common); |
||||
} |
||||
}; |
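A rough sketch of the helper behaviour these tests depend on is shown below; it is simplified (stopTimer and elapsed-time bookkeeping are omitted) and the authoritative implementation is helper/debug.js.

const _ = require('lodash');

class Debug {
  constructor(name) {
    this.name = name;
  }

  push(req, value) {
    // debug output is only collected when the request opted in via the debug flag
    if (!_.get(req, 'clean.enableDebug', false)) { return; }
    req.debug = req.debug || [];
    req.debug.push({ [this.name]: value });
  }

  beginTimer(req, label) {
    // timers are recorded as ordinary debug messages, e.g. 'Timer Began: Timer 1'
    this.push(req, `Timer Began: ${label}`);
  }
}

module.exports = Debug;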
@ -0,0 +1,27 @@
|
||||
'use strict'; |
||||
|
||||
module.exports = class MockQuery { |
||||
constructor() { |
||||
this._score_functions = []; |
||||
this._filter_functions = []; |
||||
} |
||||
|
||||
render(vs) { |
||||
return { |
||||
vs: vs, |
||||
score_functions: this._score_functions, |
||||
filter_functions: this._filter_functions |
||||
}; |
||||
} |
||||
|
||||
score(view) { |
||||
this._score_functions.push(view); |
||||
return this; |
||||
} |
||||
|
||||
filter(view) { |
||||
this._filter_functions.push(view); |
||||
return this; |
||||
} |
||||
|
||||
}; |
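For context, the query tests below swap this mock in for pelias-query's AddressesUsingIdsQuery layout via proxyquire: chained score/filter calls are simply recorded and echoed back by render, so the tests can assert which views were attached.

const MockQuery = require('./MockQuery');

const q = new MockQuery()
  .score('focus_only_function')
  .filter('boundary_country view');

// render returns the variables plus whatever was registered above
console.log(q.render({ size: 20 }));
// { vs: { size: 20 },
//   score_functions: [ 'focus_only_function' ],
//   filter_functions: [ 'boundary_country view' ] }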
@ -0,0 +1,553 @@
|
||||
const generateQuery = require('../../../query/address_search_using_ids'); |
||||
const _ = require('lodash'); |
||||
const proxyquire = require('proxyquire').noCallThru(); |
||||
const mock_logger = require('pelias-mock-logger'); |
||||
const MockQuery = require('./MockQuery'); |
||||
|
||||
module.exports.tests = {}; |
||||
|
||||
module.exports.tests.interface = (test, common) => { |
||||
test('valid interface', (t) => { |
||||
t.ok(_.isFunction(generateQuery)); |
||||
t.end(); |
||||
}); |
||||
}; |
||||
|
||||
// helper for canned views
|
||||
const views = { |
||||
focus_only_function: () => 'focus_only_function', |
||||
boundary_country: 'boundary_country view', |
||||
boundary_circle: 'boundary_circle view', |
||||
boundary_rect: 'boundary_rect view', |
||||
sources: 'sources view' |
||||
}; |
||||
|
||||
module.exports.tests.base_query = (test, common) => { |
||||
test('basic', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
} |
||||
}; |
||||
const res = { |
||||
data: [] |
||||
}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.equals(generatedQuery.type, 'fallback'); |
||||
|
||||
t.equals(generatedQuery.body.vs.var('input:housenumber').toString(), 'housenumber value'); |
||||
t.equals(generatedQuery.body.vs.var('input:street').toString(), 'street value'); |
||||
t.notOk(generatedQuery.body.vs.isset('sources')); |
||||
t.equals(generatedQuery.body.vs.var('size').toString(), 20); |
||||
|
||||
t.deepEquals(generatedQuery.body.score_functions, [ |
||||
'focus_only_function' |
||||
]); |
||||
|
||||
t.deepEquals(generatedQuery.body.filter_functions, [ |
||||
'boundary_country view', |
||||
'boundary_circle view', |
||||
'boundary_rect view', |
||||
'sources view' |
||||
]); |
||||
|
||||
t.deepEquals(logger.getInfoMessages(), ['[query:address_search_using_ids] [parser:libpostal]']); |
||||
t.end(); |
||||
|
||||
}); |
||||
}; |
||||
|
||||
module.exports.tests.other_parameters = (test, common) => { |
||||
test('explicit size set', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
}, |
||||
querySize: 'querySize value' |
||||
}; |
||||
const res = { |
||||
data: [] |
||||
}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.equals(generatedQuery.body.vs.var('size').toString(), 'querySize value'); |
||||
t.deepEquals(logger.getInfoMessages(), ['[query:address_search_using_ids] [parser:libpostal] [param:querySize]']); |
||||
t.end(); |
||||
|
||||
}); |
||||
|
||||
test('explicit sources set', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
}, |
||||
sources: ['source 1', 'source 2'] |
||||
}; |
||||
const res = { |
||||
data: [] |
||||
}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.deepEquals(generatedQuery.body.vs.var('sources').toString(), ['source 1', 'source 2']); |
||||
t.deepEquals(logger.getInfoMessages(), ['[query:address_search_using_ids] [parser:libpostal] [param:sources]']); |
||||
t.end(); |
||||
|
||||
}); |
||||
|
||||
}; |
||||
|
||||
module.exports.tests.granularity_bands = (test, common) => { |
||||
test('neighbourhood/borough/locality/localadmin granularity band', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
} |
||||
}; |
||||
const res = { |
||||
data: [ |
||||
{ |
||||
layer: 'neighbourhood', |
||||
source_id: 1 |
||||
}, |
||||
{ |
||||
layer: 'borough', |
||||
source_id: 2 |
||||
}, |
||||
{ |
||||
layer: 'locality', |
||||
source_id: 3 |
||||
}, |
||||
{ |
||||
layer: 'localadmin', |
||||
source_id: 4 |
||||
}, |
||||
{ |
||||
layer: 'county', |
||||
source_id: 5 |
||||
}, |
||||
{ |
||||
layer: 'macrocounty', |
||||
source_id: 6 |
||||
}, |
||||
{ |
||||
layer: 'region', |
||||
source_id: 7 |
||||
}, |
||||
{ |
||||
layer: 'macroregion', |
||||
source_id: 8 |
||||
}, |
||||
{ |
||||
layer: 'dependency', |
||||
source_id: 9 |
||||
}, |
||||
{ |
||||
layer: 'country', |
||||
source_id: 10 |
||||
}, |
||||
{ |
||||
layer: 'neighbourhood', |
||||
source_id: 11 |
||||
}, |
||||
{ |
||||
layer: 'borough', |
||||
source_id: 12 |
||||
}, |
||||
{ |
||||
layer: 'locality', |
||||
source_id: 13 |
||||
}, |
||||
{ |
||||
layer: 'localadmin', |
||||
source_id: 14 |
||||
}, |
||||
{ |
||||
layer: 'county', |
||||
source_id: 15 |
||||
}, |
||||
{ |
||||
layer: 'macrocounty', |
||||
source_id: 16 |
||||
}, |
||||
{ |
||||
layer: 'region', |
||||
source_id: 17 |
||||
}, |
||||
{ |
||||
layer: 'macroregion', |
||||
source_id: 18 |
||||
}, |
||||
{ |
||||
layer: 'dependency', |
||||
source_id: 19 |
||||
}, |
||||
{ |
||||
layer: 'country', |
||||
source_id: 20 |
||||
} |
||||
] |
||||
}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
|
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.deepEquals(generatedQuery.body.vs.var('input:layers').$, { |
||||
neighbourhood: [1, 11], |
||||
borough: [2, 12], |
||||
locality: [3, 13], |
||||
localadmin: [4, 14], |
||||
region: [7, 17], |
||||
macroregion: [8, 18], |
||||
dependency: [9, 19], |
||||
country: [10, 20] |
||||
}); |
||||
|
||||
t.end(); |
||||
}); |
||||
|
||||
test('only band members with ids should be passed', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
} |
||||
}; |
||||
const res = { |
||||
data: [ |
||||
{ |
||||
layer: 'neighbourhood', |
||||
source_id: 1 |
||||
} |
||||
] |
||||
}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.deepEquals(generatedQuery.body.vs.var('input:layers').$, { |
||||
neighbourhood: [1], |
||||
borough: [], |
||||
locality: [], |
||||
localadmin: [], |
||||
region: [], |
||||
macroregion: [], |
||||
dependency: [], |
||||
country: [] |
||||
}); |
||||
|
||||
t.end(); |
||||
}); |
||||
|
||||
test('county/macrocounty granularity band', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
} |
||||
}; |
||||
const res = { |
||||
data: [ |
||||
{ |
||||
layer: 'county', |
||||
source_id: 1 |
||||
}, |
||||
{ |
||||
layer: 'macrocounty', |
||||
source_id: 2 |
||||
}, |
||||
{ |
||||
layer: 'county', |
||||
source_id: 4 |
||||
}, |
||||
{ |
||||
layer: 'macrocounty', |
||||
source_id: 5 |
||||
} |
||||
] |
||||
}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
|
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.deepEquals(generatedQuery.body.vs.var('input:layers').$, { |
||||
county: [1, 4], |
||||
macrocounty: [2, 5] |
||||
}); |
||||
|
||||
t.end(); |
||||
}); |
||||
|
||||
}; |
||||
|
||||
module.exports.tests.boundary_filters = (test, common) => { |
||||
test('boundary.country available should add to query', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
}, |
||||
'boundary.country': 'boundary.country value' |
||||
}; |
||||
const res = {}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
|
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.equals(generatedQuery.body.vs.var('boundary:country').toString(), 'boundary.country value'); |
||||
|
||||
t.end(); |
||||
|
||||
}); |
||||
|
||||
test('focus.point.lat/lon w/both numbers should add to query', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
}, |
||||
'focus.point.lat': 12.121212, |
||||
'focus.point.lon': 21.212121 |
||||
}; |
||||
const res = {}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
|
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.equals(generatedQuery.body.vs.var('focus:point:lat').toString(), 12.121212); |
||||
t.equals(generatedQuery.body.vs.var('focus:point:lon').toString(), 21.212121); |
||||
|
||||
t.end(); |
||||
|
||||
}); |
||||
|
||||
test('boundary.rect with all numbers should add to query', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
}, |
||||
'boundary.rect.min_lat': 12.121212, |
||||
'boundary.rect.max_lat': 13.131313, |
||||
'boundary.rect.min_lon': 21.212121, |
||||
'boundary.rect.max_lon': 31.313131 |
||||
}; |
||||
const res = {}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
|
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.equals(generatedQuery.body.vs.var('boundary:rect:top').toString(), 13.131313); |
||||
t.equals(generatedQuery.body.vs.var('boundary:rect:right').toString(), 31.313131); |
||||
t.equals(generatedQuery.body.vs.var('boundary:rect:bottom').toString(), 12.121212); |
||||
t.equals(generatedQuery.body.vs.var('boundary:rect:left').toString(), 21.212121); |
||||
|
||||
t.end(); |
||||
|
||||
}); |
||||
|
||||
test('boundary circle without radius should set radius to default', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
}, |
||||
'boundary.circle.lat': 12.121212, |
||||
'boundary.circle.lon': 21.212121 |
||||
}; |
||||
const res = {}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
|
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.equals(generatedQuery.body.vs.var('boundary:circle:lat').toString(), 12.121212); |
||||
t.equals(generatedQuery.body.vs.var('boundary:circle:lon').toString(), 21.212121); |
||||
t.equals(generatedQuery.body.vs.var('boundary:circle:radius').toString(), '50km'); |
||||
|
||||
t.end(); |
||||
|
||||
}); |
||||
|
||||
test('boundary circle with radius set radius to that value rounded', (t) => { |
||||
const logger = mock_logger(); |
||||
|
||||
const clean = { |
||||
parsed_text: { |
||||
number: 'housenumber value', |
||||
street: 'street value' |
||||
}, |
||||
'boundary.circle.lat': 12.121212, |
||||
'boundary.circle.lon': 21.212121, |
||||
'boundary.circle.radius': 17.6 |
||||
}; |
||||
const res = {}; |
||||
|
||||
const generateQuery = proxyquire('../../../query/address_search_using_ids', { |
||||
'pelias-logger': logger, |
||||
'pelias-query': { |
||||
layout: { |
||||
AddressesUsingIdsQuery: MockQuery |
||||
}, |
||||
view: views, |
||||
Vars: require('pelias-query').Vars |
||||
} |
||||
|
||||
}); |
||||
|
||||
const generatedQuery = generateQuery(clean, res); |
||||
|
||||
t.equals(generatedQuery.body.vs.var('boundary:circle:lat').toString(), 12.121212); |
||||
t.equals(generatedQuery.body.vs.var('boundary:circle:lon').toString(), 21.212121); |
||||
t.equals(generatedQuery.body.vs.var('boundary:circle:radius').toString(), '18km'); |
||||
|
||||
t.end(); |
||||
|
||||
}); |
||||
|
||||
}; |
||||
|
||||
module.exports.all = (tape, common) => { |
||||
function test(name, testFunction) { |
||||
return tape(`address_search_using_ids query ${name}`, testFunction); |
||||
} |
||||
|
||||
for( var testCase in module.exports.tests ){ |
||||
module.exports.tests[testCase](test, common); |
||||
} |
||||
}; |
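The boundary.circle cases above reduce to a small piece of radius handling; the helper name below is hypothetical and the default/rounding values are taken from the tests themselves.

// default to 50km when no radius was supplied, otherwise round to whole kilometres
function circleRadiusKm(clean) {
  const radius = clean['boundary.circle.radius'];
  return radius === undefined ? '50km' : `${Math.round(radius)}km`;
}

// circleRadiusKm({})                                  => '50km'
// circleRadiusKm({ 'boundary.circle.radius': 17.6 })  => '18km'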
@ -0,0 +1,65 @@
|
||||
const sanitizer = require('../../../sanitizer/_debug')(); |
||||
|
||||
module.exports.tests = {}; |
||||
|
||||
module.exports.tests.sanitize_debug = function(test, common) { |
||||
['true', '1', 1, true, 'TRUE', 'TrUe'].forEach((value) => { |
||||
test('debug flag is on', function(t) { |
||||
const raw = { debug: value }; |
||||
const clean = {}; |
||||
const expected_clean = { enableDebug: true }; |
||||
|
||||
const messages = sanitizer.sanitize(raw, clean); |
||||
|
||||
t.deepEquals(clean, expected_clean); |
||||
t.deepEqual(messages.errors, [], 'no error returned'); |
||||
t.deepEqual(messages.warnings, [], 'no warnings returned'); |
||||
t.end(); |
||||
}); |
||||
}); |
||||
|
||||
['false', false, '0', 0, 'value', {}].forEach((value) => { |
||||
test('non-truthy values should set clean.enableDebug to false', function(t) { |
||||
const raw = { debug: value }; |
||||
const clean = {}; |
||||
const expected_clean = { enableDebug: false }; |
||||
|
||||
const messages = sanitizer.sanitize(raw, clean); |
||||
|
||||
t.deepEquals(clean, expected_clean); |
||||
t.deepEqual(messages.errors, [], 'no error returned'); |
||||
t.deepEqual(messages.warnings, [], 'no warnings returned'); |
||||
t.end(); |
||||
}); |
||||
}); |
||||
|
||||
test('undefined debug parameter should not set clean.enableDebug', function(t) { |
||||
const raw = {}; |
||||
const clean = {}; |
||||
const expected_clean = {}; |
||||
|
||||
const messages = sanitizer.sanitize(raw, clean); |
||||
|
||||
t.deepEquals(clean, expected_clean); |
||||
t.deepEqual(messages.errors, [], 'no error returned'); |
||||
t.deepEqual(messages.warnings, [], 'no warnings returned'); |
||||
t.end(); |
||||
}); |
||||
|
||||
test('return an array of expected parameters in object form for validation', (t) => { |
||||
const expected = [{ name: 'debug' }]; |
||||
const validParameters = sanitizer.expected(); |
||||
t.deepEquals(validParameters, expected); |
||||
t.end(); |
||||
}); |
||||
}; |
||||
|
||||
module.exports.all = function (tape, common) { |
||||
function test(name, testFunction) { |
||||
return tape('SANITIZE _debug ' + name, testFunction); |
||||
} |
||||
|
||||
for( var testCase in module.exports.tests ){ |
||||
module.exports.tests[testCase](test, common); |
||||
} |
||||
}; |
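A plausible shape for sanitizer/_debug, reconstructed only from the expectations above (illustrative, not necessarily the shipped source):

const _ = require('lodash');

module.exports = () => ({
  sanitize: (raw, clean) => {
    // only touch clean when a debug parameter was actually supplied
    if (!_.isUndefined(raw.debug)) {
      // 'true'/'1' (case-insensitive, string or primitive) enable debug output; anything else disables it
      clean.enableDebug = ['true', '1'].includes(_.toLower(_.toString(raw.debug)));
    }
    return { errors: [], warnings: [] };
  },
  expected: () => ([{ name: 'debug' }])
});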
@ -0,0 +1,179 @@
|
||||
const proxyquire = require('proxyquire').noCallThru(); |
||||
const mock_logger = require('pelias-mock-logger'); |
||||
|
||||
module.exports.tests = {}; |
||||
|
||||
module.exports.tests.sanitize = (test, common) => { |
||||
test('verify that no sanitizers were called when should_execute returns false', (t) => { |
||||
t.plan(1); |
||||
|
||||
const logger = mock_logger(); |
||||
|
||||
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
||||
// were all called correctly
|
||||
const defer_to_addressit = proxyquire('../../../sanitizer/defer_to_addressit', { |
||||
'../sanitizer/_text_addressit': function () { |
||||
return { |
||||
sanitize: () => { |
||||
t.fail('_text_addressit should not have been called'); |
||||
} |
||||
}; |
||||
}, |
||||
'pelias-logger': logger, |
||||
'../sanitizer/_debug': () => { |
||||
return { |
||||
sanitize: () => { |
||||
t.fail('_debug should not have been called'); |
||||
} |
||||
}; |
||||
} |
||||
})(() => false); |
||||
|
||||
defer_to_addressit({}, {}, () => { |
||||
t.equals(logger.getInfoMessages().length, 0); |
||||
t.end(); |
||||
}); |
||||
|
||||
}); |
||||
|
||||
test('verify that _text_addressit sanitizer was called when should_execute returns true', (t) => { |
||||
t.plan(3); |
||||
|
||||
const logger = mock_logger(); |
||||
|
||||
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
||||
// were all called correctly
|
||||
const defer_to_addressit = proxyquire('../../../sanitizer/defer_to_addressit', { |
||||
'../sanitizer/_text_addressit': function () { |
||||
return { |
||||
sanitize: () => { |
||||
t.pass('_text_addressit should have been called'); |
||||
return { errors: [], warnings: [] }; |
||||
} |
||||
}; |
||||
}, |
||||
'pelias-logger': logger, |
||||
'../helper/logging': { |
||||
isDNT: () => false |
||||
}, |
||||
'../sanitizer/_debug': () => { |
||||
return { |
||||
sanitize: () => { |
||||
t.pass('_debug should have been called'); |
||||
return { errors: [], warnings: [] }; |
||||
} |
||||
}; |
||||
}, |
||||
})(() => true); |
||||
|
||||
const req = { |
||||
path: '/v1/search', |
||||
clean: { |
||||
text: 'this is the query text' |
||||
} |
||||
}; |
||||
|
||||
defer_to_addressit(req, {}, () => { |
||||
t.deepEquals(logger.getInfoMessages(), ['fallback queryText: this is the query text']); |
||||
t.end(); |
||||
}); |
||||
|
||||
}); |
||||
|
||||
test('query should not be logged if path != \'/v1/search\'', (t) => { |
||||
t.plan(3); |
||||
|
||||
const logger = mock_logger(); |
||||
|
||||
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
||||
// were all called correctly
|
||||
const defer_to_addressit = proxyquire('../../../sanitizer/defer_to_addressit', { |
||||
'../sanitizer/_text_addressit': function () { |
||||
return { |
||||
sanitize: () => { |
||||
t.pass('_text_addressit should have been called'); |
||||
return { errors: [], warnings: [] }; |
||||
} |
||||
}; |
||||
}, |
||||
'pelias-logger': logger, |
||||
'../sanitizer/_debug': () => { |
||||
return { |
||||
sanitize: () => { |
||||
t.pass('_debug should have been called'); |
||||
return { errors: [], warnings: [] }; |
||||
} |
||||
}; |
||||
}, |
||||
})(() => true); |
||||
|
||||
const req = { |
||||
path: 'not /v1/search', |
||||
clean: { |
||||
text: 'this is the query text' |
||||
} |
||||
}; |
||||
|
||||
defer_to_addressit(req, {}, () => { |
||||
t.deepEquals(logger.getInfoMessages(), []); |
||||
t.end(); |
||||
}); |
||||
|
||||
}); |
||||
|
||||
test('query should be logged as [text removed] if private', (t) => { |
||||
t.plan(3); |
||||
|
||||
const logger = mock_logger(); |
||||
|
||||
// rather than re-verify the functionality of all the sanitizers, this test just verifies that they
|
||||
// were all called correctly
|
||||
const defer_to_addressit = proxyquire('../../../sanitizer/defer_to_addressit', { |
||||
'../sanitizer/_text_addressit': function () { |
||||
return { |
||||
sanitize: () => { |
||||
t.pass('_text_addressit should have been called'); |
||||
return { errors: [], warnings: [] }; |
||||
} |
||||
}; |
||||
}, |
||||
'pelias-logger': logger, |
||||
'../helper/logging': { |
||||
isDNT: () => true |
||||
}, |
||||
'../sanitizer/_debug': () => { |
||||
return { |
||||
sanitize: () => { |
||||
t.pass('_debug should have been called'); |
||||
return { errors: [], warnings: [] }; |
||||
} |
||||
}; |
||||
} |
||||
})(() => true); |
||||
|
||||
const req = { |
||||
path: '/v1/search', |
||||
clean: { |
||||
text: 'this is the query text' |
||||
} |
||||
}; |
||||
|
||||
defer_to_addressit(req, {}, () => { |
||||
t.deepEquals(logger.getInfoMessages(), ['fallback queryText: [text removed]']); |
||||
t.end(); |
||||
}); |
||||
|
||||
}); |
||||
|
||||
}; |
||||
|
||||
module.exports.all = function (tape, common) { |
||||
|
||||
function test(name, testFunction) { |
||||
return tape(`SANITIZE /defer_to_addressit ${name}`, testFunction); |
||||
} |
||||
|
||||
for( var testCase in module.exports.tests ){ |
||||
module.exports.tests[testCase](test, common); |
||||
} |
||||
}; |
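Taken together, these cases pin down when the fallback query text gets logged; the helper below is a hypothetical distillation (the isDNT signature is assumed to take the request), not the sanitizer source.

const logging = require('../helper/logging');

// decide what, if anything, to log for the addressit fallback
function fallbackLogMessage(req) {
  // only the /v1/search endpoint logs the fallback query, and DNT requests are scrubbed
  if (req.path !== '/v1/search') { return undefined; }
  const text = logging.isDNT(req) ? '[text removed]' : req.clean.text;
  return `fallback queryText: ${text}`;
}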