
Merge branch 'master' of github.com:pelias/api into 261-set-boundary-circle-latlong-to-point-latlon

pull/295/head
Diana Shkolnikov, 9 years ago
commit 3395c6485e
51 changed files (lines changed shown in parentheses):

  1. controller/place.js (11)
  2. controller/search.js (1)
  3. helper/geojsonify.js (79)
  4. helper/query_parser.js (4)
  5. helper/type_mapping.js (87)
  6. helper/types.js (20)
  7. package.json (2)
  8. query/layers.js (15)
  9. query/sources.js (14)
  10. query/text_parser.js (7)
  11. query/types.js (16)
  12. sanitiser/_ids.js (70)
  13. sanitiser/_single_scalar_parameters.js (23)
  14. sanitiser/autocomplete.js (1)
  15. sanitiser/place.js (1)
  16. sanitiser/reverse.js (6)
  17. sanitiser/search.js (6)
  18. test/ciao/place/basic_place.coffee (4)
  19. test/ciao/reverse/duplicate_parameter_name.coffee (30)
  20. test/ciao/reverse/layers_invalid.coffee (4)
  21. test/ciao/reverse/layers_mix_invalid_valid.coffee (4)
  22. test/ciao/reverse/layers_multiple.coffee (4)
  23. test/ciao/reverse/layers_single.coffee (4)
  24. test/ciao/reverse/non_scalar_parameter.coffee (30)
  25. test/ciao/reverse/sources_layers_invalid_combo.coffee (4)
  26. test/ciao/reverse/sources_layers_valid_combo.coffee (4)
  27. test/ciao/search/layers_alias_address.coffee (4)
  28. test/ciao/search/layers_invalid.coffee (4)
  29. test/ciao/search/layers_mix_invalid_valid.coffee (4)
  30. test/ciao/search/layers_multiple.coffee (4)
  31. test/ciao/search/layers_single.coffee (4)
  32. test/ciao/search/sources_layers_invalid_combo.coffee (4)
  33. test/ciao/search/sources_layers_valid_combo.coffee (4)
  34. test/unit/controller/place.js (10)
  35. test/unit/controller/search.js (31)
  36. test/unit/helper/geojsonify.js (3)
  37. test/unit/helper/query_parser.js (5)
  38. test/unit/helper/type_mapping.js (52)
  39. test/unit/mock/backend.js (2)
  40. test/unit/query/types.js (23)
  41. test/unit/run.js (3)
  42. test/unit/sanitiser/_ids.js (80)
  43. test/unit/sanitiser/_layers.js (7)
  44. test/unit/sanitiser/_single_scalar_parameters.js (60)
  45. test/unit/sanitiser/_source.js (127)
  46. test/unit/sanitiser/_sources.js (3)
  47. test/unit/sanitiser/autocomplete.js (2)
  48. test/unit/sanitiser/place.js (20)
  49. test/unit/sanitiser/reverse.js (2)
  50. test/unit/sanitiser/search.js (4)
  51. test/unit/service/search.js (7)

11
controller/place.js

@ -16,7 +16,16 @@ function setup( backend ){
var query = req.clean.ids.map( function(id) {
return {
_index: 'pelias',
_type: id.type,
/*
* some gids aren't resolvable to a single type (ex: osmnode and osmway
* both have source osm and layer venue), so expect an array of
* possible values. It's important to use `type` here instead of
* `_type`, as the former actually queries against the type, and thus
* can accept multiple match values. `_type`, on the other hand,
* simply changes the actual URL of the query sent to Elasticsearch to
* contain a type, which obviously can only take a single type.
*/
type: id.types,
_id: id.id
};
});
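For context, a rough sketch (not part of the diff) of how a place request for a gid that resolves to two types ends up in the mget body; the { body: { docs: [...] } } shape follows the unit test further down, and the example id is hypothetical:

// hypothetical request: GET /v1/place?ids=osm:venue:5
// the _ids sanitiser maps source 'osm' + layer 'venue' to two possible types,
// so the controller builds a single mget doc that can match either type
var req = { clean: { ids: [ { id: '5', types: [ 'osmnode', 'osmway' ] } ] } };

var cmd = {
  body: {
    docs: req.clean.ids.map(function (id) {
      return { _index: 'pelias', type: id.types, _id: id.id };
    })
  }
};
// cmd.body.docs => [ { _index: 'pelias', type: [ 'osmnode', 'osmway' ], _id: '5' } ]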

1
controller/search.js

@ -36,6 +36,7 @@ function setup( backend, query ){
// set response data
else {
res.data = docs;
res.meta = meta;
}
next();

79
helper/geojsonify.js

@ -2,7 +2,9 @@
var GeoJSON = require('geojson'),
extent = require('geojson-extent'),
outputGenerator = require('./outputGenerator'),
logger = require('pelias-logger').get('api');
logger = require('pelias-logger').get('api'),
type_mapping = require('./type_mapping'),
_ = require('lodash');
// Properties to be copied
var DETAILS_PROPS = [
@ -22,54 +24,30 @@ var DETAILS_PROPS = [
];
var SOURCES = {
'geoname': 'gn',
'osmnode': 'osm',
'osmway': 'osm',
'admin0': 'qs',
'admin1': 'qs',
'admin2': 'qs',
'neighborhood': 'qs',
'locality': 'qs',
'local_admin': 'qs',
'osmaddress': 'osm',
'openaddresses': 'oa'
};
function lookupSource(src) {
return SOURCES.hasOwnProperty(src._type) ? SOURCES[src._type] : src._type;
var sources = type_mapping.type_to_source;
return sources.hasOwnProperty(src._type) ? sources[src._type] : src._type;
}
/*
* Use the type to layer mapping, except for Geonames, where having a full
* Elasticsearch document source allows a more specific layer name to be chosen
*/
function lookupLayer(src) {
switch(src._type) {
case 'osmnode':
case 'osmway':
return 'venue';
case 'admin0':
return 'country';
case 'admin1':
return 'region';
case 'admin2':
return 'county';
case 'neighborhood':
return 'neighbourhood';
case 'locality':
return 'locality';
case 'local_admin':
return 'localadmin';
case 'osmaddress':
case 'openaddresses':
return 'address';
case 'geoname':
if (src.category && src.category.indexOf('admin') !== -1) {
if (src.category.indexOf('admin:city') !== -1) { return 'locality'; }
if (src.category.indexOf('admin:admin1') !== -1) { return 'region'; }
if (src.category.indexOf('admin:admin2') !== -1) { return 'county'; }
return 'neighbourhood'; // this could also be 'local_admin'
}
if (src.name) { return 'venue'; }
if (src.address) { return 'address'; }
if (src._type === 'geoname') {
if (_.contains(src.category, 'admin')) {
if (_.contains(src.category, 'admin:city')) { return 'locality'; }
if (_.contains(src.category, 'admin:admin1')) { return 'region'; }
if (_.contains(src.category, 'admin:admin2')) { return 'county'; }
return 'neighbourhood'; // this could also be 'local_admin'
}
if (src.name) { return 'venue'; }
if (src.address) { return 'address'; }
}
if (_.contains(type_mapping.types, src._type)) {
return type_mapping.type_to_layer[src._type];
}
logger.warn('[geojsonify]: could not map _type ', src._type);
@ -177,6 +155,16 @@ function copyProperties( source, props, dst ) {
});
}
/**
* Create a gid from a document
* @TODO modify all importers to create separate source and layer fields to remove mapping
*
* @param {object} src
*/
function makeGid(src) {
return lookupSource(src) + ':' + lookupLayer(src) + ':' + src._id;
}
/**
* Determine and set place id, type, and source
*
@ -185,6 +173,7 @@ function copyProperties( source, props, dst ) {
*/
function addMetaData(src, dst) {
dst.id = src._id;
dst.gid = makeGid(src);
dst.layer = lookupLayer(src);
dst.source = lookupSource(src);
}
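As an illustration (not part of the diff; the hit shape is assumed from the fields used above), this is how the new gid is derived for an OSM way, matching the geojsonify test expectations below:

// example Elasticsearch hit
var src = { _id: '34633854', _type: 'osmway', name: { default: 'Empire State Building' } };

lookupSource(src);  // => 'osm'   (type_to_source maps osmway -> osm)
lookupLayer(src);   // => 'venue' (type_to_layer maps osmway -> venue)
makeGid(src);       // => 'osm:venue:34633854'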

4
helper/query_parser.js

@ -1,7 +1,7 @@
var parser = require('addressit');
var extend = require('extend');
var layers_map = require('../query/layers');
var type_mapping = require('../helper/type_mapping');
var delim = ',';
module.exports = {};
@ -9,7 +9,7 @@ module.exports = {};
module.exports.get_layers = function get_layers(query) {
if (query.length <= 3 ) {
// no address parsing required
return layers_map.coarse;
return type_mapping.layer_with_aliases_to_type.coarse;
}
};
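In other words, a short query now short-circuits straight to the coarse alias from the shared mapping. A quick sketch (illustrative call, path relative to the repo root):

var query_parser = require('./helper/query_parser');

// 'NYC' is only 3 characters, so no address parsing is attempted and the
// coarse alias types are returned directly
query_parser.get_layers('NYC');
// => [ 'admin0', 'admin1', 'admin2', 'neighborhood', 'locality', 'local_admin' ]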

87
helper/type_mapping.js

@ -0,0 +1,87 @@
var extend = require('extend'),
_ = require('lodash');
var TYPE_TO_SOURCE = {
'geoname': 'gn',
'osmnode': 'osm',
'osmway': 'osm',
'admin0': 'qs',
'admin1': 'qs',
'admin2': 'qs',
'neighborhood': 'qs',
'locality': 'qs',
'local_admin': 'qs',
'osmaddress': 'osm',
'openaddresses': 'oa'
};
/*
* This doesn't include alias layers such as coarse
*/
var TYPE_TO_LAYER = {
'geoname': 'venue',
'osmnode': 'venue',
'osmway': 'venue',
'admin0': 'country',
'admin1': 'region',
'admin2': 'county',
'neighborhood': 'neighbourhood',
'locality': 'locality',
'local_admin': 'localadmin',
'osmaddress': 'address',
'openaddresses': 'address'
};
var SOURCE_TO_TYPE = {
'gn' : ['geoname'],
'geonames' : ['geoname'],
'oa' : ['openaddresses'],
'openaddresses' : ['openaddresses'],
'qs' : ['admin0', 'admin1', 'admin2', 'neighborhood', 'locality', 'local_admin'],
'quattroshapes' : ['admin0', 'admin1', 'admin2', 'neighborhood', 'locality', 'local_admin'],
'osm' : ['osmaddress', 'osmnode', 'osmway'],
'openstreetmap' : ['osmaddress', 'osmnode', 'osmway']
};
/**
* This does not included alias layers, those are built separately
*/
var LAYER_TO_TYPE = {
'venue': ['geoname','osmnode','osmway'],
'address': ['osmaddress','openaddresses', 'geoname'],
'country': ['admin0', 'geoname'],
'region': ['admin1', 'geoname'],
'county': ['admin2', 'geoname'],
'locality': ['locality', 'geoname'],
'localadmin': ['local_admin'],
'neighbourhood': ['neighborhood', 'geoname']
};
var LAYER_ALIASES = {
'coarse': ['admin0','admin1','admin2','neighborhood','locality','local_admin']
};
var LAYER_WITH_ALIASES_TO_TYPE = extend({}, LAYER_ALIASES, LAYER_TO_TYPE);
/*
* derive the list of types, sources, and layers from above mappings
*/
var TYPES = Object.keys(TYPE_TO_SOURCE);
var SOURCES = Object.keys(SOURCE_TO_TYPE);
var LAYERS = Object.keys(LAYER_TO_TYPE);
var sourceAndLayerToType = function sourceAndLayerToType(source, layer) {
return _.intersection(SOURCE_TO_TYPE[source], LAYER_WITH_ALIASES_TO_TYPE[layer]);
};
module.exports = {
types: TYPES,
sources: SOURCES,
layers: LAYERS,
type_to_source: TYPE_TO_SOURCE,
type_to_layer: TYPE_TO_LAYER,
source_to_type: SOURCE_TO_TYPE,
layer_to_type: LAYER_TO_TYPE,
layer_with_aliases_to_type: LAYER_WITH_ALIASES_TO_TYPE,
source_and_layer_to_type: sourceAndLayerToType
};
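A usage sketch of the new source_and_layer_to_type helper (illustrative only, not part of the diff), showing how a source/layer pair narrows to concrete Elasticsearch types via set intersection:

var type_mapping = require('./helper/type_mapping');

// intersection of SOURCE_TO_TYPE['osm'] and LAYER_WITH_ALIASES_TO_TYPE['venue']
type_mapping.source_and_layer_to_type('osm', 'venue');
// => [ 'osmnode', 'osmway' ]

// intersection of SOURCE_TO_TYPE['geonames'] and LAYER_WITH_ALIASES_TO_TYPE['coarse']
// is empty, since geonames records are not part of the coarse alias
type_mapping.source_and_layer_to_type('geonames', 'coarse');
// => []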

20
helper/types.js

@ -1,13 +1,5 @@
var valid_types = require( '../query/types' );
/**
* Calculate the set-style intersection of two arrays
*/
var intersection = function intersection(set1, set2) {
return set2.filter(function(value) {
return set1.indexOf(value) !== -1;
});
};
var type_mapping = require( '../helper/type_mapping' );
var _ = require('lodash');
/**
* Combine all types and determine the unique subset
@ -24,14 +16,14 @@ module.exports = function calculate_types(clean_types) {
* perform a set intersection of their specified types
*/
if (clean_types.from_layers || clean_types.from_sources) {
var types = valid_types;
var types = type_mapping.types;
if (clean_types.from_layers) {
types = intersection(types, clean_types.from_layers);
types = _.intersection(types, clean_types.from_layers);
}
if (clean_types.from_sources) {
types = intersection(types, clean_types.from_sources);
types = _.intersection(types, clean_types.from_sources);
}
return types;
@ -46,4 +38,4 @@ module.exports = function calculate_types(clean_types) {
}
throw new Error('no types specified');
};
};
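A hedged example of the resulting behaviour, mirroring the sources_layers_valid_combo ciao tests: when both layers and sources are supplied, only types present in both sets survive.

var calculate_types = require('./helper/types');

// layers=address expands to [ 'osmaddress', 'openaddresses', 'geoname' ],
// sources=openaddresses expands to [ 'openaddresses' ]
calculate_types({
  from_layers:  [ 'osmaddress', 'openaddresses', 'geoname' ],
  from_sources: [ 'openaddresses' ]
});
// => [ 'openaddresses' ]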

2
package.json

@ -52,7 +52,7 @@
"pelias-config": "^1.0.1",
"pelias-esclient": "0.0.25",
"pelias-logger": "^0.0.8",
"pelias-query": "^1.5.0",
"pelias-query": "1.5.0",
"pelias-schema": "1.0.0",
"pelias-suggester-pipeline": "2.0.2",
"stats-lite": "^1.0.3",

15
query/layers.js

@ -1,15 +0,0 @@
/*
* Mapping from data layers to type values
*/
module.exports = {
'venue': ['geoname','osmnode','osmway'],
'address': ['osmaddress','openaddresses'],
'country': ['admin0'],
'region': ['admin1'],
'county': ['admin2'],
'locality': ['locality'],
'localadmin': ['local_admin'],
'neighbourhood': ['neighborhood'],
'coarse': ['admin0','admin1','admin2','neighborhood','locality','local_admin'],
};

14
query/sources.js

@ -1,14 +0,0 @@
/*
* Mapping from data sources to type values
*/
module.exports = {
'gn' : ['geoname'],
'geonames' : ['geoname'],
'oa' : ['openaddresses'],
'openaddresses' : ['openaddresses'],
'qs' : ['admin0', 'admin1', 'admin2', 'neighborhood', 'locality', 'local_admin'],
'quattroshapes' : ['admin0', 'admin1', 'admin2', 'neighborhood', 'locality', 'local_admin'],
'osm' : ['osmaddress', 'osmnode', 'osmway'],
'openstreetmap' : ['osmaddress', 'osmnode', 'osmway']
};

7
query/text_parser.js

@ -1,4 +1,5 @@
var logger = require('pelias-logger').get('api');
var adminFields = require('../helper/adminFields')();
/**
@ -21,9 +22,9 @@ function addParsedVariablesToQueryVariables( parsed_text, vs ){
// ?
else {
console.warn( 'chaos monkey asks: what happens now?' );
console.log( parsed_text );
try{ throw new Error(); } catch(e){ console.error( e.stack ); } // print a stack trace
logger.warn( 'chaos monkey asks: what happens now?' );
logger.warn( parsed_text );
try{ throw new Error(); } catch(e){ logger.warn( e.stack ); } // print a stack trace
}
// ==== add parsed matches [address components] ====

16
query/types.js

@ -1,16 +0,0 @@
// querable types
module.exports = [
'geoname',
'osmnode',
'osmway',
'admin0',
'admin1',
'admin2',
'neighborhood',
'locality',
'local_admin',
'osmaddress',
'openaddresses'
];

70
sanitiser/_ids.js

@ -1,19 +1,58 @@
var _ = require('lodash'),
check = require('check-types'),
types = require('../query/types');
type_mapping = require('../helper/type_mapping');
var ID_DELIM = ':';
// validate inputs, convert types and apply defaults
// id generally looks like 'geoname:4163334' (type:id)
// so, both type and id are required fields.
// validate inputs, convert types and apply defaults id generally looks like
// 'geonames:venue:4163334' (source:layer:id) so, all three are required
var lengthError = 'invalid param \'ids\': length must be >0';
var formatError = function(input) {
return 'id `' + input + 'is invalid: must be of the format type:id for ex: \'geoname:4163334\'';
return 'id `' + input + ' is invalid: must be of the format source:layer:id for ex: \'geonames:venue:4163334\'';
};
var targetError = function(target, target_list) {
return target + ' is invalid. It must be one of these values - [' + target_list.join(', ') + ']';
};
function sanitizeId(rawId, messages) {
var parts = rawId.split(ID_DELIM);
if ( parts.length < 3 ) {
messages.errors.push( formatError(rawId) );
return;
}
var source = parts[0];
var layer = parts[1];
var id = parts.slice(2).join(ID_DELIM);
// check if any parts of the gid are empty
if (_.contains([source, layer, id], '')) {
messages.errors.push( formatError(rawId) );
return;
}
if (!_.contains(type_mapping.sources, source)) {
messages.errors.push( targetError(source, type_mapping.sources) );
return;
}
if (!_.contains(type_mapping.layers, layer)) {
messages.errors.push( targetError(layer, type_mapping.layers) );
return;
}
var types = type_mapping.source_and_layer_to_type(source, layer);
return {
id: id,
types: types
};
}
function sanitize( raw, clean ){
// error & warning messages
var messages = { errors: [], warnings: [] };
@ -42,25 +81,8 @@ function sanitize( raw, clean ){
}
// cycle through raw ids and set those which are valid
var validIds = rawIds.map( function( rawId ){
var param_index = rawId.indexOf(ID_DELIM);
var type = rawId.substring(0, param_index );
var id = rawId.substring(param_index + 1);
// check id format
if(!check.contains(rawId, ID_DELIM) || !check.unemptyString( id ) || !check.unemptyString( type )) {
messages.errors.push( formatError(rawId) );
}
// type text must be one of the types
else if( !_.contains( types, type ) ){
messages.errors.push( type + ' is invalid. It must be one of these values - [' + types.join(', ') + ']' );
}
else {
return {
id: id,
type: type
};
}
var validIds = rawIds.map(function(rawId) {
return sanitizeId(rawId, messages);
});
if (validIds.every(check.object)) {
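Taken together, sanitizeId behaves roughly like this (illustrative calls, not part of the diff; results follow the mappings and unit tests in this commit, and the openaddresses id is hypothetical):

var messages = { errors: [], warnings: [] };

sanitizeId('geonames:venue:4163334', messages);
// => { id: '4163334', types: [ 'geoname' ] }

sanitizeId('openaddresses:address:au/nsw:1234', messages);
// ids may themselves contain ':'; everything after source and layer is kept intact
// => { id: 'au/nsw:1234', types: [ 'openaddresses' ] }

sanitizeId('geonames:23', messages);
// => undefined, and messages.errors gains the source:layer:id format error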

23
sanitiser/_single_scalar_parameters.js

@ -0,0 +1,23 @@
var _ = require('lodash'),
check = require('check-types');
// validate inputs
function sanitize( raw, clean ){
// error & warning messages
var messages = { errors: [], warnings: [] };
Object.keys(raw).forEach(function(key) {
if (_.isArray(raw[key])) {
messages.errors.push('\'' + key + '\' parameter can only have one value');
} else if (_.isObject(raw[key])) {
messages.errors.push('\'' + key + '\' parameter must be a scalar');
}
});
return messages;
}
// export function
module.exports = sanitize;
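For example (a sketch assuming the express/qs-style query parsing the API uses, where a repeated parameter arrives as an array and bracket syntax arrives as an object):

// ?param=value1&param=value2  -> raw.param = [ 'value1', 'value2' ]
// ?parameter[idx]=value       -> raw.parameter = { idx: 'value' }
var messages = sanitize({ param: [ 'value1', 'value2' ], size: '10' }, {});
// messages.errors => [ '\'param\' parameter can only have one value' ]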

1
sanitiser/autocomplete.js

@ -1,5 +1,6 @@
var sanitizeAll = require('../sanitiser/sanitizeAll'),
sanitizers = {
singleScalarParameters: require('../sanitiser/_single_scalar_parameters'),
text: require('../sanitiser/_text'),
size: require('../sanitiser/_size'),
private: require('../sanitiser/_flag_bool')('private', false),

1
sanitiser/place.js

@ -1,6 +1,7 @@
var sanitizeAll = require('../sanitiser/sanitizeAll'),
sanitizers = {
singleScalarParameters: require('../sanitiser/_single_scalar_parameters'),
ids: require('../sanitiser/_ids'),
private: require('../sanitiser/_flag_bool')('private', false)
};

6
sanitiser/reverse.js

@ -1,8 +1,10 @@
var type_mapping = require('../helper/type_mapping');
var sanitizeAll = require('../sanitiser/sanitizeAll'),
sanitizers = {
layers: require('../sanitiser/_targets')('layers', require('../query/layers')),
sources: require('../sanitiser/_targets')('sources', require('../query/sources')),
singleScalarParameters: require('../sanitiser/_single_scalar_parameters'),
layers: require('../sanitiser/_targets')('layers', type_mapping.layer_with_aliases_to_type),
sources: require('../sanitiser/_targets')('sources', type_mapping.source_to_type),
size: require('../sanitiser/_size'),
private: require('../sanitiser/_flag_bool')('private', false),
geo_reverse: require('../sanitiser/_geo_reverse'),

6
sanitiser/search.js

@ -1,10 +1,12 @@
var type_mapping = require('../helper/type_mapping');
var sanitizeAll = require('../sanitiser/sanitizeAll'),
sanitizers = {
singleScalarParameters: require('../sanitiser/_single_scalar_parameters'),
text: require('../sanitiser/_text'),
size: require('../sanitiser/_size'),
layers: require('../sanitiser/_targets')('layers', require( '../query/layers' )),
sources: require('../sanitiser/_targets')('sources', require( '../query/sources' )),
layers: require('../sanitiser/_targets')('layers', type_mapping.layer_with_aliases_to_type),
sources: require('../sanitiser/_targets')('sources', type_mapping.source_to_type),
private: require('../sanitiser/_flag_bool')('private', false),
geo_search: require('../sanitiser/_geo_search'),
boundary_country: require('../sanitiser/_boundary_country'),
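Net effect on the layers/sources sanitisers: the same _targets helper is reused, but it now reads from type_mapping, so alias layers resolve against the full mapping (which now includes geonames records). A sketch, assuming _targets keeps its existing (raw, clean) signature and writes clean.types as the ciao tests show:

var type_mapping = require('./helper/type_mapping');
var layersSanitizer = require('./sanitiser/_targets')('layers', type_mapping.layer_with_aliases_to_type);

var clean = {};
layersSanitizer({ layers: 'address' }, clean);
// clean.types.from_layers => [ 'osmaddress', 'openaddresses', 'geoname' ]
// (previously just [ 'osmaddress', 'openaddresses' ], per the updated ciao tests)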

4
test/ciao/place/basic_place.coffee

@ -1,6 +1,6 @@
#> basic place
path: '/v1/place?ids=geoname:1'
path: '/v1/place?ids=geonames:venue:1'
#? 200 ok
response.statusCode.should.be.equal 200
@ -29,5 +29,5 @@ should.not.exist json.geocoding.errors
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['ids'].should.eql [{ id: '1', type: 'geoname' }]
json.geocoding.query['ids'].should.eql [{ id: '1', types: [ 'geoname' ] }]
should.not.exist json.geocoding.query['size']

30
test/ciao/reverse/duplicate_parameter_name.coffee

@ -0,0 +1,30 @@
#> set size
path: '/v1/reverse?point.lat=1&point.lon=1&param=value1&param=value2'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected warnings
should.not.exist json.geocoding.warnings
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'param\' parameter can only have one value' ]

4
test/ciao/reverse/layers_invalid.coffee

@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: venue,address,country,region,county,locality,localadmin,neighbourhood,coarse' ]
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,venue,address,country,region,county,locality,localadmin,neighbourhood' ]
#? expected warnings
should.not.exist json.geocoding.warnings
@ -32,4 +32,4 @@ should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['size'].should.eql 10
should.not.exist json.geocoding.query['types']
should.not.exist json.geocoding.query['type']
should.not.exist json.geocoding.query['type']

4
test/ciao/reverse/layers_mix_invalid_valid.coffee

@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: venue,address,country,region,county,locality,localadmin,neighbourhood,coarse' ]
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,venue,address,country,region,county,locality,localadmin,neighbourhood' ]
#? expected warnings
should.not.exist json.geocoding.warnings
@ -32,4 +32,4 @@ should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['size'].should.eql 10
should.not.exist json.geocoding.query['types']
should.not.exist json.geocoding.query['type']
should.not.exist json.geocoding.query['type']

4
test/ciao/reverse/layers_multiple.coffee

@ -30,5 +30,5 @@ should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['size'].should.eql 10
json.geocoding.query.types['from_layers'].should.eql ["admin0","admin1"]
json.geocoding.query['type'].should.eql ["admin0","admin1"]
json.geocoding.query.types['from_layers'].should.eql ["admin0","geoname","admin1"]
json.geocoding.query['type'].should.eql ["admin0","geoname","admin1"]

4
test/ciao/reverse/layers_single.coffee

@ -30,5 +30,5 @@ should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['size'].should.eql 10
json.geocoding.query.types['from_layers'].should.eql ["admin0"]
json.geocoding.query['type'].should.eql ["admin0"]
json.geocoding.query.types['from_layers'].should.eql ["admin0","geoname"]
json.geocoding.query['type'].should.eql ["admin0","geoname"]

30
test/ciao/reverse/non_scalar_parameter.coffee

@ -0,0 +1,30 @@
#> set size
path: '/v1/reverse?point.lat=1&point.lon=1&parameter[idx]=value'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected warnings
should.not.exist json.geocoding.warnings
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'parameter\' parameter must be a scalar' ]

4
test/ciao/reverse/sources_layers_invalid_combo.coffee

@ -31,6 +31,6 @@ should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['size'].should.eql 10
json.geocoding.query.types['from_layers'].should.eql ["osmaddress","openaddresses"]
json.geocoding.query.types['from_layers'].should.eql ["osmaddress","openaddresses","geoname"]
json.geocoding.query.types['from_sources'].should.eql ["admin0","admin1","admin2","neighborhood","locality","local_admin"]
should.not.exist json.geocoding.query['type']
should.not.exist json.geocoding.query['type']

4
test/ciao/reverse/sources_layers_valid_combo.coffee

@ -30,5 +30,5 @@ should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['size'].should.eql 10
json.geocoding.query.types['from_layers'].should.eql ["osmaddress","openaddresses"]
json.geocoding.query['type'].should.eql ["openaddresses"]
json.geocoding.query.types['from_layers'].should.eql ["osmaddress","openaddresses","geoname"]
json.geocoding.query['type'].should.eql ["openaddresses"]

4
test/ciao/search/layers_alias_address.coffee

@ -31,5 +31,5 @@ should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.types['from_layers'].should.eql ["osmaddress","openaddresses"]
json.geocoding.query['type'].should.eql ["osmaddress","openaddresses"]
json.geocoding.query.types['from_layers'].should.eql ["osmaddress","openaddresses","geoname"]
json.geocoding.query['type'].should.eql ["osmaddress","openaddresses","geoname"]

4
test/ciao/search/layers_invalid.coffee

@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: venue,address,country,region,county,locality,localadmin,neighbourhood,coarse' ]
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,venue,address,country,region,county,locality,localadmin,neighbourhood' ]
#? expected warnings
should.not.exist json.geocoding.warnings
@ -33,4 +33,4 @@ should.not.exist json.geocoding.warnings
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
should.not.exist json.geocoding.query['types']
should.not.exist json.geocoding.query['type']
should.not.exist json.geocoding.query['type']

4
test/ciao/search/layers_mix_invalid_valid.coffee

@ -24,7 +24,7 @@ json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: venue,address,country,region,county,locality,localadmin,neighbourhood,coarse' ]
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,venue,address,country,region,county,locality,localadmin,neighbourhood' ]
#? expected warnings
should.not.exist json.geocoding.warnings
@ -33,4 +33,4 @@ should.not.exist json.geocoding.warnings
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
should.not.exist json.geocoding.query['types']
should.not.exist json.geocoding.query['type']
should.not.exist json.geocoding.query['type']

4
test/ciao/search/layers_multiple.coffee

@ -31,5 +31,5 @@ should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.types['from_layers'].should.eql ["admin0","admin1"]
json.geocoding.query['type'].should.eql ["admin0","admin1"]
json.geocoding.query.types['from_layers'].should.eql ["admin0","geoname","admin1"]
json.geocoding.query['type'].should.eql ["admin0","geoname","admin1"]

4
test/ciao/search/layers_single.coffee

@ -31,5 +31,5 @@ should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.types['from_layers'].should.eql ["admin0"]
json.geocoding.query['type'].should.eql ["admin0"]
json.geocoding.query.types['from_layers'].should.eql ["admin0","geoname"]
json.geocoding.query['type'].should.eql ["admin0","geoname"]

4
test/ciao/search/sources_layers_invalid_combo.coffee

@ -32,6 +32,6 @@ should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.types['from_layers'].should.eql ["osmaddress","openaddresses"]
json.geocoding.query.types['from_layers'].should.eql ["osmaddress","openaddresses","geoname"]
json.geocoding.query.types['from_sources'].should.eql ["admin0","admin1","admin2","neighborhood","locality","local_admin"]
should.not.exist json.geocoding.query['type']
should.not.exist json.geocoding.query['type']

4
test/ciao/search/sources_layers_valid_combo.coffee

@ -31,5 +31,5 @@ should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.types['from_layers'].should.eql ["osmaddress","openaddresses"]
json.geocoding.query['type'].should.eql ["openaddresses"]
json.geocoding.query.types['from_layers'].should.eql ["osmaddress","openaddresses","geoname"]
json.geocoding.query['type'].should.eql ["openaddresses"]

10
test/unit/controller/place.js

@ -41,7 +41,7 @@ module.exports.tests.functional_success = function(test, common) {
test('functional success', function(t) {
var backend = mockBackend( 'client/mget/ok/1', function( cmd ){
t.deepEqual(cmd, { body: { docs: [ { _id: 123, _index: 'pelias', _type: 'a' } ] } }, 'correct backend command');
t.deepEqual(cmd, { body: { docs: [ { _id: 123, _index: 'pelias', type: [ 'a' ] } ] } }, 'correct backend command');
});
var controller = setup( backend );
var res = {
@ -57,7 +57,7 @@ module.exports.tests.functional_success = function(test, common) {
t.deepEqual(json.features, expected, 'values correctly mapped');
}
};
var req = { clean: { ids: [ {'id' : 123, 'type': 'a' } ] }, errors: [], warnings: [] };
var req = { clean: { ids: [ {'id' : 123, types: [ 'a' ] } ] }, errors: [], warnings: [] };
var next = function next() {
t.equal(req.errors.length, 0, 'next was called without error');
t.end();
@ -70,10 +70,10 @@ module.exports.tests.functional_success = function(test, common) {
module.exports.tests.functional_failure = function(test, common) {
test('functional failure', function(t) {
var backend = mockBackend( 'client/mget/fail/1', function( cmd ){
t.deepEqual(cmd, { body: { docs: [ { _id: 123, _index: 'pelias', _type: 'b' } ] } }, 'correct backend command');
t.deepEqual(cmd, { body: { docs: [ { _id: 123, _index: 'pelias', type: [ 'b' ] } ] } }, 'correct backend command');
});
var controller = setup( backend );
var req = { clean: { ids: [ {'id' : 123, 'type': 'b' } ] }, errors: [], warnings: [] };
var req = { clean: { ids: [ {'id' : 123, types: [ 'b' ] } ] }, errors: [], warnings: [] };
var next = function( message ){
t.equal(req.errors[0],'a backend error occurred','error passed to errorHandler');
t.end();
@ -85,7 +85,7 @@ module.exports.tests.functional_failure = function(test, common) {
module.exports.all = function (tape, common) {
function test(name, testFunction) {
return tape('GET /doc ' + name, testFunction);
return tape('GET /place ' + name, testFunction);
}
for( var testCase in module.exports.tests ){

31
test/unit/controller/search.js

@ -41,6 +41,35 @@ module.exports.tests.functional_success = function(test, common) {
}
}];
var expectedMeta = {
scores: [10, 20]
};
var expectedData = [
{
_id: 'myid1',
_score: 10,
_type: 'mytype1',
admin0: 'country1',
admin1: 'state1',
admin2: 'city1',
center_point: { lat: 100.1, lon: -50.5 },
name: { default: 'test name1' },
value: 1
},
{
_id: 'myid2',
_score: 20,
_type: 'mytype2',
admin0: 'country2',
admin1: 'state2',
admin2: 'city2',
center_point: { lat: 100.2, lon: -51.5 },
name: { default: 'test name2' },
value: 2
}
];
test('functional success', function (t) {
var backend = mockBackend('client/search/ok/1', function (cmd) {
t.deepEqual(cmd, {
@ -66,6 +95,8 @@ module.exports.tests.functional_success = function(test, common) {
var req = { clean: { a: 'b' }, errors: [], warnings: [] };
var next = function next() {
t.equal(req.errors.length, 0, 'next was called without error');
t.deepEqual(res.meta, expectedMeta, 'meta data was set');
t.deepEqual(res.data, expectedData, 'data was set');
t.end();
};
controller(req, res, next);

3
test/unit/helper/geojsonify.js

@ -141,6 +141,7 @@ module.exports.tests.search = function(test, common) {
},
'properties': {
'id': 'id1',
'gid': 'type1:type1:id1',
'layer': 'type1',
'source': 'type1',
'label': '\'Round Midnight Jazz and Blues Bar, test3, Angel',
@ -169,6 +170,7 @@ module.exports.tests.search = function(test, common) {
},
'properties': {
'id': 'id2',
'gid': 'type2:type2:id2',
'layer': 'type2',
'source': 'type2',
'label': 'Blues Cafe, test3, Smithfield',
@ -194,6 +196,7 @@ module.exports.tests.search = function(test, common) {
},
'properties': {
'id': '34633854',
'gid': 'osm:venue:34633854',
'layer': 'venue',
'source': 'osm',
'label': 'Empire State Building, Manhattan, NY',

5
test/unit/helper/query_parser.js

@ -1,6 +1,7 @@
var parser = require('../../../helper/query_parser');
var layers_map = require('../../../query/layers');
var type_mapping = require('../../../helper/type_mapping');
var layers_map = type_mapping.layer_with_aliases_to_type;
module.exports.tests = {};

52
test/unit/helper/type_mapping.js

@ -0,0 +1,52 @@
var check = require('check-types');
var type_mapping = require('../../../helper/type_mapping');
module.exports.tests = {};
module.exports.tests.interfaces = function(test, common) {
test('types list', function(t) {
t.ok(check.array(type_mapping.types), 'is array');
t.ok(check.hasLength(type_mapping.types, 11), 'has correct number of elements');
t.end();
});
test('type to source mapping', function(t) {
t.ok(check.object(type_mapping.type_to_source), 'is object');
t.ok(check.hasLength(Object.keys(type_mapping.type_to_source), 11), 'has correct number of elements');
t.end();
});
test('type to layer mapping', function(t) {
t.ok(check.object(type_mapping.type_to_layer), 'is object');
t.ok(check.hasLength(Object.keys(type_mapping.type_to_layer), 11), 'has correct number of elements');
t.end();
});
test('source to type mapping', function(t) {
t.ok(check.object(type_mapping.source_to_type), 'is object');
t.ok(check.hasLength(Object.keys(type_mapping.source_to_type), 8), 'has correct number of elements');
t.end();
});
test('layer to type mapping', function(t) {
t.ok(check.object(type_mapping.layer_to_type), 'is object');
t.equal(Object.keys(type_mapping.layer_to_type).length, 8, 'has correct number of elements');
t.end();
});
test('layer to type mapping (with aliases)', function(t) {
t.ok(check.object(type_mapping.layer_with_aliases_to_type), 'is object');
t.ok(check.hasLength(Object.keys(type_mapping.layer_with_aliases_to_type), 9), 'has correct number of elements');
t.end();
});
};
module.exports.all = function (tape, common) {
function test(name, testFunction) {
return tape('type_mapping: ' + name, testFunction);
}
for( var testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

2
test/unit/mock/backend.js

@ -94,4 +94,4 @@ function searchEnvelope( options ){
return { hits: { total: options.length, hits: options } };
}
module.exports = setup;
module.exports = setup;

23
test/unit/query/types.js

@ -1,23 +0,0 @@
var types = require('../../../query/types');
module.exports.tests = {};
module.exports.tests.interface = function(test, common) {
test('valid interface', function(t) {
t.true(Array.isArray(types), 'valid array');
t.equal(types.length, 11, 'valid array');
t.end();
});
};
module.exports.all = function (tape, common) {
function test(name, testFunction) {
return tape('types ' + name, testFunction);
}
for( var testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

3
test/unit/run.js

@ -17,7 +17,6 @@ var tests = [
require('./sanitiser/_layers'),
require('./sanitiser/reverse'),
require('./sanitiser/place'),
require('./query/types'),
require('./query/search'),
require('./query/autocomplete'),
require('./query/reverse'),
@ -26,10 +25,12 @@ var tests = [
require('./helper/geojsonify'),
require('./helper/outputSchema'),
require('./helper/types'),
require('./helper/type_mapping'),
require('./sanitiser/_geo_common'),
require('./middleware/distance'),
require('./middleware/confidenceScoreReverse'),
require('./sanitiser/_size'),
require('./sanitiser/_single_scalar_parameters'),
];
tests.map(function(t) {

80
test/unit/sanitiser/_ids.js

@ -1,20 +1,16 @@
var sanitize = require('../../../sanitiser/_ids');
var delimiter = ':';
var types = require('../../../query/types');
var type_mapping = require('../../../helper/type_mapping');
var inputs = {
valid: [ 'geoname:1', 'osmnode:2', 'admin0:53', 'osmway:44', 'geoname:5' ],
invalid: [ ':', '', '::', 'geoname:', ':234', 'gibberish:23' ]
};
var formatError = function(input) {
return 'id `' + input + 'is invalid: must be of the format type:id for ex: \'geoname:4163334\'';
return 'id `' + input + ' is invalid: must be of the format source:layer:id for ex: \'geonames:venue:4163334\'';
};
var lengthError = 'invalid param \'ids\': length must be >0';
var defaultMissingTypeError = function(input) {
var type = input.split(delimiter)[0];
return type + ' is invalid. It must be one of these values - [' + types.join(', ') + ']';
};
module.exports.tests = {};
@ -74,50 +70,58 @@ module.exports.tests.invalid_ids = function(test, common) {
t.end();
});
test('invalid id: type name invalid', function(t) {
var raw = { ids: 'gibberish:23' };
test('invalid id: source name invalid', function(t) {
var raw = { ids: 'invalidsource:venue:23' };
var clean = {};
var expected_error = 'invalidsource is invalid. It must be one of these values - [' + type_mapping.sources.join(', ') + ']';
var messages = sanitize(raw, clean);
t.equal(messages.errors[0], defaultMissingTypeError('gibberish:23'), 'format error returned');
t.equal(messages.errors[0], expected_error, 'format error returned');
t.equal(clean.ids, undefined, 'ids unset in clean object');
t.end();
});
test('invalid id: old style 2 part id', function(t) {
var raw = { ids: 'geonames:23' };
var clean = {};
var messages = sanitize(raw, clean);
t.equal(messages.errors[0], formatError('geonames:23'), 'format error returned');
t.equal(clean.ids, undefined, 'ids unset in clean object');
t.end();
});
};
module.exports.tests.valid_ids = function(test, common) {
test('ids: valid input', function(t) {
inputs.valid.forEach( function( input ){
var input_parts = input.split(delimiter);
var expected_clean = { ids: [ { id: input_parts[1], type: input_parts[0] } ]};
var raw = { ids: input };
var clean = {};
var messages = sanitize( raw, clean );
t.deepEqual( messages.errors, [], 'no error (' + input + ')' );
t.deepEqual( clean, expected_clean, 'clean set correctly (' + input + ')');
});
test('ids: valid input (openaddresses)', function(t) {
var raw = { ids: 'openaddresses:address:20' };
var clean = {};
var expected_ids = [{
id: '20',
types: [ 'openaddresses' ]
}];
var messages = sanitize( raw, clean );
t.deepEqual( messages.errors, [], ' no errors');
t.deepEqual( clean.ids, expected_ids, 'single type value returned');
t.end();
});
test('ids: valid input with multiple values' , function(t) {
var raw = { ids: inputs.valid.join(',') };
test('ids: valid input (osm)', function(t) {
var raw = { ids: 'osm:venue:500' };
var clean = {};
var expected_clean={
ids: [],
};
// construct the expected id and type for each valid input
inputs.valid.forEach( function( input ){
var input_parts = input.split(delimiter);
expected_clean.ids.push({ id: input_parts[1], type: input_parts[0] });
});
var expected_ids = [{
id: '500',
types: [ 'osmnode', 'osmway' ]
}];
var messages = sanitize( raw, clean );
t.deepEqual( messages.errors, [], 'no errors' );
t.deepEqual( clean, expected_clean, 'clean set correctly' );
t.deepEqual( messages.errors, [], ' no errors');
t.deepEqual( clean.ids, expected_ids, 'osm could be two types, but that\'s ok');
t.end();
});
};
@ -137,10 +141,10 @@ module.exports.tests.array_of_ids = function(test, common) {
};
module.exports.tests.multiple_ids = function(test, common) {
test('duplicate ids', function(t) {
var expected_clean = { ids: [ { id: '1', type: 'geoname' }, { id: '2', type: 'osmnode' } ] };
var raw = { ids: 'geoname:1,osmnode:2' };
test('multiple ids', function(t) {
var raw = { ids: 'geonames:venue:1,osm:venue:2' };
var clean = {};
var expected_clean = { ids: [ { id: '1', types: [ 'geoname' ] }, { id: '2', types: [ 'osmnode', 'osmway' ] } ] };
var messages = sanitize( raw, clean);
@ -153,8 +157,8 @@ module.exports.tests.multiple_ids = function(test, common) {
module.exports.tests.de_dupe = function(test, common) {
test('duplicate ids', function(t) {
var expected_clean = { ids: [ { id: '1', type: 'geoname' }, { id: '2', type: 'osmnode' } ]};
var raw = { ids: 'geoname:1,osmnode:2,geoname:1' };
var expected_clean = { ids: [ { id: '1', types: [ 'geoname' ] }, { id: '2', types: [ 'osmnode', 'osmway' ] } ] };
var raw = { ids: 'geonames:venue:1,osm:venue:2,geonames:venue:1' };
var clean = {};
var messages = sanitize( raw, clean );

7
test/unit/sanitiser/_layers.js

@ -1,5 +1,6 @@
var type_mapping = require('../../../helper/type_mapping');
var sanitize = require('../../../sanitiser/_targets')('layers', require('../../../query/layers'));
var sanitize = require('../../../sanitiser/_targets')('layers', type_mapping.layer_with_aliases_to_type);
module.exports.tests = {};
@ -42,7 +43,7 @@ module.exports.tests.sanitize_layers = function(test, common) {
t.end();
});
test('address (alias) layer', function(t) {
var address_layers = ['osmaddress','openaddresses'];
var address_layers = ['osmaddress','openaddresses','geoname'];
var raw = { layers: 'address' };
var clean = {};
@ -75,7 +76,7 @@ module.exports.tests.sanitize_layers = function(test, common) {
t.end();
});
test('address alias layer plus regular layers', function(t) {
var address_layers = ['osmaddress','openaddresses'];
var address_layers = ['osmaddress','openaddresses','geoname'];
var reg_layers = ['admin0', 'locality'];
var raw = { layers: 'address,country,locality' };

60
test/unit/sanitiser/_single_scalar_parameters.js

@ -0,0 +1,60 @@
var sanitize = require('../../../sanitiser/_single_scalar_parameters');
module.exports.tests = {};
module.exports.tests.single_scalar_parameters = function(test, common) {
test('all duplicate parameters should have error messages returned', function(t) {
var raw = {
arrayParameter1: ['value1', 'value2'],
scalarParameter: 'value',
arrayParameter2: ['value3']
};
var clean = {};
var errorsAndWarnings = sanitize(raw, clean);
t.deepEquals(errorsAndWarnings, {
errors: [
'\'arrayParameter1\' parameter can only have one value',
'\'arrayParameter2\' parameter can only have one value',
],
warnings: []
});
t.end();
});
test('object parameters should have error messages returned', function(t) {
var raw = {
objectParameter1: { key1: 'value1', key2: 'value2'},
scalarParameter: 'value',
objectParameter2: { }
};
var clean = {};
var errorsAndWarnings = sanitize(raw, clean);
t.deepEquals(errorsAndWarnings, {
errors: [
'\'objectParameter1\' parameter must be a scalar',
'\'objectParameter2\' parameter must be a scalar'
],
warnings: []
});
t.end();
});
test('request with all scalar parameters should return empty errors', function(t) {
var raw = { scalarParameter1: 'value1', scalarParameter2: 2, scalarParameter3: true };
var clean = {};
var errorsAndWarnings = sanitize(raw, clean);
t.deepEquals(errorsAndWarnings, { errors: [], warnings: [] });
t.end();
});
};
module.exports.all = function (tape, common) {
function test(name, testFunction) {
return tape('SANTIZE _single_scalar_parameters ' + name, testFunction);
}
for( var testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

127
test/unit/sanitiser/_source.js

@ -1,127 +0,0 @@
var sanitize = require( '../../../sanitiser/_source' );
var success_response = { error: false };
module.exports.tests = {};
module.exports.tests.no_sources = function(test, common) {
test('source is not set', function(t) {
var req = {
query: { },
clean: { }
};
var response = sanitize(req.query, req.clean);
t.deepEqual(req.clean.types, {}, 'clean.types should be empty object');
t.deepEqual(response.errors, [], 'no error returned');
t.deepEqual(response.warnings, [], 'no warnings returned');
t.end();
});
test('source is empty string', function(t) {
var req = {
query: {
source: ''
},
clean: { }
};
var response = sanitize(req.query, req.clean);
t.deepEqual(req.clean.types, {}, 'clean.types should be empty object');
t.deepEqual(response.errors, [], 'no error returned');
t.deepEqual(response.warnings, [], 'no warnings returned');
t.end();
});
};
module.exports.tests.valid_sources = function(test, common) {
test('geonames source', function(t) {
var req = {
query: {
source: 'geonames'
},
clean: { }
};
var response = sanitize(req.query, req.clean);
t.deepEqual(req.clean.types, { from_source: ['geoname'] }, 'clean.types should contain from_source entry with geonames');
t.deepEqual(response.errors, [], 'no error returned');
t.deepEqual(response.warnings, [], 'no warnings returned');
t.end();
});
test('openstreetmap source', function(t) {
var req = {
query: {
source: 'openstreetmap'
},
clean: { }
};
var expected_types = {
from_source: ['osmaddress', 'osmnode', 'osmway']
};
var response = sanitize(req.query, req.clean);
t.deepEqual(req.clean.types, expected_types, 'clean.types should contain from_source entry with multiple entries for openstreetmap');
t.deepEqual(response.errors, [], 'no error returned');
t.deepEqual(response.warnings, [], 'no warnings returned');
t.end();
});
test('multiple sources', function(t) {
var req = {
query: {
source: 'openstreetmap,openaddresses'
},
clean: { }
};
var expected_types = {
from_source: ['osmaddress', 'osmnode', 'osmway', 'openaddresses']
};
var response = sanitize(req.query, req.clean);
t.deepEqual(req.clean.types, expected_types,
'clean.types should contain from_source entry with multiple entries for openstreetmap and openadresses');
t.deepEqual(response.errors, [], 'no error returned');
t.deepEqual(response.warnings, [], 'no warnings returned');
t.end();
});
};
module.exports.tests.invalid_sources = function(test, common) {
test('geonames source', function(t) {
var req = {
query: {
source: 'notasource'
},
clean: { }
};
var expected_response = {
errors: [
'\'notasource\' is an invalid source parameter. Valid options: geonames,openaddresses,quattroshapes,openstreetmap'
],
warnings: []
};
var response = sanitize(req.query, req.clean);
t.deepEqual(response, expected_response, 'error with message returned');
t.deepEqual(req.clean.types, { }, 'clean.types should remain empty');
t.end();
});
};
module.exports.all = function (tape, common) {
function test(name, testFunction) {
return tape('SANTIZE _source ' + name, testFunction);
}
for( var testCase in module.exports.tests ){
module.exports.tests[testCase](test, common);
}
};

3
test/unit/sanitiser/_sources.js

@ -1,4 +1,5 @@
var sanitize = require( '../../../sanitiser/_targets' )('sources', require('../../../query/sources'));
var type_mapping = require('../../../helper/type_mapping');
var sanitize = require( '../../../sanitiser/_targets' )('sources', type_mapping.source_to_type);
var success_messages = { error: false };

2
test/unit/sanitiser/autocomplete.js

@ -4,7 +4,7 @@ module.exports.tests = {};
module.exports.tests.sanitisers = function(test, common) {
test('check sanitiser list', function (t) {
var expected = ['text', 'size', 'private', 'geo_autocomplete' ];
var expected = ['singleScalarParameters', 'text', 'size', 'private', 'geo_autocomplete' ];
t.deepEqual(Object.keys(autocomplete.sanitiser_list), expected);
t.end();
});

20
test/unit/sanitiser/place.js

@ -1,7 +1,7 @@
var place = require('../../../sanitiser/place'),
sanitize = place.sanitize,
middleware = place.middleware,
defaultClean = { ids: [ { id: '123', type: 'geoname' } ], private: false };
defaultClean = { ids: [ { id: '123', types: [ 'geoname' ] } ], private: false };
// these are the default values you would expect when no input params are specified.
module.exports.tests = {};
@ -19,11 +19,19 @@ module.exports.tests.interface = function(test, common) {
});
};
module.exports.tests.sanitisers = function(test, common) {
test('check sanitiser list', function (t) {
var expected = ['singleScalarParameters', 'ids', 'private' ];
t.deepEqual(Object.keys(place.sanitiser_list), expected);
t.end();
});
};
module.exports.tests.sanitize_private = function(test, common) {
var invalid_values = [null, -1, 123, NaN, 'abc'];
invalid_values.forEach(function(value) {
test('invalid private param ' + value, function(t) {
var req = { query: { ids:'geoname:123', 'private': value } };
var req = { query: { ids:'geonames:venue:123', 'private': value } };
sanitize(req, function(){
t.deepEqual( req.errors, [], 'no errors' );
t.deepEqual( req.warnings, [], 'no warnings' );
@ -36,7 +44,7 @@ module.exports.tests.sanitize_private = function(test, common) {
var valid_values = ['true', true, 1];
valid_values.forEach(function(value) {
test('valid private param ' + value, function(t) {
var req = { query: { ids:'geoname:123', 'private': value } };
var req = { query: { ids:'geonames:venue:123', 'private': value } };
sanitize(req, function(){
t.deepEqual( req.errors, [], 'no errors' );
t.deepEqual( req.warnings, [], 'no warnings' );
@ -49,7 +57,7 @@ module.exports.tests.sanitize_private = function(test, common) {
var valid_false_values = ['false', false, 0];
valid_false_values.forEach(function(value) {
test('test setting false explicitly ' + value, function(t) {
var req = { query: { ids:'geoname:123', 'private': value } };
var req = { query: { ids:'geonames:venue:123', 'private': value } };
sanitize(req, function(){
t.deepEqual( req.errors, [], 'no errors' );
t.deepEqual( req.warnings, [], 'no warnings' );
@ -60,7 +68,7 @@ module.exports.tests.sanitize_private = function(test, common) {
});
test('test default behavior', function(t) {
var req = { query: { ids:'geoname:123' } };
var req = { query: { ids:'geonames:venue:123' } };
sanitize(req, function(){
t.deepEqual( req.errors, [], 'no errors' );
t.deepEqual( req.warnings, [], 'no warnings' );
@ -83,7 +91,7 @@ module.exports.tests.invalid_params = function(test, common) {
module.exports.tests.middleware_success = function(test, common) {
test('middleware success', function(t) {
var req = { query: { ids: 'geoname:123' }};
var req = { query: { ids: 'geonames:venue:123' }};
var next = function(){
t.deepEqual( req.errors, [], 'no errors' );
t.deepEqual( req.warnings, [], 'no warnings' );

2
test/unit/sanitiser/reverse.js

@ -36,7 +36,7 @@ module.exports.tests.interface = function(test, common) {
module.exports.tests.sanitisers = function(test, common) {
test('check sanitiser list', function (t) {
var expected = ['layers', 'sources', 'size', 'private', 'geo_reverse', 'boundary_country'];
var expected = ['singleScalarParameters', 'layers', 'sources', 'size', 'private', 'geo_reverse', 'boundary_country'];
t.deepEqual(Object.keys(reverse.sanitiser_list), expected);
t.end();
});

4
test/unit/sanitiser/search.js

@ -25,7 +25,7 @@ module.exports.tests.interface = function(test, common) {
module.exports.tests.sanitisers = function(test, common) {
test('check sanitiser list', function (t) {
var expected = ['text', 'size', 'layers', 'sources', 'private', 'geo_search', 'boundary_country' ];
var expected = ['singleScalarParameters', 'text', 'size', 'layers', 'sources', 'private', 'geo_search', 'boundary_country' ];
t.deepEqual(Object.keys(search.sanitiser_list), expected);
t.end();
});
@ -33,7 +33,7 @@ module.exports.tests.sanitisers = function(test, common) {
module.exports.tests.sanitize_invalid_text = function(test, common) {
test('invalid text', function(t) {
var invalid = [ '', 100, null, undefined, new Date() ];
var invalid = [ '', 100, null, undefined ];
invalid.forEach( function( text ){
var req = { query: { text: text } };
sanitize(req, function(){

7
test/unit/service/search.js

@ -35,16 +35,21 @@ module.exports.tests.functional_success = function(test, common) {
}
];
var expectedMeta = {
scores: [10, 20]
};
test('valid ES query', function(t) {
var backend = mockBackend( 'client/search/ok/1', function( cmd ){
t.deepEqual(cmd, example_valid_es_query, 'no change to the command');
});
setup( backend, example_valid_es_query, function(err, data) {
setup( backend, example_valid_es_query, function(err, data, meta) {
t.true(Array.isArray(data), 'returns an array');
data.forEach(function(d) {
t.true(typeof d === 'object', 'valid object');
});
t.deepEqual(data, expected, 'values correctly mapped');
t.deepEqual(meta, expectedMeta, 'meta data correctly mapped');
t.end();
});
});
