Merge pull request #468 from pelias/autocomplete_api

Autocomplete api
Diana Shkolnikov, 9 years ago
commit c4f1abcee3
Changed files (lines changed):

1. README.md (38)
2. query/autocomplete.js (8)
3. sanitiser/_size.js (22)
4. sanitiser/autocomplete.js (8)
5. sanitiser/reverse.js (2)
6. sanitiser/search.js (2)
7. test/ciao/autocomplete/layers_alias_address.coffee (34)
8. test/ciao/autocomplete/layers_alias_coarse.coffee (46)
9. test/ciao/autocomplete/layers_invalid.coffee (34)
10. test/ciao/autocomplete/layers_mix_invalid_valid.coffee (35)
11. test/ciao/autocomplete/layers_multiple.coffee (34)
12. test/ciao/autocomplete/layers_single.coffee (34)
13. test/ciao/autocomplete/size_not_default.coffee (34)
14. test/ciao/autocomplete/sources_invalid.coffee (34)
15. test/ciao/autocomplete/sources_layers_invalid_combo.coffee (37)
16. test/ciao/autocomplete/sources_layers_valid_combo.coffee (35)
17. test/ciao/autocomplete/sources_multiple.coffee (34)
18. test/ciao/autocomplete/sources_single.coffee (34)
19. test/ciao_test_data.js (58)
20. test/unit/fixture/autocomplete_with_source_filtering.js (85)
21. test/unit/fixture/search_with_source_filtering.js (93)
22. test/unit/query/autocomplete.js (13)
23. test/unit/query/search.js (13)
24. test/unit/sanitiser/_size.js (8)
25. test/unit/sanitiser/autocomplete.js (2)

README.md (38)

@@ -34,3 +34,41 @@ The API recognizes the following properties under the top-level `api` key in you
Please fork and pull request against upstream master on a feature branch. Pretty please; provide unit tests and script
fixtures in the `test` directory.
## Unit tests
You can run the unit test suite using the command:
```bash
$ npm test
```
## HTTP tests
We have another set of tests which are used to test the HTTP API layer. These tests send expected HTTP requests and then
assert that the responses coming back have the correct geoJSON format and HTTP status codes.
You can run the HTTP test suite using the command:
```bash
$ npm run ciao
```
Note: some of the tests in this suite fail when no data is present in the index. A small set of test documents is
provided in `./test/ciao_test_data` which can be inserted in order to avoid these errors.
To inject dummy data into your local index:
```bash
$ node test/ciao_test_data.js
```
You can confirm the dummy data has been inserted with the command:
```bash
$ curl localhost:9200/pelias/_count?pretty
{
"count" : 9,
...
}
```

query/autocomplete.js (8)

@@ -41,6 +41,9 @@ query.score( views.focus_selected_layers( views.ngrams_strict ) );
query.score( peliasQuery.view.popularity( views.ngrams_strict ) );
query.score( peliasQuery.view.population( views.ngrams_strict ) );
// non-scoring hard filters
query.filter( peliasQuery.view.sources );
// --------------------------------
/**
@@ -51,6 +54,11 @@ function generateQuery( clean ){
var vs = new peliasQuery.Vars( defaults );
// sources
if( check.array(clean.sources) && clean.sources.length ){
vs.var( 'sources', clean.sources );
}
// mark the name as incomplete (user has not yet typed a comma)
vs.var( 'input:name:isComplete', false );
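
For context, a minimal sketch of how the new sources handling above is exercised; it assumes the module exports `generateQuery` directly (as the unit tests further down suggest) and the require path is illustrative:

```javascript
// illustrative only: build an autocomplete query with a sources filter
var generateQuery = require('../query/autocomplete');

var clean = {
  text: 'union square',        // sanitised text input
  sources: ['openaddresses']   // populated by the _targets('sources', ...) sanitiser
};

var query = generateQuery( clean );

// the compiled query contains a non-scoring `terms` filter on the `source`
// field (see test/unit/fixture/autocomplete_with_source_filtering.js below)
console.log( JSON.stringify( query, null, 2 ) );
```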

sanitiser/_size.js (22)

@@ -5,7 +5,14 @@ var MIN_SIZE = 1,
DEFAULT_SIZE = 10;
// validate inputs, convert types and apply defaults
function sanitize( raw, clean ){
function setup( size_min, size_max, size_def ){
// allow caller to inject custom min/max/default values
if( !check.number( size_min ) ){ size_min = MIN_SIZE; }
if( !check.number( size_max ) ){ size_max = MAX_SIZE; }
if( !check.number( size_def ) ){ size_def = DEFAULT_SIZE; }
return function sanitize( raw, clean ){
// error & warning messages
var messages = { errors: [], warnings: [] };
@@ -15,22 +22,23 @@ function sanitize( raw, clean ){
// invalid numeric input
if( isNaN( clean.size ) ){
clean.size = DEFAULT_SIZE;
clean.size = size_def;
}
// ensure size falls within defined range
else if( clean.size > MAX_SIZE ){
else if( clean.size > size_max ){
// set the max size
messages.warnings.push('out-of-range integer \'size\', using MAX_SIZE');
clean.size = MAX_SIZE;
clean.size = size_max;
}
else if( clean.size < MIN_SIZE ){
else if( clean.size < size_min ){
// set the min size
messages.warnings.push('out-of-range integer \'size\', using MIN_SIZE');
clean.size = MIN_SIZE;
clean.size = size_min;
}
return messages;
};
}
// export function
module.exports = sanitize;
module.exports = setup;
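
To make the new factory concrete, here is a small usage sketch; the bounds passed below are illustrative, not values used by the API, and the coercion of `raw.size` to an integer happens earlier in the module (not shown in this hunk):

```javascript
// _size.js now exports a setup() factory rather than the sanitize() function itself
var setupSize = require('../sanitiser/_size');

// build a sanitiser with custom bounds (illustrative values)
var sanitizeSize = setupSize( /* min */ 1, /* max */ 40, /* default */ 10 );

var clean = {};
var messages = sanitizeSize( { size: 100 }, clean );
// clean.size is clamped to 40 and messages.warnings notes the MAX_SIZE clamp

// calling the factory with no arguments falls back to MIN_SIZE / MAX_SIZE / DEFAULT_SIZE
var sanitizeDefaults = setupSize( /* use defaults */ );
```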

sanitiser/autocomplete.js (8)

@@ -1,8 +1,14 @@
var type_mapping = require('../helper/type_mapping');
var sanitizeAll = require('../sanitiser/sanitizeAll'),
sanitizers = {
singleScalarParameters: require('../sanitiser/_single_scalar_parameters'),
text: require('../sanitiser/_text'),
size: require('../sanitiser/_size'),
size: require('../sanitiser/_size')(10, 10, 10),
layers: require('../sanitiser/_targets')('layers', type_mapping.layer_mapping),
sources: require('../sanitiser/_targets')('sources', type_mapping.source_mapping),
// depends on the layers and sources sanitisers, must be run after them
sources_and_layers: require('../sanitiser/_sources_and_layers'),
private: require('../sanitiser/_flag_bool')('private', false),
geo_autocomplete: require('../sanitiser/_geo_autocomplete'),
};
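
Passing `(10, 10, 10)` pins min, max and default to the same value, so autocomplete effectively ignores any client-supplied `size`. A quick sketch of the resulting behaviour, matching the size_not_default.coffee test below (require path illustrative):

```javascript
// with min = max = default = 10, every request ends up with size 10
var sanitizeSize = require('../sanitiser/_size')( 10, 10, 10 );

var clean = {};
var messages = sanitizeSize( { size: 3 }, clean );

console.log( clean.size );         // 10
console.log( messages.warnings );  // [ "out-of-range integer 'size', using MIN_SIZE" ]
```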

sanitiser/reverse.js (2)

@@ -8,7 +8,7 @@ var sanitizeAll = require('../sanitiser/sanitizeAll'),
sources: require('../sanitiser/_targets')('sources', type_mapping.source_mapping),
// depends on the layers and sources sanitisers, must be run after them
sources_and_layers: require('../sanitiser/_sources_and_layers'),
size: require('../sanitiser/_size'),
size: require('../sanitiser/_size')(/* use defaults*/),
private: require('../sanitiser/_flag_bool')('private', false),
geo_reverse: require('../sanitiser/_geo_reverse'),
boundary_country: require('../sanitiser/_boundary_country'),

sanitiser/search.js (2)

@@ -5,7 +5,7 @@ var sanitizeAll = require('../sanitiser/sanitizeAll'),
quattroshapes_deprecation: require('../sanitiser/_deprecate_quattroshapes'),
singleScalarParameters: require('../sanitiser/_single_scalar_parameters'),
text: require('../sanitiser/_text'),
size: require('../sanitiser/_size'),
size: require('../sanitiser/_size')(/* use defaults*/),
layers: require('../sanitiser/_targets')('layers', type_mapping.layer_mapping),
sources: require('../sanitiser/_targets')('sources', type_mapping.source_mapping),
// depends on the layers and sources sanitisers, must be run after them

test/ciao/autocomplete/layers_alias_address.coffee (34)

@@ -0,0 +1,34 @@
#> layer alias
path: '/v1/autocomplete?text=a&layers=address'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql ["address"]

test/ciao/autocomplete/layers_alias_coarse.coffee (46)

@@ -0,0 +1,46 @@
#> layer alias
path: '/v1/autocomplete?text=a&layers=coarse'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql [ "continent",
"macrocountry",
"country",
"dependency",
"region",
"locality",
"localadmin",
"county",
"macrohood",
"neighbourhood",
"microhood",
"disputed"
]

test/ciao/autocomplete/layers_invalid.coffee (34)

@@ -0,0 +1,34 @@
#> invalid layer
path: '/v1/autocomplete?text=a&layers=notlayer'
#? 400 bad request
response.statusCode.should.be.equal 400
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,country,region,county,locality,continent,macrocountry,dependency,localadmin,macrohood,neighbourhood,microhood,disputed' ]
#? expected warnings
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10

test/ciao/autocomplete/layers_mix_invalid_valid.coffee (35)

@@ -0,0 +1,35 @@
#> mix of valid and invalid layers
path: '/v1/autocomplete?text=a&layers=country,notlayer'
#? 400 bad request
response.statusCode.should.be.equal 400
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notlayer\' is an invalid layers parameter. Valid options: coarse,address,venue,country,region,county,locality,continent,macrocountry,dependency,localadmin,macrohood,neighbourhood,microhood,disputed' ]
#? expected warnings
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
should.not.exist json.geocoding.query['layers']

test/ciao/autocomplete/layers_multiple.coffee (34)

@@ -0,0 +1,34 @@
#> multiple layers
path: '/v1/autocomplete?text=a&layers=country,region'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql ["country","region"]

test/ciao/autocomplete/layers_single.coffee (34)

@@ -0,0 +1,34 @@
#> single layer
path: '/v1/autocomplete?text=a&layers=country'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql ["country"]

test/ciao/autocomplete/size_not_default.coffee (34)

@@ -0,0 +1,34 @@
#> set size (autocomplete does not allow size to be changed)
path: '/v1/autocomplete?text=a&size=3'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.exist json.geocoding.warnings
json.geocoding.warnings.should.eql [ 'out-of-range integer \'size\', using MIN_SIZE' ]
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10 # should remain the default size

test/ciao/autocomplete/sources_invalid.coffee (34)

@@ -0,0 +1,34 @@
#> sources filter
path: '/v1/autocomplete?text=a&sources=openstreetmap,notasource'
#? 400 bad request
response.statusCode.should.be.equal 400
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ '\'notasource\' is an invalid sources parameter. Valid options: osm,oa,gn,wof,openstreetmap,openaddresses,geonames,whosonfirst' ]
#? expected warnings
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10

test/ciao/autocomplete/sources_layers_invalid_combo.coffee (37)

@@ -0,0 +1,37 @@
#> sources and layers specified (invalid combo)
path: '/v1/autocomplete?text=a&sources=whosonfirst&layers=address'
#? 400 bad request
response.statusCode.should.be.equal 400
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.exist json.geocoding.errors
json.geocoding.errors.should.eql [ 'You have specified both the `sources` and `layers` parameters in a combination that will return no results: the whosonfirst source has nothing in the address layer' ]
#? expected warnings
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql ["address"]
json.geocoding.query.sources.should.eql ["whosonfirst"]
should.not.exist json.geocoding.query['type']

test/ciao/autocomplete/sources_layers_valid_combo.coffee (35)

@@ -0,0 +1,35 @@
#> sources and layers specified
path: '/v1/autocomplete?text=a&sources=openaddresses&layers=address'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.layers.should.eql ["address"]
json.geocoding.query.sources.should.eql ["openaddresses"]

test/ciao/autocomplete/sources_multiple.coffee (34)

@@ -0,0 +1,34 @@
#> sources filter
path: "/v1/autocomplete?text=a&sources=openstreetmap,geonames"
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header "charset", 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.sources.should.eql ["openstreetmap", "geonames"]

test/ciao/autocomplete/sources_single.coffee (34)

@@ -0,0 +1,34 @@
#> sources filter
path: '/v1/autocomplete?text=a&sources=openstreetmap'
#? 200 ok
response.statusCode.should.be.equal 200
response.should.have.header 'charset', 'utf8'
response.should.have.header 'content-type', 'application/json; charset=utf-8'
#? valid geocoding block
should.exist json.geocoding
should.exist json.geocoding.version
should.exist json.geocoding.attribution
should.exist json.geocoding.query
should.exist json.geocoding.engine
should.exist json.geocoding.engine.name
should.exist json.geocoding.engine.author
should.exist json.geocoding.engine.version
should.exist json.geocoding.timestamp
#? valid geojson
json.type.should.be.equal 'FeatureCollection'
json.features.should.be.instanceof Array
#? expected errors
should.not.exist json.geocoding.errors
#? expected warnings
should.not.exist json.geocoding.warnings
#? inputs
json.geocoding.query['text'].should.eql 'a'
json.geocoding.query['size'].should.eql 10
json.geocoding.query.sources.should.eql ["openstreetmap"]

test/ciao_test_data.js (58)

@@ -0,0 +1,58 @@
/**
Test data required by the ciao test suite.
Some tests will fail when run against an empty index; you can use this script
to insert some dummy data into your index before running the tests.
note: as this is dummy data, care should be taken in order to make sure these
documents don't end up in your production index; for that reason the HTTP port
has been hard-coded as port:9200.
**/
// we use the default config to avoid making calls to
// a cluster running on a non-standard port.
var client = require('elasticsearch').Client(),
async = require('async'),
actions = [];
// add one record per 'type' in order to cause the _default_ mapping
// to be copied when the new type is created.
var types = ['venue','address','county','region','county','country','admin0','admin1','admin2'],
sources = ['test'],
layers = ['geonames'];
// iterate over all types/sources/layers and index a test document
types.forEach( function( type, i1 ){
sources.forEach( function( source, i2 ){
layers.forEach( function( layer, i3 ){
actions.push( function( done ){
client.index({
index: 'pelias',
type: type,
id: [i1,i2,i3].join(':'),
body: {
source: source,
layer: layer,
name: { default: 'test' },
phrase: { default: 'test' },
parent: {
country_a: ['USA']
}
}
});
done();
});
});
});
});
// call refresh so the index merges the changes
actions.push( function( done ){
client.indices.refresh( { index: 'pelias' }, done);
});
// perform all actions in series
async.series( actions, function( err, resp ){
console.log('test data imported');
});

test/unit/fixture/autocomplete_with_source_filtering.js (85)

@@ -0,0 +1,85 @@
module.exports = {
'query': {
'filtered': {
'query': {
'bool': {
'must': [{
'match': {
'name.default': {
'analyzer': 'peliasPhrase',
'boost': 100,
'query': 'test',
'type': 'phrase',
'operator': 'and'
}
}
}],
'should':[{
'function_score': {
'query': {
'match': {
'name.default': {
'analyzer': 'peliasPhrase',
'boost': 100,
'query': 'test',
'type': 'phrase',
'operator': 'and'
}
}
},
'max_boost': 20,
'score_mode': 'first',
'boost_mode': 'replace',
'functions': [{
'field_value_factor': {
'modifier': 'log1p',
'field': 'popularity',
'missing': 1
},
'weight': 1
}]
}
},{
'function_score': {
'query': {
'match': {
'name.default': {
'analyzer': 'peliasPhrase',
'boost': 100,
'query': 'test',
'type': 'phrase',
'operator': 'and'
}
}
},
'max_boost': 20,
'score_mode': 'first',
'boost_mode': 'replace',
'functions': [{
'field_value_factor': {
'modifier': 'log1p',
'field': 'population',
'missing': 1
},
'weight': 3
}]
}
}]
}
},
'filter': {
'bool': {
'must': [{
'terms': {
'source': ['test_source']
}
}]
}
}
}
},
'sort': [ '_score' ],
'size': 20,
'track_scores': true
};

test/unit/fixture/search_with_source_filtering.js (93)

@@ -0,0 +1,93 @@
module.exports = {
'query': {
'filtered': {
'query': {
'bool': {
'must': [{
'match': {
'name.default': {
'query': 'test',
'boost': 1,
'analyzer': 'peliasOneEdgeGram'
}
}
}],
'should': [{
'match': {
'phrase.default': {
'query': 'test',
'analyzer': 'peliasPhrase',
'type': 'phrase',
'boost': 1,
'slop': 2
}
}
},{
'function_score': {
'query': {
'match': {
'phrase.default': {
'query': 'test',
'analyzer': 'peliasPhrase',
'type': 'phrase',
'slop': 2,
'boost': 1
}
}
},
'max_boost': 20,
'score_mode': 'first',
'boost_mode': 'replace',
'functions': [{
'field_value_factor': {
'modifier': 'log1p',
'field': 'popularity',
'missing': 1
},
'weight': 1
}]
}
},{
'function_score': {
'query': {
'match': {
'phrase.default': {
'query': 'test',
'analyzer': 'peliasPhrase',
'type': 'phrase',
'slop': 2,
'boost': 1
}
}
},
'max_boost': 20,
'score_mode': 'first',
'boost_mode': 'replace',
'functions': [{
'field_value_factor': {
'modifier': 'log1p',
'field': 'population',
'missing': 1
},
'weight': 2
}]
}
}]
}
},
'filter': {
'bool': {
'must': [{
'terms': {
'source': ['test_source']
}
}]
}
}
}
},
'sort': [ '_score' ],
'size': 20,
'track_scores': true
};

test/unit/query/autocomplete.js (13)

@@ -95,6 +95,19 @@ module.exports.tests.query = function(test, common) {
t.deepEqual(compiled, expected, 'valid autocomplete query');
t.end();
});
test('valid sources filter', function(t) {
var query = generate({
'text': 'test',
'sources': ['test_source']
});
var compiled = JSON.parse( JSON.stringify( query ) );
var expected = require('../fixture/autocomplete_with_source_filtering');
t.deepEqual(compiled, expected, 'valid autocomplete query with source filtering');
t.end();
});
};
module.exports.all = function (tape, common) {

test/unit/query/search.js (13)

@@ -182,6 +182,19 @@ module.exports.tests.query = function(test, common) {
t.end();
});
test('valid sources filter', function(t) {
var query = generate({
'text': 'test',
'sources': ['test_source']
});
var compiled = JSON.parse( JSON.stringify( query ) );
var expected = require('../fixture/search_with_source_filtering');
t.deepEqual(compiled, expected, 'valid search query with source filtering');
t.end();
});
};
module.exports.all = function (tape, common) {

test/unit/sanitiser/_size.js (8)

@@ -6,7 +6,7 @@ module.exports.tests.sanitize_size = function(test, common) {
test('size=0', function(t) {
var raw = { size: 0 };
var clean = {};
var res = sanitize(raw, clean);
var res = sanitize(/*defaults*/)(raw, clean);
t.equal(res.errors.length, 0, 'should return no errors');
t.equal(res.warnings.length, 1, 'should return warning');
t.equal(res.warnings[0], 'out-of-range integer \'size\', using MIN_SIZE', 'check warning text');
@@ -17,7 +17,7 @@ module.exports.tests.sanitize_size = function(test, common) {
test('size=10000', function(t) {
var raw = { size: 10000 };
var clean = {};
var res = sanitize(raw, clean);
var res = sanitize(/*defaults*/)(raw, clean);
t.equal(res.errors.length, 0, 'should return no errors');
t.equal(res.warnings.length, 1, 'should return warning');
t.equal(res.warnings[0], 'out-of-range integer \'size\', using MAX_SIZE', 'check warning text');
@@ -28,7 +28,7 @@ module.exports.tests.sanitize_size = function(test, common) {
test('size not set', function(t) {
var raw = {};
var clean = {};
var res = sanitize(raw, clean);
var res = sanitize(/*defaults*/)(raw, clean);
t.equal(res.errors.length, 0, 'should return no errors');
t.equal(res.warnings.length, 0, 'should return no warning');
t.equal(clean.size, 10, 'default to 10');
@@ -41,7 +41,7 @@ module.exports.tests.sanitize_size = function(test, common) {
test('size=' + size, function (t) {
var raw = {size: size};
var clean = {};
var res = sanitize(raw, clean);
var res = sanitize(/*defaults*/)(raw, clean);
t.equal(res.errors.length, 0, 'should return no errors');
t.equal(res.warnings.length, 0, 'should return no warning');
t.equal(clean.size, 5, 'set to correct integer');

test/unit/sanitiser/autocomplete.js (2)

@@ -4,7 +4,7 @@ module.exports.tests = {};
module.exports.tests.sanitisers = function(test, common) {
test('check sanitiser list', function (t) {
var expected = ['singleScalarParameters', 'text', 'size', 'private', 'geo_autocomplete' ];
var expected = ['singleScalarParameters', 'text', 'size', 'layers', 'sources', 'sources_and_layers', 'private', 'geo_autocomplete' ];
t.deepEqual(Object.keys(autocomplete.sanitiser_list), expected);
t.end();
});
