mirror of
https://gitlab.com/openstapps/openstapps.git
synced 2026-01-10 19:52:53 +00:00
committed by
Rainer Killinger
parent
fe7dd09d7e
commit
5eefa3c2e1
281
test/storage/elasticsearch/aggregations.spec.ts
Normal file
281
test/storage/elasticsearch/aggregations.spec.ts
Normal file
@@ -0,0 +1,281 @@
|
||||
/*
|
||||
* Copyright (C) 2020 StApps
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
import {SCFacet, SCThingType} from '@openstapps/core';
|
||||
import {expect} from 'chai';
|
||||
import {parseAggregations} from '../../../src/storage/elasticsearch/aggregations';
|
||||
import {AggregationResponse, AggregationSchema} from '../../../src/storage/elasticsearch/common';
|
||||
|
||||
describe('Aggregations', function () {
|
||||
const schema: AggregationSchema = {
|
||||
'@all': {
|
||||
aggs: {
|
||||
type: {
|
||||
terms: {
|
||||
field: 'type.raw',
|
||||
size: 1000
|
||||
}
|
||||
}
|
||||
},
|
||||
filter: {
|
||||
match_all: {}
|
||||
}
|
||||
},
|
||||
'academic event': {
|
||||
aggs: {
|
||||
'academicTerms.acronym': {
|
||||
terms: {
|
||||
field: 'academicTerms.acronym.raw',
|
||||
size: 1000
|
||||
}
|
||||
},
|
||||
'catalogs.categories': {
|
||||
terms: {
|
||||
field: 'catalogs.categories.raw',
|
||||
size: 1000
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
terms: {
|
||||
field: 'categories.raw',
|
||||
size: 1000
|
||||
}
|
||||
},
|
||||
'creativeWorks.keywords': {
|
||||
terms: {
|
||||
field: 'creativeWorks.keywords.raw',
|
||||
size: 1000
|
||||
}
|
||||
},
|
||||
majors: {
|
||||
terms: {
|
||||
field: 'majors.raw',
|
||||
size: 1000
|
||||
}
|
||||
}
|
||||
},
|
||||
filter: {
|
||||
type: {
|
||||
value: 'academic event'
|
||||
}
|
||||
}
|
||||
},
|
||||
catalog: {
|
||||
aggs: {
|
||||
'academicTerm.acronym': {
|
||||
terms: {
|
||||
field: 'academicTerm.acronym.raw',
|
||||
size: 1000
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
terms: {
|
||||
field: 'categories.raw',
|
||||
size: 1000
|
||||
}
|
||||
},
|
||||
'superCatalog.categories': {
|
||||
terms: {
|
||||
field: 'superCatalog.categories.raw',
|
||||
size: 1000
|
||||
}
|
||||
},
|
||||
'superCatalogs.categories': {
|
||||
terms: {
|
||||
field: 'superCatalogs.categories.raw',
|
||||
size: 1000
|
||||
}
|
||||
}
|
||||
},
|
||||
filter: {
|
||||
type: {
|
||||
value: 'catalog'
|
||||
}
|
||||
}
|
||||
},
|
||||
person: {
|
||||
aggs: {
|
||||
'homeLocations.categories': {
|
||||
terms: {
|
||||
field: 'homeLocations.categories.raw',
|
||||
size: 1000
|
||||
}
|
||||
}
|
||||
},
|
||||
filter: {
|
||||
type: {
|
||||
value: 'person'
|
||||
}
|
||||
}
|
||||
},
|
||||
fooType: {
|
||||
terms: {
|
||||
field: 'foo',
|
||||
size: 123,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const aggregations: AggregationResponse = {
|
||||
catalog: {
|
||||
doc_count: 4,
|
||||
'superCatalogs.categories': {
|
||||
buckets: []
|
||||
},
|
||||
'academicTerm.acronym': {
|
||||
buckets: [
|
||||
{
|
||||
key: 'SoSe 2020',
|
||||
doc_count: 2
|
||||
}
|
||||
]
|
||||
},
|
||||
'superCatalog.categories': {
|
||||
buckets: []
|
||||
},
|
||||
categories: {
|
||||
buckets: [
|
||||
{
|
||||
key: 'foo',
|
||||
doc_count: 1,
|
||||
},
|
||||
{
|
||||
key: 'bar',
|
||||
doc_count: 3,
|
||||
},
|
||||
]
|
||||
}
|
||||
},
|
||||
person: {
|
||||
doc_count: 13,
|
||||
'homeLocations.categories': {
|
||||
buckets: []
|
||||
}
|
||||
},
|
||||
'academic event': {
|
||||
doc_count: 0,
|
||||
'academicTerms.acronym': {
|
||||
buckets: []
|
||||
},
|
||||
categories: {
|
||||
buckets: [
|
||||
{
|
||||
key: 'foobar',
|
||||
doc_count: 8,
|
||||
},
|
||||
{
|
||||
key: 'bar',
|
||||
doc_count: 2,
|
||||
},
|
||||
]
|
||||
},
|
||||
'creativeWorks.keywords': {
|
||||
buckets: []
|
||||
}
|
||||
},
|
||||
fooType: {
|
||||
buckets: [
|
||||
{
|
||||
doc_count: 321,
|
||||
key: 'foo'
|
||||
}
|
||||
],
|
||||
},
|
||||
'@all': {
|
||||
doc_count: 17,
|
||||
type: {
|
||||
buckets: [
|
||||
{
|
||||
key: 'person',
|
||||
doc_count: 13
|
||||
},
|
||||
{
|
||||
key: 'catalog',
|
||||
doc_count: 4
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const expectedFacets: SCFacet[] = [
|
||||
{
|
||||
buckets: [
|
||||
{
|
||||
count: 13,
|
||||
'key': 'person'
|
||||
},
|
||||
{
|
||||
count: 4,
|
||||
key: 'catalog'
|
||||
}
|
||||
],
|
||||
field: 'type',
|
||||
},
|
||||
{
|
||||
buckets: [
|
||||
{
|
||||
count: 8,
|
||||
key: 'foobar'
|
||||
},
|
||||
{
|
||||
count: 2,
|
||||
key: 'bar'
|
||||
}
|
||||
],
|
||||
field: 'categories',
|
||||
onlyOnType: SCThingType.AcademicEvent,
|
||||
},
|
||||
{
|
||||
buckets: [
|
||||
{
|
||||
count: 2,
|
||||
key: 'SoSe 2020'
|
||||
}
|
||||
],
|
||||
field: 'academicTerm.acronym',
|
||||
onlyOnType: SCThingType.Catalog
|
||||
},
|
||||
{
|
||||
buckets: [
|
||||
{
|
||||
count: 1,
|
||||
key: 'foo'
|
||||
},
|
||||
{
|
||||
count: 3,
|
||||
key: 'bar'
|
||||
}
|
||||
],
|
||||
field: 'categories',
|
||||
onlyOnType: SCThingType.Catalog,
|
||||
},
|
||||
{
|
||||
buckets: [
|
||||
{
|
||||
count: 321,
|
||||
key: 'foo'
|
||||
}
|
||||
],
|
||||
field: 'fooType'
|
||||
}
|
||||
];
|
||||
|
||||
it('should parse the aggregations providing the appropriate facets', function () {
|
||||
const facets = parseAggregations(schema, aggregations);
|
||||
|
||||
expect(facets).to.be.eql(expectedFacets);
|
||||
});
|
||||
});
|
||||
88
test/storage/elasticsearch/common.spec.ts
Normal file
88
test/storage/elasticsearch/common.spec.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
/*
|
||||
* Copyright (C) 2020 StApps
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
import {
|
||||
ESAggMatchAllFilter,
|
||||
ESAggTypeFilter,
|
||||
ESNestedAggregation,
|
||||
ESTermsFilter
|
||||
} from '@openstapps/core-tools/lib/mappings/aggregation-definitions';
|
||||
import { expect } from "chai";
|
||||
import {
|
||||
BucketAggregation,
|
||||
isBucketAggregation, isESAggMatchAllFilter, isESNestedAggregation, isESTermsFilter,
|
||||
isNestedAggregation,
|
||||
NestedAggregation
|
||||
} from '../../../src/storage/elasticsearch/common';
|
||||
|
||||
describe('Common', function () {
|
||||
const bucketAggregation: BucketAggregation = {buckets: []};
|
||||
const esNestedAggregation: ESNestedAggregation = {aggs: {}, filter: {match_all: true}};
|
||||
const esTermsFilter: ESTermsFilter = {terms: {field: 'foo'}};
|
||||
|
||||
describe('isBucketAggregation', function () {
|
||||
it('should be false for a number', function () {
|
||||
expect(isBucketAggregation(123)).to.be.false;
|
||||
});
|
||||
|
||||
it('should be true for a bucket aggregation', function () {
|
||||
expect(isBucketAggregation(bucketAggregation)).to.be.true;
|
||||
});
|
||||
});
|
||||
|
||||
describe('isNestedAggregation', function () {
|
||||
it('should be false for a bucket aggregation', function () {
|
||||
expect(isNestedAggregation(bucketAggregation)).to.be.false;
|
||||
});
|
||||
|
||||
it('should be true for a nested aggregation', function () {
|
||||
const nestedAggregation: NestedAggregation = {doc_count: 123};
|
||||
|
||||
expect(isNestedAggregation(nestedAggregation)).to.be.true;
|
||||
});
|
||||
});
|
||||
|
||||
describe('isESTermsFilter', function () {
|
||||
it('should be false for an elasticsearch nested aggregation', function () {
|
||||
expect(isESTermsFilter(esNestedAggregation)).to.be.false;
|
||||
});
|
||||
|
||||
it('should be true for an elasticsearch terms filter', function () {
|
||||
expect(isESTermsFilter(esTermsFilter)).to.be.true;
|
||||
});
|
||||
});
|
||||
|
||||
describe('isESNestedAggregation', function () {
|
||||
it('should be false for an elasticsearch terms filter', function () {
|
||||
expect(isESNestedAggregation(esTermsFilter)).to.be.false;
|
||||
});
|
||||
|
||||
it('should be true for an elasticsearch nested aggregation', function () {
|
||||
expect(isESNestedAggregation(esNestedAggregation)).to.be.true;
|
||||
});
|
||||
});
|
||||
|
||||
describe('isESAggMatchAllFilter', function () {
|
||||
it('should be false for an elasticsearch aggregation type filter', function () {
|
||||
const aggregationTypeFilter: ESAggTypeFilter = {type: {value: 'foo'}};
|
||||
expect(isESAggMatchAllFilter(aggregationTypeFilter)).to.be.false;
|
||||
});
|
||||
|
||||
it('should be true for an elasticsearch aggregation match all filter', function () {
|
||||
const esAggMatchAllFilter: ESAggMatchAllFilter = {match_all: {}};
|
||||
expect(isESAggMatchAllFilter(esAggMatchAllFilter)).to.be.true;
|
||||
});
|
||||
});
|
||||
});
|
||||
641
test/storage/elasticsearch/elasticsearch.spec.ts
Normal file
641
test/storage/elasticsearch/elasticsearch.spec.ts
Normal file
@@ -0,0 +1,641 @@
|
||||
/*
|
||||
* Copyright (C) 2020 StApps
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
import {ApiResponse, Client} from '@elastic/elasticsearch';
|
||||
import {SCBook, SCBulkResponse, SCConfigFile, SCMessage, SCSearchQuery, SCThings, SCThingType} from '@openstapps/core';
|
||||
import {instance as book} from '@openstapps/core/test/resources/Book.1.json';
|
||||
import {instance as message} from '@openstapps/core/test/resources/Message.1.json';
|
||||
import {Logger} from '@openstapps/logger';
|
||||
import {SMTP} from '@openstapps/logger/lib/smtp';
|
||||
import {expect, use} from 'chai';
|
||||
import chaiAsPromised from 'chai-as-promised';
|
||||
import {SearchResponse} from 'elasticsearch';
|
||||
import mockedEnv from 'mocked-env';
|
||||
import sinon from 'sinon';
|
||||
import {configFile} from '../../../src/common';
|
||||
import {MailQueue} from '../../../src/notification/mail-queue';
|
||||
import * as aggregations from '../../../src/storage/elasticsearch/aggregations';
|
||||
import {ElasticsearchObject} from '../../../src/storage/elasticsearch/common';
|
||||
import {Elasticsearch} from '../../../src/storage/elasticsearch/elasticsearch';
|
||||
import * as Monitoring from '../../../src/storage/elasticsearch/monitoring';
|
||||
import * as query from '../../../src/storage/elasticsearch/query';
|
||||
import * as templating from '../../../src/storage/elasticsearch/templating';
|
||||
import {bulk, DEFAULT_TEST_TIMEOUT, getTransport, index} from '../../common';
|
||||
import fs from 'fs';
|
||||
|
||||
use(chaiAsPromised);
|
||||
|
||||
describe('Elasticsearch', function () {
|
||||
// increase timeout for the suite
|
||||
this.timeout(DEFAULT_TEST_TIMEOUT);
|
||||
const sandbox = sinon.createSandbox();
|
||||
let checkESTemplateStub: sinon.SinonStub = sandbox.stub(templating, 'checkESTemplate');
|
||||
|
||||
before(function () {
|
||||
console.log('before');
|
||||
sandbox.stub(fs, 'readFileSync').returns('{}');
|
||||
});
|
||||
after(function () {
|
||||
sandbox.restore();
|
||||
});
|
||||
|
||||
describe('getElasticsearchUrl', function () {
|
||||
it('should provide custom elasticsearch URL if defined', function () {
|
||||
const customAddress = 'http://foo-address:9200';
|
||||
const restore = mockedEnv({
|
||||
ES_ADDR: customAddress
|
||||
});
|
||||
|
||||
expect(Elasticsearch.getElasticsearchUrl()).to.be.equal(customAddress);
|
||||
// restore env variables
|
||||
restore();
|
||||
});
|
||||
|
||||
it('should provide local URL as fallback', function () {
|
||||
const restore = mockedEnv({
|
||||
ES_ADDR: undefined
|
||||
});
|
||||
|
||||
expect(Elasticsearch.getElasticsearchUrl()).to.match(/(https?:\/\/)?localhost(:\d+)?/);
|
||||
// restore env variables
|
||||
restore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getIndex (including getIndexUID)', function () {
|
||||
const type = 'foo bar type';
|
||||
const source = 'foo_source';
|
||||
const bulk: SCBulkResponse = {
|
||||
expiration: '',
|
||||
source: '',
|
||||
state: 'in progress',
|
||||
type: SCThingType.Semester,
|
||||
uid: 'bulk-uid-123-123-123'
|
||||
};
|
||||
|
||||
it('should provide index UID from the provided UID', function () {
|
||||
const indexUID = Elasticsearch.getIndexUID(bulk.uid);
|
||||
|
||||
expect(indexUID.length).to.be.equal(Elasticsearch.INDEX_UID_LENGTH);
|
||||
// test starting and ending character
|
||||
expect(indexUID[0]).to.be.equal(bulk.uid[0]);
|
||||
expect(indexUID[indexUID.length - 1]).to.be.equal(bulk.uid[Elasticsearch.INDEX_UID_LENGTH - 1]);
|
||||
});
|
||||
|
||||
it('should provide index name from the provided data', function () {
|
||||
expect(Elasticsearch.getIndex(type as SCThingType, source, bulk))
|
||||
.to.be.equal(`stapps_${type.split(' ').join('_')}_${source}_${Elasticsearch.getIndexUID(bulk.uid)}`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('removeAliasChars', function () {
|
||||
it('should remove invalid characters', function () {
|
||||
expect(Elasticsearch.removeAliasChars('f,o#o\\ b|ar/ <?al\ias>* ', 'bulk-uid')).to.be.equal('foobaralias');
|
||||
});
|
||||
|
||||
it('should remove invalid starting characters', function () {
|
||||
expect(Elasticsearch.removeAliasChars('-foobaralias', 'bulk-uid')).to.be.equal('foobaralias');
|
||||
expect(Elasticsearch.removeAliasChars('_foobaralias', 'bulk-uid')).to.be.equal('foobaralias');
|
||||
expect(Elasticsearch.removeAliasChars('+foobaralias', 'bulk-uid')).to.be.equal('foobaralias');
|
||||
});
|
||||
|
||||
it('should replace with a placeholder in case of invalid alias', function () {
|
||||
expect(Elasticsearch.removeAliasChars('.', 'bulk-uid')).to.contain('placeholder');
|
||||
expect(Elasticsearch.removeAliasChars('..', 'bulk-uid')).to.contain('placeholder');
|
||||
});
|
||||
|
||||
it('should work with common cases', function () {
|
||||
expect(Elasticsearch.removeAliasChars('the-quick-brown-fox-jumps-over-the-lazy-dog-1234567890', 'bulk-uid'))
|
||||
.to.be.equal('the-quick-brown-fox-jumps-over-the-lazy-dog-1234567890');
|
||||
expect(Elasticsearch.removeAliasChars('THE_QUICK_BROWN_FOX_JUMPS_OVER_THE_LAZY_DOG', 'bulk-uid'))
|
||||
.to.be.equal('THE_QUICK_BROWN_FOX_JUMPS_OVER_THE_LAZY_DOG');
|
||||
});
|
||||
|
||||
it('should warn in case of characters that are invalid in future elasticsearch versions', function () {
|
||||
const sandbox = sinon.createSandbox();
|
||||
const loggerWarnStub = sandbox.stub(Logger, 'warn');
|
||||
|
||||
expect(Elasticsearch.removeAliasChars('foo:bar:alias', 'bulk-uid')).to.contain('foo:bar:alias');
|
||||
expect(loggerWarnStub.called).to.be.true;
|
||||
});
|
||||
});
|
||||
|
||||
describe('constructor', async function () {
|
||||
const sandbox = sinon.createSandbox();
|
||||
afterEach(function () {
|
||||
sandbox.restore();
|
||||
});
|
||||
|
||||
it('should complain (throw an error) if database in config is undefined', function () {
|
||||
const config: SCConfigFile = {...configFile, internal: {...configFile.internal, database: undefined}};
|
||||
|
||||
expect(() => new Elasticsearch(config)).to.throw(Error);
|
||||
});
|
||||
|
||||
it('should complain (throw an error) if database version is not a string', function () {
|
||||
const config: SCConfigFile = {
|
||||
...configFile,
|
||||
internal: {
|
||||
...configFile.internal,
|
||||
database: {
|
||||
name: 'foo',
|
||||
version: 123
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
expect(() => new Elasticsearch(config)).to.throw(Error);
|
||||
});
|
||||
|
||||
it('should log an error in case of there is one when getting response from the elasticsearch client', async function () {
|
||||
const error = Error('Foo Error');
|
||||
const loggerErrorStub = sandbox.stub(Logger, 'error').resolves('foo');
|
||||
sandbox.stub(Client.prototype, 'on').yields(error);
|
||||
|
||||
new Elasticsearch(configFile);
|
||||
|
||||
expect(loggerErrorStub.calledWith(error)).to.be.true;
|
||||
});
|
||||
|
||||
it('should log the result in the debug mode when getting response from the elasticsearch client', async function () {
|
||||
const fakeResponse = {foo: 'bar'};
|
||||
const loggerLogStub = sandbox.stub(Logger, 'log');
|
||||
sandbox.stub(Client.prototype, 'on').yields(null, fakeResponse);
|
||||
|
||||
new Elasticsearch(configFile);
|
||||
expect(loggerLogStub.calledWith(fakeResponse)).to.be.false;
|
||||
|
||||
const restore = mockedEnv({
|
||||
'ES_DEBUG': 'true',
|
||||
});
|
||||
new Elasticsearch(configFile);
|
||||
|
||||
expect(loggerLogStub.calledWith(fakeResponse)).to.be.true;
|
||||
// restore env variables
|
||||
restore();
|
||||
});
|
||||
|
||||
it('should force mapping update if related process env variable is not set', async function () {
|
||||
const restore = mockedEnv({
|
||||
'ES_FORCE_MAPPING_UPDATE': undefined,
|
||||
});
|
||||
|
||||
new Elasticsearch(configFile);
|
||||
|
||||
expect(checkESTemplateStub.calledWith(false)).to.be.true;
|
||||
// restore env variables
|
||||
restore();
|
||||
});
|
||||
|
||||
it('should force mapping update if related process env variable is set', async function () {
|
||||
const restore = mockedEnv({
|
||||
'ES_FORCE_MAPPING_UPDATE': 'true',
|
||||
});
|
||||
|
||||
new Elasticsearch(configFile);
|
||||
|
||||
expect(checkESTemplateStub.calledWith(true)).to.be.true;
|
||||
// restore env variables
|
||||
restore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('init', async function () {
|
||||
const sandbox = sinon.createSandbox();
|
||||
after(function () {
|
||||
sandbox.restore();
|
||||
});
|
||||
|
||||
it('should complain (throw an error) if monitoring is set but mail queue is undefined', async function () {
|
||||
const config: SCConfigFile = {
|
||||
...configFile,
|
||||
internal: {
|
||||
...configFile.internal,
|
||||
monitoring: {
|
||||
actions: [],
|
||||
watchers: [],
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const es = new Elasticsearch(config);
|
||||
|
||||
return expect(es.init()).to.be.rejected;
|
||||
});
|
||||
|
||||
it('should setup the monitoring if there is monitoring is set and mail queue is defined', function () {
|
||||
const config: SCConfigFile = {
|
||||
...configFile,
|
||||
internal: {
|
||||
...configFile.internal,
|
||||
monitoring: {
|
||||
actions: [],
|
||||
watchers: [],
|
||||
}
|
||||
}
|
||||
};
|
||||
const monitoringSetUpStub = sandbox.stub(Monitoring, 'setUp');
|
||||
|
||||
const es = new Elasticsearch(config, new MailQueue(getTransport(false) as unknown as SMTP));
|
||||
|
||||
es.init();
|
||||
|
||||
expect(monitoringSetUpStub.called).to.be.true;
|
||||
});
|
||||
});
|
||||
|
||||
describe('Operations with bundle/index', async function () {
|
||||
const sandbox = sinon.createSandbox();
|
||||
let es: Elasticsearch;
|
||||
const oldIndex = 'stapps_footype_foosource_oldindex';
|
||||
|
||||
beforeEach(function () {
|
||||
es = new Elasticsearch(configFile);
|
||||
// @ts-ignore
|
||||
es.client.indices = {
|
||||
// @ts-ignore
|
||||
getAlias: () => Promise.resolve({body: [{[oldIndex]: {aliases: {[SCThingType.Book]: {}}}}]}),
|
||||
// @ts-ignore
|
||||
putTemplate: () => Promise.resolve({}),
|
||||
// @ts-ignore
|
||||
create: () => Promise.resolve({}),
|
||||
// @ts-ignore
|
||||
delete: () => Promise.resolve({}),
|
||||
// @ts-ignore
|
||||
exists: () => Promise.resolve({}),
|
||||
// @ts-ignore
|
||||
refresh: () => Promise.resolve({}),
|
||||
// @ts-ignore
|
||||
updateAliases: () => Promise.resolve({})
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(function () {
|
||||
sandbox.restore();
|
||||
});
|
||||
|
||||
describe('bulkCreated', async function () {
|
||||
it('should reject (throw an error) if the connection to elasticsearch is not established', async function () {
|
||||
return expect(es.bulkCreated(bulk)).to.be.rejectedWith('elasticsearch');
|
||||
});
|
||||
|
||||
it('should reject (throw an error) if the index name is not valid', async function () {
|
||||
sandbox.stub(Elasticsearch, 'getIndex').returns(`invalid_${index}`);
|
||||
sandbox.createStubInstance(Client, {});
|
||||
await es.init();
|
||||
|
||||
return expect(es.bulkCreated(bulk)).to.be.rejectedWith('Index');
|
||||
});
|
||||
|
||||
it('should create a new index', async function () {
|
||||
sandbox.stub(Elasticsearch, 'getIndex').returns(index);
|
||||
const putTemplateStub = sandbox.stub(templating, 'putTemplate');
|
||||
const createStub = sandbox.stub(es.client.indices, 'create');
|
||||
await es.init();
|
||||
|
||||
await es.bulkCreated(bulk);
|
||||
|
||||
expect(putTemplateStub.called).to.be.true;
|
||||
expect(createStub.calledWith({index})).to.be.true;
|
||||
});
|
||||
});
|
||||
|
||||
describe('bulkExpired', async function () {
|
||||
const sandbox = sinon.createSandbox();
|
||||
afterEach(function () {
|
||||
sandbox.restore();
|
||||
});
|
||||
it('should cleanup index in case of the expired bulk for bulk whose index is not in use', async function () {
|
||||
sandbox.stub(Elasticsearch, 'getIndex').returns(index);
|
||||
const clientDeleteStub = sandbox.stub(es.client.indices, 'delete');
|
||||
|
||||
await es.bulkExpired({...bulk, state: 'in progress'});
|
||||
|
||||
expect(clientDeleteStub.called).to.be.true;
|
||||
});
|
||||
|
||||
it('should not cleanup index in case of the expired bulk for bulk whose index is in use', async function () {
|
||||
sandbox.stub(Elasticsearch, 'getIndex').returns(index);
|
||||
const clientDeleteStub = sandbox.stub(es.client.indices, 'delete');
|
||||
|
||||
await es.bulkExpired({...bulk, state: 'done'});
|
||||
|
||||
expect(clientDeleteStub.called).to.be.false;
|
||||
});
|
||||
});
|
||||
|
||||
describe('bulkUpdated', function () {
|
||||
it('should reject if the connection to elasticsearch is not established', async function () {
|
||||
return expect(es.bulkUpdated(bulk)).to.be.rejectedWith('elasticsearch');
|
||||
});
|
||||
|
||||
it('should reject if the index name is not valid', async function () {
|
||||
sandbox.stub(Elasticsearch, 'getIndex').returns(`invalid_${index}`);
|
||||
sandbox.createStubInstance(Client, {});
|
||||
await es.init();
|
||||
|
||||
return expect(es.bulkUpdated(bulk)).to.be.rejectedWith('Index');
|
||||
});
|
||||
|
||||
it('should create a new index', async function () {
|
||||
const expectedRefreshActions = [
|
||||
{
|
||||
add: {index: index, alias: SCThingType.Book},
|
||||
},
|
||||
{
|
||||
remove: {index: oldIndex, alias: SCThingType.Book},
|
||||
}
|
||||
];
|
||||
sandbox.stub(Elasticsearch, 'getIndex').returns(index);
|
||||
sandbox.stub(es, 'aliasMap').value({
|
||||
[SCThingType.Book]: {
|
||||
[bulk.source]: oldIndex,
|
||||
}
|
||||
});
|
||||
const refreshStub = sandbox.stub(es.client.indices, 'refresh');
|
||||
const updateAliasesStub = sandbox.stub(es.client.indices, 'updateAliases');
|
||||
const deleteStub = sandbox.stub(es.client.indices, 'delete');
|
||||
sandbox.stub(templating, 'putTemplate');
|
||||
await es.init();
|
||||
|
||||
await es.bulkUpdated(bulk);
|
||||
|
||||
expect(refreshStub.calledWith({index})).to.be.true;
|
||||
expect(updateAliasesStub.calledWith({
|
||||
body: {
|
||||
actions: expectedRefreshActions
|
||||
}
|
||||
})
|
||||
).to.be.true;
|
||||
expect(deleteStub.called).to.be.true;
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('get', async function () {
|
||||
let es: Elasticsearch;
|
||||
const sandbox = sinon.createSandbox();
|
||||
|
||||
before(function () {
|
||||
es = new Elasticsearch(configFile);
|
||||
});
|
||||
|
||||
afterEach(function () {
|
||||
sandbox.restore();
|
||||
});
|
||||
|
||||
it('should reject if object is not found', async function () {
|
||||
sandbox.stub(es.client, 'search').resolves({body:{hits: { hits: []}}});
|
||||
|
||||
return expect(es.get('123')).to.rejectedWith('found');
|
||||
});
|
||||
|
||||
it('should provide the thing if object is found', async function () {
|
||||
const foundObject: ElasticsearchObject<SCMessage> = {_id: '', _index: '', _score: 0, _type: '', _source: message as SCMessage};
|
||||
sandbox.stub(es.client, 'search').resolves({body:{hits: { hits: [foundObject]}}});
|
||||
|
||||
return expect(await es.get('123')).to.be.eql(message);
|
||||
});
|
||||
});
|
||||
|
||||
describe('post', async function () {
|
||||
let es: Elasticsearch;
|
||||
const sandbox = sinon.createSandbox();
|
||||
|
||||
before(function () {
|
||||
es = new Elasticsearch(configFile);
|
||||
});
|
||||
|
||||
afterEach(function () {
|
||||
sandbox.restore();
|
||||
});
|
||||
|
||||
it('should not post if the object already exists in an index which will not be rolled over', async function () {
|
||||
const oldIndex = index.replace('foosource', 'barsource');
|
||||
const object: ElasticsearchObject<SCMessage> = {_id: '', _index: oldIndex, _score: 0, _type: '', _source: message as SCMessage};
|
||||
sandbox.stub(es.client, 'search').resolves({body:{hits: { hits: [object]}}});
|
||||
sandbox.stub(Elasticsearch, 'getIndex').returns(index);
|
||||
|
||||
return expect(es.post(object._source, bulk)).to.rejectedWith('exist');
|
||||
});
|
||||
|
||||
it('should reject if there is an object creation error on the elasticsearch side', async function () {
|
||||
sandbox.stub(es.client, 'search').resolves({body:{hits: { hits: []}}});
|
||||
sandbox.stub(es.client, 'create').resolves({body: {created: false}});
|
||||
|
||||
return expect(es.post(message as SCMessage, bulk)).to.rejectedWith('creation');
|
||||
});
|
||||
|
||||
it('should create a new object', async function () {
|
||||
let caughtParam: any;
|
||||
sandbox.stub(es.client, 'search').resolves({body:{hits: { hits: []}}});
|
||||
// @ts-ignore
|
||||
let createStub = sandbox.stub(es.client, 'create').callsFake((param) => {
|
||||
caughtParam = param;
|
||||
return Promise.resolve({body: { created: true }});
|
||||
});
|
||||
|
||||
await es.post(message as SCMessage, bulk);
|
||||
|
||||
expect(createStub.called).to.be.true;
|
||||
expect(caughtParam.body).to.be.eql({...message, creation_date: caughtParam.body.creation_date});
|
||||
});
|
||||
});
|
||||
|
||||
describe('put', async function () {
|
||||
let es: Elasticsearch;
|
||||
const sandbox = sinon.createSandbox();
|
||||
|
||||
before(function () {
|
||||
es = new Elasticsearch(configFile);
|
||||
});
|
||||
afterEach(function () {
|
||||
sandbox.restore();
|
||||
});
|
||||
it('should reject to put if the object does not already exist', async function () {
|
||||
const oldIndex = index.replace('foosource', 'barsource');
|
||||
const object: ElasticsearchObject<SCMessage> = {_id: '', _index: oldIndex, _score: 0, _type: '', _source: message as SCMessage};
|
||||
sandbox.stub(es.client, 'search').resolves({body:{hits: { hits: []}}});
|
||||
|
||||
return expect(es.put(object._source)).to.rejectedWith('exist');
|
||||
});
|
||||
|
||||
it('should update the object if it already exists', async function () {
|
||||
let caughtParam: any;
|
||||
const oldIndex = index.replace('foosource', 'barsource');
|
||||
const object: ElasticsearchObject<SCMessage> = {_id: '', _index: oldIndex, _score: 0, _type: '', _source: message as SCMessage};
|
||||
sandbox.stub(es.client, 'search').resolves({body:{hits: { hits: [object]}}});
|
||||
// @ts-ignore
|
||||
const stubUpdate = sandbox.stub(es.client, 'update').callsFake((params) => {
|
||||
caughtParam = params;
|
||||
return Promise.resolve({body: { created: true }});
|
||||
});
|
||||
|
||||
await es.put(object._source);
|
||||
|
||||
expect(caughtParam.body.doc).to.be.eql(object._source);
|
||||
});
|
||||
});
|
||||
|
||||
describe('search', async function () {
|
||||
let es: Elasticsearch;
|
||||
const sandbox = sinon.createSandbox();
|
||||
const objectMessage: ElasticsearchObject<SCMessage> = {_id: '123', _index: index, _score: 0, _type: '', _source: message as SCMessage};
|
||||
const objectBook: ElasticsearchObject<SCBook> = {_id: '321', _index: index, _score: 0, _type: '', _source: book as SCBook};
|
||||
const fakeEsAggregations = {
|
||||
'@all': {
|
||||
doc_count: 17,
|
||||
type: {
|
||||
doc_count_error_upper_bound: 0,
|
||||
sum_other_doc_count: 0,
|
||||
buckets: [
|
||||
{
|
||||
key: 'person',
|
||||
doc_count: 13
|
||||
},
|
||||
{
|
||||
key: 'catalog',
|
||||
doc_count: 4
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
const fakeSearchResponse: Partial<ApiResponse<SearchResponse<SCThings>>> = {
|
||||
// @ts-ignore
|
||||
body: {
|
||||
took: 12,
|
||||
timed_out: false,
|
||||
// @ts-ignore
|
||||
_shards: {},
|
||||
// @ts-ignore
|
||||
hits: {
|
||||
hits: [
|
||||
objectMessage,
|
||||
objectBook,
|
||||
],
|
||||
total: 123
|
||||
},
|
||||
aggregations: fakeEsAggregations
|
||||
},
|
||||
headers: {},
|
||||
// @ts-ignore
|
||||
meta: {},
|
||||
// @ts-ignore
|
||||
statusCode: {},
|
||||
// @ts-ignore
|
||||
warnings: {}
|
||||
};
|
||||
let searchStub: sinon.SinonStub;
|
||||
before(function () {
|
||||
es = new Elasticsearch(configFile);
|
||||
});
|
||||
beforeEach(function () {
|
||||
searchStub = sandbox.stub(es.client, 'search').resolves(fakeSearchResponse);
|
||||
});
|
||||
afterEach(function () {
|
||||
sandbox.restore();
|
||||
});
|
||||
|
||||
it('should provide appropriate data and facets', async function () {
|
||||
const fakeFacets = [
|
||||
{
|
||||
buckets: [
|
||||
{
|
||||
count: 1,
|
||||
'key': 'foo'
|
||||
},
|
||||
{
|
||||
count: 1,
|
||||
key: 'bar'
|
||||
}
|
||||
],
|
||||
field: 'type',
|
||||
}
|
||||
];
|
||||
const parseAggregationsStub = sandbox.stub(aggregations, 'parseAggregations').returns(fakeFacets);
|
||||
|
||||
const {data, facets} = await es.search({});
|
||||
|
||||
expect(data).to.be.eql([objectMessage._source, objectBook._source]);
|
||||
expect(facets).to.be.eql(fakeFacets);
|
||||
expect(parseAggregationsStub.calledWith(sinon.match.any, fakeEsAggregations)).to.be.true;
|
||||
});
|
||||
|
||||
it('should provide pagination from params', async function () {
|
||||
const from = 30;
|
||||
const {pagination} = await es.search({from});
|
||||
|
||||
expect(pagination).to.be.eql({
|
||||
count: fakeSearchResponse.body!.hits.hits.length,
|
||||
offset: from,
|
||||
total: fakeSearchResponse.body!.hits.total
|
||||
});
|
||||
});
|
||||
|
||||
it('should have fallback to zero if from is not given through params', async function () {
|
||||
const {pagination} = await es.search({});
|
||||
|
||||
expect(pagination.offset).to.be.equal(0);
|
||||
});
|
||||
|
||||
it('should build the search request properly', async function () {
  // Representative query exercising every top-level search parameter:
  // free-text query, paging (from/size), one sort, and one filter.
  const params: SCSearchQuery = {
    query: 'mathematics',
    from: 30,
    size: 5,
    sort: [
      {
        type: 'ducet',
        order: 'desc',
        arguments:
          {
            field: 'name'
          }
      }
    ],
    filter: {
      type: 'value',
      arguments: {
        field: 'type',
        value: SCThingType.AcademicEvent
      }
    }
  };
  // Sentinel values: the stubs below return these so we can verify they
  // are forwarded verbatim into the elasticsearch request body.
  const fakeResponse = {foo: 'bar'};
  const fakeBuildSortResponse = [fakeResponse];
  // @ts-ignore
  sandbox.stub(query, 'buildQuery').returns(fakeResponse);
  // @ts-ignore
  sandbox.stub(query, 'buildSort').returns(fakeBuildSortResponse);

  await es.search(params);

  // The client's search() must receive the stubbed query/sort, the
  // instance's aggregation schema, paging from params, and all indices.
  sandbox.assert
    .calledWithMatch(searchStub,
      {
        body: {
          aggs: es.aggregationsSchema,
          query: fakeResponse,
          sort: fakeBuildSortResponse
        },
        from: params.from,
        index: Elasticsearch.getListOfAllIndices(),
        size: params.size,
      }
    );
});
|
||||
});
|
||||
});
|
||||
138
test/storage/elasticsearch/monitoring.spec.ts
Normal file
138
test/storage/elasticsearch/monitoring.spec.ts
Normal file
@@ -0,0 +1,138 @@
|
||||
/*
|
||||
* Copyright (C) 2020 StApps
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
import {ApiResponse, Client} from '@elastic/elasticsearch';
|
||||
import {
|
||||
SCMonitoringConfiguration,
|
||||
SCMonitoringLogAction,
|
||||
SCMonitoringMailAction,
|
||||
SCMonitoringWatcher, SCThings
|
||||
} from '@openstapps/core';
|
||||
import {Logger} from '@openstapps/logger';
|
||||
import {SearchResponse} from 'elasticsearch';
|
||||
import {MailQueue} from '../../../src/notification/mail-queue';
|
||||
import {setUp} from '../../../src/storage/elasticsearch/monitoring';
|
||||
|
||||
import {getTransport} from '../../common';
|
||||
import { expect } from 'chai';
|
||||
import sinon from 'sinon';
|
||||
import cron from 'node-cron';
|
||||
import * as templating from '../../../src/storage/elasticsearch/templating';
|
||||
|
||||
describe('Monitoring', async function () {
|
||||
const sandbox = sinon.createSandbox();
|
||||
const logAction: SCMonitoringLogAction = {
|
||||
message: 'Foo monitoring message',
|
||||
prefix: 'Backend Monitoring',
|
||||
type: 'log'
|
||||
};
|
||||
const mailAction: SCMonitoringMailAction = {
|
||||
message: 'Bar monitoring message',
|
||||
recipients: ['xyz@xyz.com'],
|
||||
subject: 'Backend Monitoring',
|
||||
type: 'mail'
|
||||
};
|
||||
let transport: any;
|
||||
// @ts-ignore
|
||||
let mailQueue: any;
|
||||
beforeEach(async function () {
|
||||
transport = getTransport(true);
|
||||
mailQueue = new MailQueue(transport);
|
||||
cronScheduleStub = sandbox.stub(cron, 'schedule');
|
||||
sandbox.stub(templating, 'checkESTemplate');
|
||||
});
|
||||
afterEach(async function () {
|
||||
sandbox.restore();
|
||||
});
|
||||
// const sandbox = sinon.createSandbox();
|
||||
let cronScheduleStub: sinon.SinonStub
|
||||
const minLengthWatcher: SCMonitoringWatcher = {
|
||||
actions: [logAction, mailAction],
|
||||
conditions: [
|
||||
{
|
||||
length: 10,
|
||||
type: 'MinimumLength'
|
||||
}
|
||||
],
|
||||
name: 'foo watcher',
|
||||
query: {foo: 'bar'},
|
||||
triggers: [
|
||||
{
|
||||
executionTime: 'monthly',
|
||||
name: 'beginning of month'
|
||||
},
|
||||
{
|
||||
executionTime: 'daily',
|
||||
name: 'every night'
|
||||
}
|
||||
]
|
||||
};
|
||||
const maxLengthWatcher: SCMonitoringWatcher = {
|
||||
actions: [logAction, mailAction],
|
||||
conditions: [
|
||||
{
|
||||
length: 30,
|
||||
type: 'MaximumLength'
|
||||
}
|
||||
],
|
||||
name: 'foo watcher',
|
||||
query: {bar: 'foo'},
|
||||
triggers: [
|
||||
{
|
||||
executionTime: 'hourly',
|
||||
name: 'every hour'
|
||||
},
|
||||
{
|
||||
executionTime: 'weekly',
|
||||
name: 'every week'
|
||||
},
|
||||
]
|
||||
};
|
||||
const monitoringConfig: SCMonitoringConfiguration = {
|
||||
actions: [logAction, mailAction],
|
||||
watchers: [minLengthWatcher, maxLengthWatcher]
|
||||
};
|
||||
|
||||
it('should create a schedule for each trigger', async function () {
|
||||
await setUp(monitoringConfig, new Client({node: 'http://foohost:9200'}), mailQueue);
|
||||
|
||||
expect(cronScheduleStub.callCount).to.be.equal(4);
|
||||
});
|
||||
|
||||
it('should log errors where conditions failed', async function () {
|
||||
const fakeSearchResponse: Partial<ApiResponse<SearchResponse<SCThings>>> = {
|
||||
// @ts-ignore
|
||||
body: {
|
||||
took: 12,
|
||||
timed_out: false,
|
||||
// @ts-ignore
|
||||
_shards: {},
|
||||
// @ts-ignore
|
||||
hits: {
|
||||
total: 123
|
||||
},
|
||||
},
|
||||
};
|
||||
let fakeClient = new Client({node: 'http://foohost:9200'});
|
||||
const loggerErrorStub = sandbox.stub(Logger, 'error');
|
||||
const mailQueueSpy = sinon.spy(mailQueue, 'push');
|
||||
cronScheduleStub.yields();
|
||||
sandbox.stub(fakeClient, 'search').resolves(fakeSearchResponse);
|
||||
await setUp(monitoringConfig, fakeClient, mailQueue);
|
||||
|
||||
expect(loggerErrorStub.callCount).to.be.equal(2);
|
||||
expect(mailQueueSpy.callCount).to.be.equal(2);
|
||||
});
|
||||
});
|
||||
442
test/storage/elasticsearch/query.spec.ts
Normal file
442
test/storage/elasticsearch/query.spec.ts
Normal file
@@ -0,0 +1,442 @@
|
||||
/*
|
||||
* Copyright (C) 2020 StApps
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
import {
|
||||
SCConfigFile,
|
||||
SCSearchBooleanFilter,
|
||||
SCSearchFilter,
|
||||
SCSearchQuery,
|
||||
SCSearchSort,
|
||||
SCThingType
|
||||
} from '@openstapps/core';
|
||||
import { expect } from 'chai';
|
||||
import {configFile} from '../../../src/common';
|
||||
import {
|
||||
ElasticsearchConfig, ESBooleanFilter, ESDucetSort, ESGeoDistanceFilter,
|
||||
ESGeoDistanceSort,
|
||||
ESTermFilter,
|
||||
ScriptSort
|
||||
} from '../../../src/storage/elasticsearch/common';
|
||||
import {buildBooleanFilter, buildFilter, buildQuery, buildSort} from '../../../src/storage/elasticsearch/query';
|
||||
|
||||
describe('Query', function () {
|
||||
describe('buildBooleanFilter', function () {
|
||||
const booleanFilter: SCSearchBooleanFilter = {
|
||||
arguments: {
|
||||
operation: 'and',
|
||||
filters: [
|
||||
{
|
||||
type: 'value',
|
||||
arguments: {
|
||||
field: 'type',
|
||||
value: SCThingType.Catalog
|
||||
}
|
||||
},
|
||||
{
|
||||
type: 'value',
|
||||
arguments: {
|
||||
field: 'type',
|
||||
value: SCThingType.Building
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
type: 'boolean'
|
||||
};
|
||||
const booleanFilters: {[key: string]: SCSearchBooleanFilter} = {
|
||||
and: booleanFilter,
|
||||
or: {...booleanFilter, arguments: {...booleanFilter.arguments, operation: 'or'}},
|
||||
not: {...booleanFilter, arguments: {...booleanFilter.arguments, operation: 'not'}},
|
||||
};
|
||||
const expectedEsFilters: Array<ESTermFilter> = [
|
||||
{
|
||||
term: {
|
||||
'type.raw': 'catalog'
|
||||
}
|
||||
},
|
||||
{
|
||||
term: {
|
||||
'type.raw': 'building'
|
||||
}
|
||||
}
|
||||
];
|
||||
|
||||
it('should create appropriate elasticsearch "and" filter argument', function () {
|
||||
const {must} = buildBooleanFilter(booleanFilters.and);
|
||||
|
||||
expect(must).to.be.eql(expectedEsFilters);
|
||||
});
|
||||
|
||||
it('should create appropriate elasticsearch "or" filter argument', function () {
|
||||
const {should, minimum_should_match} = buildBooleanFilter(booleanFilters.or);
|
||||
|
||||
expect(should).to.be.eql(expectedEsFilters);
|
||||
expect(minimum_should_match).to.be.equal(1);
|
||||
});
|
||||
|
||||
it('should create appropriate elasticsearch "not" filter argument', function () {
|
||||
const {must_not} = buildBooleanFilter(booleanFilters.not);
|
||||
|
||||
expect(must_not).to.be.eql(expectedEsFilters);
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildQuery', function () {
|
||||
const params: SCSearchQuery = {
|
||||
query: 'mathematics',
|
||||
from: 30,
|
||||
size: 5,
|
||||
sort: [
|
||||
{
|
||||
type: 'ducet',
|
||||
order: 'desc',
|
||||
arguments:
|
||||
{
|
||||
field: 'name'
|
||||
}
|
||||
},
|
||||
{
|
||||
type: 'ducet',
|
||||
order: 'desc',
|
||||
arguments:
|
||||
{
|
||||
field: 'categories'
|
||||
}
|
||||
},
|
||||
],
|
||||
filter: {
|
||||
type: 'value',
|
||||
arguments: {
|
||||
field: 'type',
|
||||
value: SCThingType.AcademicEvent
|
||||
}
|
||||
}
|
||||
};
|
||||
let esConfig: ElasticsearchConfig = {
|
||||
name: 'elasticsearch',
|
||||
version: '123',
|
||||
query: {
|
||||
minMatch: '75%',
|
||||
queryType: 'dis_max',
|
||||
matchBoosting: 1.3,
|
||||
fuzziness: 'AUTO',
|
||||
cutoffFrequency: 0.0,
|
||||
tieBreaker: 0,
|
||||
},
|
||||
};
|
||||
const query = {
|
||||
minMatch: '75%',
|
||||
queryType: 'dis_max',
|
||||
matchBoosting: 1.3,
|
||||
fuzziness: 'AUTO',
|
||||
cutoffFrequency: 0.0,
|
||||
tieBreaker: 0,
|
||||
}
|
||||
const config: SCConfigFile = {
|
||||
...configFile
|
||||
};
|
||||
beforeEach(function () {
|
||||
esConfig = {
|
||||
name: 'elasticsearch',
|
||||
version: '123'
|
||||
};
|
||||
});
|
||||
|
||||
// TODO: check parts of received elasticsearch query for each test case
|
||||
|
||||
it('should build query that includes sorting when query is undefined', function () {
|
||||
expect(buildQuery(params, config, esConfig)).to.be.an('object');
|
||||
});
|
||||
|
||||
it('should build query that includes sorting when query type is query_string', function () {
|
||||
esConfig.query = {...query, queryType: 'query_string'};
|
||||
|
||||
expect(buildQuery(params, config, esConfig)).to.be.an('object');
|
||||
});
|
||||
|
||||
it('should build query that includes sorting when query type is dis_max', function () {
|
||||
esConfig.query = {...query, queryType: 'dis_max'};
|
||||
|
||||
expect(buildQuery(params, config, esConfig)).to.be.an('object');
|
||||
});
|
||||
|
||||
it('should build query that includes sorting when query type is dis_max', function () {
|
||||
esConfig.query = {...query, queryType: 'dis_max'};
|
||||
|
||||
expect(buildQuery(params, config, esConfig)).to.be.an('object');
|
||||
});
|
||||
|
||||
it('should reject (throw an error) if provided query type is not supported', function () {
|
||||
// @ts-ignore
|
||||
esConfig.query = {...query, queryType: 'invalid_query_type'};
|
||||
|
||||
expect(() => buildQuery(params, config, esConfig)).to.throw('query type');
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildFilter', function () {
  // One fixture per supported filter type; the boolean fixture nests a
  // value filter and an availability filter (without an explicit time).
  const searchFilters: {[key: string]: SCSearchFilter} = {
    value: {
      type: 'value',
      arguments: {
        field: 'type',
        value: SCThingType.Dish
      }
    },
    availability: {
      type: 'availability',
      arguments: {
        time: '2017-01-30T12:05:00.000Z',
        fromField: 'offers.availabilityStarts',
        toField: 'offers.availabilityEnds'
      }
    },
    distance: {
      type: 'distance',
      arguments: {
        distance: 1000,
        field: 'geo.point.coordinates',
        position: [50.123, 8.123],
      }
    },
    boolean: {
      type: 'boolean',
      arguments: {
        operation: 'and',
        filters: [
          {
            type: 'value',
            arguments: {
              field: 'type',
              value: SCThingType.Dish,
            }
          },
          {
            type: 'availability',
            arguments: {
              fromField: 'offers.availabilityStarts',
              toField: 'offers.availabilityEnds'
            }
          }
        ]
      }
    },
  };

  it('should build value filter', function () {
    // A value filter becomes a `term` query on the raw (keyword) subfield.
    const filter = buildFilter(searchFilters.value);
    const expectedFilter: ESTermFilter = {
      term: {
        'type.raw': SCThingType.Dish
      }
    };

    expect(filter).to.be.eql(expectedFilter);
  });

  it('should build availability filter', function () {
    // Matches documents whose availability window contains the given time,
    // OR documents that declare no availability window at all.
    const filter = buildFilter(searchFilters.availability);
    const expectedFilter: ESBooleanFilter<any> = {
      bool: {
        should: [
          {
            bool: {
              must: [
                {
                  range: {
                    'offers.availabilityStarts': {
                      lte: '2017-01-30T12:05:00.000Z'
                    }
                  }
                },
                {
                  range: {
                    'offers.availabilityEnds': {
                      gte: '2017-01-30T12:05:00.000Z'
                    }
                  }
                }
              ]
            }
          },
          {
            bool: {
              must_not: [
                {
                  exists: {
                    field: 'offers.availabilityStarts'
                  }
                },
                {
                  exists: {
                    field: 'offers.availabilityEnds'
                  }
                }
              ]
            }
          }
        ]
      }
    };

    expect(filter).to.be.eql(expectedFilter);
  });

  it('should build distance filter', function () {
    // NOTE(review): position [50.123, 8.123] maps to {lat: 8.123, lon: 50.123},
    // i.e. positions appear to use GeoJSON [lon, lat] order — confirm in buildFilter.
    const filter = buildFilter(searchFilters.distance);
    const expectedFilter: ESGeoDistanceFilter = {
      geo_distance: {
        distance: '1000m',
        'geo.point.coordinates': {
          lat: 8.123,
          lon: 50.123
        }
      }
    };

    expect(filter).to.be.eql(expectedFilter);
  });

  it('should build boolean filter', function () {
    // An "and" boolean filter nests both sub-filters under `must`; the
    // availability sub-filter defaults its reference time to 'now'.
    const filter = buildFilter(searchFilters.boolean);
    const expectedFilter: ESBooleanFilter<any> = {
      bool: {
        minimum_should_match: 0,
        must: [
          {
            term: {
              'type.raw': 'dish'
            }
          },
          {
            bool: {
              should: [
                {
                  bool: {
                    must: [
                      {
                        range: {
                          'offers.availabilityStarts': {
                            lte: 'now'
                          }
                        }
                      },
                      {
                        range: {
                          'offers.availabilityEnds': {
                            gte: 'now'
                          }
                        }
                      }
                    ]
                  }
                },
                {
                  bool: {
                    must_not: [
                      {
                        exists: {
                          field: 'offers.availabilityStarts'
                        }
                      },
                      {
                        exists: {
                          field: 'offers.availabilityEnds'
                        }
                      }
                    ]
                  }
                }
              ]
            }
          }
        ],
        must_not: [],
        should: []
      }
    }

    expect(filter).to.be.eql(expectedFilter);
  });
});
|
||||
|
||||
describe('buildSort', function () {
  // One input per supported sort type: ducet (collation), distance, price.
  const searchSCSearchSort: Array<SCSearchSort> = [
    {
      type: 'ducet',
      order: 'desc',
      arguments: {
        field: 'name'
      },
    },
    {
      type: 'distance',
      order: 'desc',
      arguments: {
        field: 'geo.point',
        position: [8.123, 50.123]
      },
    },
    {
      type: 'price',
      order: 'asc',
      arguments: {
        universityRole: 'student',
        field: 'offers.prices',
      }
    },
  ];
  // Filled once in before(); indices correspond to the inputs above.
  let sorts: Array<ESDucetSort | ESGeoDistanceSort | ScriptSort> = [];
  const expectedSorts: {[key: string]: ESDucetSort | ESGeoDistanceSort | ScriptSort} = {
    ducet: {
      'name.sort': 'desc'
    },
    // NOTE(review): position [8.123, 50.123] → {lat: 50.123, lon: 8.123};
    // consistent with GeoJSON [lon, lat] order — confirm against buildSort.
    distance: {
      _geo_distance: {
        mode: 'avg',
        order: 'desc',
        unit: 'm',
        'geo.point': {
          lat: 50.123,
          lon: 8.123
        }
      }
    },
    // Placeholder script: the real sort script is not compared (see below).
    price: {
      _script: {
        order: 'asc',
        script: '\n // foo price sort script',
        type: 'number'
      }
    }
  };
  before(function () {
    sorts = buildSort(searchSCSearchSort);
  });

  it('should build ducet sort', function () {
    expect(sorts[0]).to.be.eql(expectedSorts.ducet);
  });

  it('should build distance sort', function () {
    expect(sorts[1]).to.be.eql(expectedSorts.distance);
  });

  it('should build price sort', function () {
    // Replace the generated script with the expected placeholder so only
    // the surrounding structure (order/type) is compared, not the script body.
    const priceSortNoScript = {...sorts[2], _script: {...(sorts[2] as ScriptSort)._script, script: (expectedSorts.price as ScriptSort)._script.script}}
    expect(priceSortNoScript).to.be.eql(expectedSorts.price);
  });
});
|
||||
});
|
||||
201
test/storage/elasticsearch/templating.spec.ts
Normal file
201
test/storage/elasticsearch/templating.spec.ts
Normal file
@@ -0,0 +1,201 @@
|
||||
/*
|
||||
* Copyright (C) 2020 StApps
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
import {SCThingType} from '@openstapps/core';
|
||||
import * as mapping from '@openstapps/core-tools/lib/mapping';
|
||||
import {ElasticsearchTemplateCollection} from '@openstapps/core-tools/lib/mappings/mapping-definitions';
|
||||
import {Logger} from '@openstapps/logger';
|
||||
import {AggregationSchema} from '../../../src/storage/elasticsearch/common';
|
||||
import {checkESTemplate, refreshAllTemplates} from '../../../src/storage/elasticsearch/templating';
|
||||
import sinon from "sinon";
|
||||
import * as path from 'path';
|
||||
import * as common from '@openstapps/core-tools/lib/common';
|
||||
import {expect} from 'chai';
|
||||
import fs from 'fs';
|
||||
import fsExtra from 'fs-extra';
|
||||
import {Client} from '@elastic/elasticsearch';
|
||||
|
||||
describe('templating', function () {
|
||||
describe('checkESTemplate', function () {
  const sandbox = sinon.createSandbox();
  // Fake output of mapping.generateTemplate: two templates (dish, book),
  // one aggregation schema, and an error list. Rebuilt before each test so
  // mutations (e.g. injected errors) cannot leak between cases.
  let fakeMap: { aggregations: AggregationSchema, errors: string[], mappings: ElasticsearchTemplateCollection };
  beforeEach(function () {
    fakeMap = {
      aggregations: {
        '@all': {
          aggs: {
            type: {
              terms: {
                field: 'type.raw',
                size: 1000
              }
            }
          },
          filter: {
            match_all: {}
          }
        },
      },
      errors: [],
      mappings: {
        'template_dish': {
          mappings: {
            dish: {
              // @ts-ignore just mock the mapping
              foo: 'mapping'
            }
          },
          settings: {
            analysis: {
              ducet_sort: {
                filter: [
                  'german_phonebook'
                ],
                tokenizer: 'keyword',
                type: 'custom'
              },
              search_german: {
                filter: [
                  'lowercase',
                  'german_stop',
                  'german_stemmer'
                ],
                tokenizer: 'stapps_ngram',
                type: 'custom'
              }
            },
            max_result_window: 30000,
          },
          template: 'stapps_dish*'
        },
        'template_book': {
          mappings: {
            book: {
              // @ts-ignore just mock the mapping
              foo: 'mapping'
            }
          },
          settings: {
            analysis: {
              ducet_sort: {
                filter: [
                  'german_phonebook'
                ],
                tokenizer: 'keyword',
                type: 'custom'
              },
              search_german: {
                filter: [
                  'lowercase',
                  'german_stop',
                  'german_stemmer'
                ],
                tokenizer: 'stapps_ngram',
                type: 'custom'
              }
            },
            max_result_window: 30000,
          },
          template: 'stapps_book*'
        }
      }
    }
  });
  afterEach(function () {
    sandbox.restore();
  });

  it('should write new templates when "force update" is true', async function () {
    // Stub out logging, filesystem and reflection so only the write calls
    // made by checkESTemplate are observed.
    sandbox.stub(Logger, 'error').resolves();
    sandbox.stub(fs, 'existsSync').returns(true);
    sandbox.stub(common, 'getProjectReflection');
    let caughtData: any = [];
    const writeFileSyncStub = sandbox.stub(fs, 'writeFileSync');
    sandbox.stub(path, 'resolve').returns('/foo/bar');
    sandbox.stub(mapping, 'generateTemplate').returns(fakeMap);

    checkESTemplate(true);

    expect(writeFileSyncStub.callCount).to.be.gt(0);
    // Collect the payload (second argument) of every write.
    for (let i = 0; i < writeFileSyncStub.callCount; i++) {
      caughtData.push(writeFileSyncStub.getCall(i).args[1]);
    }

    // Expected write order: each template (pretty-printed), then the
    // aggregation schema (compact JSON).
    expect(caughtData).to.be.eql([
      JSON.stringify(fakeMap.mappings['template_dish'], null, 2),
      JSON.stringify(fakeMap.mappings['template_book'], null, 2),
      JSON.stringify(fakeMap.aggregations),
    ]);
  });

  it('should not write new templates when "force update" is false', async function () {
    sandbox.stub(Logger, 'error').resolves();
    // Existing files + no force flag → nothing should be written.
    sandbox.stub(fs, 'existsSync').returns(true);
    sandbox.stub(common, 'getProjectReflection');
    const writeFileSyncStub = sandbox.stub(fs, 'writeFileSync');
    sandbox.stub(path, 'resolve').returns('/foo/bar');
    sandbox.stub(mapping, 'generateTemplate').returns(fakeMap);

    checkESTemplate(false);

    expect(writeFileSyncStub.called).to.be.false;
  });

  it('should terminate if there are errors in the map', async function () {
    // process.exit is stubbed so the test process itself survives.
    const processExitStub = sandbox.stub(process, 'exit');
    const fakeMapWithErrors = {
      ...fakeMap,
      errors: ['Foo Error']
    };
    sandbox.stub(Logger, 'error').resolves();
    sandbox.stub(fs, 'existsSync').returns(true);
    sandbox.stub(common, 'getProjectReflection');
    sandbox.stub(fs, 'writeFileSync');
    sandbox.stub(path, 'resolve').returns('/foo/bar');
    sandbox.stub(mapping, 'generateTemplate').returns(fakeMapWithErrors);

    checkESTemplate(true);

    expect(processExitStub.called).to.be.true;
  });
});
|
||||
|
||||
describe('refreshAllTemplates', async function () {
|
||||
const sandbox = sinon.createSandbox();
|
||||
const client = {
|
||||
indices: {
|
||||
putTemplate: (_template: any) => {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
after(function () {
|
||||
sandbox.restore();
|
||||
});
|
||||
|
||||
it('should put templates for all types', async function () {
|
||||
const clientPutTemplateStub = sandbox.stub(client.indices, 'putTemplate');
|
||||
sandbox.stub(fsExtra, 'readFile').resolves(Buffer.from('{"foo": "file content"}', 'utf8'));
|
||||
await refreshAllTemplates(client as Client);
|
||||
|
||||
for (const type of Object.values(SCThingType)) {
|
||||
sinon.assert.calledWith(clientPutTemplateStub, {
|
||||
body: {foo: 'file content'},
|
||||
name: `template_${type.split(' ').join('_')}`
|
||||
})
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user