refactor: adjust code after updated dependencies

Closes #39
This commit is contained in:
Jovan Krunić
2019-06-05 13:58:26 +02:00
committed by Rainer Killinger
parent 42c7350c36
commit 8b457c9911
24 changed files with 574 additions and 343 deletions

View File

@@ -14,13 +14,16 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {SCBulkRequest, SCThingType} from '@openstapps/core';
import {Logger} from '@openstapps/logger';
import * as moment from 'moment';
import * as NodeCache from 'node-cache';
import {promisify} from 'util';
import {v4} from 'uuid';
import {logger} from '../common';
import {Database} from './Database';
import {Database} from './database';
/**
* Possible operations with a bulk
*/
export type BulkOperation = 'create' | 'expired' | 'update';
/**
@@ -68,7 +71,7 @@ export class Bulk implements SCBulkRequest {
/**
* Creates a new bulk process
* @param request
* @param request Data needed for requesting a bulk
*/
constructor(request: SCBulkRequest) {
this.uid = v4();
@@ -77,7 +80,9 @@ export class Bulk implements SCBulkRequest {
if (typeof request.expiration === 'string') {
this.expiration = request.expiration;
} else {
this.expiration = moment().add(1, 'hour').toISOString();
this.expiration = moment()
.add(1, 'hour')
.toISOString();
}
// when should this process be finished
// where does the process come from
@@ -91,8 +96,10 @@ export class Bulk implements SCBulkRequest {
* Cache for bulk-processes
*/
export class BulkStorage {
private cache: NodeCache;
/**
* Cache for temporary storage
*/
private readonly cache: NodeCache;
/**
* Creates a new BulkStorage
@@ -104,11 +111,11 @@ export class BulkStorage {
// the cache is checked every 60 seconds
this.cache = new NodeCache({stdTTL: 3600, checkperiod: 60});
this.cache.on('expired', (_key, bulk: Bulk) => {
this.cache.on('expired', async (_key, bulk: Bulk) => {
// if the bulk is not done
if (bulk.state !== 'done') {
// the database can delete the data associated with this bulk
this.database.bulkExpired(bulk);
await this.database.bulkExpired(bulk);
}
});
}
@@ -119,11 +126,14 @@ export class BulkStorage {
* @returns the bulk process that was saved
*/
private async save(bulk: Bulk): Promise<Bulk> {
const expirationInSeconds = moment(bulk.expiration).diff(moment.now()) / 1000;
logger.info('Bulk expires in ', expirationInSeconds, 'seconds');
const expirationInSeconds = moment(bulk.expiration)
// tslint:disable-next-line: no-magic-numbers
.diff(moment.now()) / 1000;
Logger.info('Bulk expires in ', expirationInSeconds, 'seconds');
// save the item in the cache with its expected expiration
await promisify<string, Bulk, number>(this.cache.set)(bulk.uid, bulk, expirationInSeconds);
return bulk;
}
@@ -141,6 +151,7 @@ export class BulkStorage {
// tell the database that the bulk was created
await this.database.bulkCreated(bulk);
return bulk;
}
@@ -175,7 +186,8 @@ export class BulkStorage {
await this.save(bulk);
// tell the database that this is the new bulk
this.database.bulkUpdated(bulk);
await this.database.bulkUpdated(bulk);
return;
}
@@ -185,7 +197,7 @@ export class BulkStorage {
* @returns a promise that contains a bulk
*/
public async read(uid: string): Promise<Bulk | undefined> {
return await promisify<string, any>(this.cache.get)(uid);
return promisify<string, Bulk | undefined>(this.cache.get)(uid);
}
}

View File

@@ -13,18 +13,25 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {SCSearchQuery, SCSearchResponse, SCThings, SCUuid} from '@openstapps/core';
import {Bulk} from './BulkStorage';
import {SCConfigFile, SCSearchQuery, SCSearchResponse, SCThings, SCUuid} from '@openstapps/core';
import {MailQueue} from '../notification/mail-queue';
import {Bulk} from './bulk-storage';
export type DatabaseConstructor = new (...args: any) => Database;
/**
* Creates an instance of a database
*/
export type DatabaseConstructor = new (config: SCConfigFile, mailQueue?: MailQueue) => Database;
/**
* Defines what one database class needs to have defined
*/
export interface Database {
/**
* Gets called if a bulk was created
*
* The database should
* @param bulk
* @param bulk A bulk to be created
*/
bulkCreated(bulk: Bulk): Promise<void>;
@@ -32,7 +39,7 @@ export interface Database {
* Gets called if a bulk expires
*
* The database should delete all data that is associated with this bulk
* @param bulk
* @param bulk A bulk whose data needs to be removed
*/
bulkExpired(bulk: Bulk): Promise<void>;
@@ -42,20 +49,25 @@ export interface Database {
* If the database holds a bulk with the same type and source as the given
* bulk it should be replaced by the given one
*
* @param bulk
* @param bulk A new bulk whose data should be saved instead of the data of the old bulk
*/
bulkUpdated(bulk: Bulk): Promise<void>;
/**
* Get a single document
* @param uid
* @param uid Unique identifier of the document
*/
get(uid: SCUuid): Promise<SCThings>;
/**
* Initialize the database (call and wait for all needed methods)
*/
init(): Promise<void>;
/**
* Add a thing to an existing bulk
* @param object
* @param bulkId
* @param thing A StAppsCore thing to be added
* @param bulk A bulk to which the thing should be added
*/
post(thing: SCThings, bulk: Bulk): Promise<void>;
@@ -64,13 +76,13 @@ export interface Database {
*
* Currently it is not possible to put an non-existing object
*
* @param thing
* @param thing A StAppsCore thing to be added to a bulk
*/
put(thing: SCThings): Promise<void>;
/**
* Search for things
* @param params
* @param params Parameters which form a search query to search the backend data
*/
search(params: SCSearchQuery): Promise<SCSearchResponse>;
}

View File

@@ -16,6 +16,9 @@
import {SCBackendAggregationConfiguration, SCFacet, SCThingType} from '@openstapps/core';
import {AggregationSchema} from './common';
/**
* Provide information on which type (or on all) an aggregation happens
*/
export type aggregationType = SCThingType | '@all';
/**
@@ -30,7 +33,7 @@ export function buildAggregations(aggsConfig: SCBackendAggregationConfiguration[
result[aggregation.fieldName] = {
terms: {
field: aggregation.fieldName + '.raw',
field: `${aggregation.fieldName}.raw`,
size: 1000,
},
};
@@ -43,7 +46,14 @@ export function buildAggregations(aggsConfig: SCBackendAggregationConfiguration[
* An elasticsearch aggregation bucket
*/
interface Bucket {
/**
* Number of documents in the aggregation bucket
*/
doc_count: number;
/**
* Text representing the documents in the bucket
*/
key: string;
}
@@ -52,7 +62,14 @@ interface Bucket {
*/
interface AggregationResponse {
[field: string]: {
/**
* Buckets in an aggregation
*/
buckets: Bucket[];
/**
* Number of documents in an aggregation
*/
doc_count?: number;
};
}
@@ -79,10 +96,11 @@ export function parseAggregations(
key: bucket.key,
};
}),
field: aggregationSchema[aggregationName].terms.field + '.raw',
field: `${aggregationSchema[aggregationName].terms.field}.raw`,
};
facets.push(facet);
});
return facets;
}

View File

@@ -15,7 +15,9 @@
*/
import {SCThingType} from '@openstapps/core';
import {SCThing} from '@openstapps/core';
import {NameList} from 'elasticsearch';
/* tslint:disable:completed-docs */ // TODO: document properties of interfaces
/**
* An elasticsearch bucket aggregation
* @see https://www.elastic.co/guide/en/elasticsearch/reference/5.5/search-aggregations-bucket.html
@@ -60,9 +62,11 @@ export interface ElasticsearchObject<T extends SCThing> {
_source: T;
_type: string;
_version?: number;
fields?: any;
fields?: NameList;
// tslint:disable: no-any
highlight?: any;
inner_hits?: any;
// tslint:enable: no-any
matched_queries?: string[];
sort?: string[];
}
@@ -156,7 +160,7 @@ export interface ESBooleanFilter<T> {
export interface ESFunctionScoreQuery {
function_score: {
functions: ESFunctionScoreQueryFunction[];
query: ESBooleanFilter<any>;
query: ESBooleanFilter<unknown>;
score_mode: 'multiply';
};
}

View File

@@ -23,19 +23,21 @@ import {
SCThingType,
SCUuid,
} from '@openstapps/core';
import {Logger} from '@openstapps/logger';
import * as ES from 'elasticsearch';
import * as moment from 'moment';
import {logger} from '../../common';
import {MailQueue} from '../../notification/MailQueue';
import {Bulk} from '../BulkStorage';
import {Database} from '../Database';
import {MailQueue} from '../../notification/mail-queue';
import {Bulk} from '../bulk-storage';
import {Database} from '../database';
import {buildAggregations, parseAggregations} from './aggregations';
import {AggregationSchema, ElasticsearchConfig, ElasticsearchObject} from './common';
import * as Monitoring from './monitoring';
import {buildQuery, buildSort} from './query';
import {putTemplate} from './templating';
// this will match index names such as stapps_<type>_<source>_<random suffix>
/**
* Matches index names such as stapps_<type>_<source>_<random suffix>
*/
const indexRegex = /^stapps_([A-z0-9_]+)_([a-z0-9-_]+)_([-a-z0-9^_]+)$/;
/**
@@ -43,6 +45,14 @@ const indexRegex = /^stapps_([A-z0-9_]+)_([a-z0-9-_]+)_([-a-z0-9^_]+)$/;
*/
export class Elasticsearch implements Database {
/**
* Length of the index UID used for generation of its name
*/
static readonly INDEX_UID_LENGTH = 8;
/**
* Holds aggregations
*/
aggregationsSchema: AggregationSchema;
/**
@@ -53,25 +63,117 @@ export class Elasticsearch implements Database {
[scType: string]: {
// each source is assigned a index name in elasticsearch
[source: string]: string;
},
};
};
/**
* Elasticsearch client
*/
client: ES.Client;
/**
* Queue of mails to be sent
*/
mailQueue: MailQueue | undefined;
/**
* Stores information if elasticsearch is ready (connection to it has been established)
*/
ready: boolean;
/**
* Get the url of elasticsearch
*/
static getElasticsearchUrl(): string {
// check if we have a docker link
if (process.env.ES_PORT_9200_TCP_ADDR !== undefined && process.env.ES_PORT_9200_TCP_PORT !== undefined) {
return `${process.env.ES_PORT_9200_TCP_ADDR}:${process.env.ES_PORT_9200_TCP_PORT}`;
}
// default
return 'localhost:9200';
}
/**
* Gets the index name in elasticsearch for one SCThingType
* @param type SCThingType of data in the index
* @param source source of data in the index
* @param bulk bulk process which created this index
*/
static getIndex(type: SCThingType, source: string, bulk: SCBulkResponse) {
return `stapps_${type.toLowerCase()
.replace(' ', '_')}_${source}_${Elasticsearch.getIndexUID(bulk.uid)}`;
}
/**
* Provides the index UID (for its name) from the bulk UID
* @param uid Bulk UID
*/
static getIndexUID(uid: SCUuid) {
return uid.substring(0, Elasticsearch.INDEX_UID_LENGTH);
}
/**
* Generates a string which matches all indices
*/
static getListOfAllIndices(): string {
// map each SC type in upper camel case
return 'stapps_*_*_*';
}
/**
* Checks for invalid characters in alias names and removes them
* @param alias The alias name
* @param uid The UID of the current bulk (for debugging purposes)
*/
static removeAliasChars(alias: string, uid: string | undefined): string {
// spaces are included in some types, so throwing an error in this case would clutter up the log unnecessarily
let formattedAlias = alias.replace(' ', '');
// List of invalid characters: https://www.elastic.co/guide/en/elasticsearch/reference/6.6/indices-create-index.html
['\\', '/', '*', '?', '"', '<', '>', '|', ',', '#'].forEach((value) => {
if (formattedAlias.includes(value)) {
formattedAlias = formattedAlias.replace(value, '');
Logger.warn(`Type of the bulk ${uid} contains an invalid character '${value}'. This can lead to two bulks `
+ `having the same alias despite having different types, as invalid characters are removed automatically. ` +
`New alias name is "${formattedAlias}."`);
}
});
['-', '_', '+'].forEach((value) => {
if (formattedAlias.charAt(0) === value) {
formattedAlias = formattedAlias.substring(1);
Logger.warn(`Type of the bulk ${uid} begins with '${value}'. This can lead to two bulks `
+ `having the same alias despite having different types, as invalid characters are removed automatically. ` +
`New alias name is "${formattedAlias}."`);
}
});
if (formattedAlias === '.' || formattedAlias === '..') {
Logger.warn(`Type of the bulk ${uid} is ${formattedAlias}. This is an invalid name, please consider using ` +
`another one, as it will be replaced with 'alias_placeholder', which can lead to strange errors.`);
return 'alias_placeholder';
}
if (formattedAlias.includes(':')) {
Logger.warn(`Type of the bulk ${uid} contains a ':'. This isn't an issue now, but will be in future ` +
`Elasticsearch versions!`);
}
return formattedAlias;
}
/**
* Create a new interface for elasticsearch
* @param config an assembled config file
* @param mailQueue a mailqueue for monitoring
*/
constructor(private config: SCConfigFile, mailQueue?: MailQueue) {
constructor(private readonly config: SCConfigFile, mailQueue?: MailQueue) {
if (!config.internal.database || typeof config.internal.database.version === 'undefined') {
if (typeof config.internal.database === 'undefined' || typeof config.internal.database.version === 'undefined') {
throw new Error('Database version is undefined. Check you config file');
}
const options = {
apiVersion: config.internal.database.version,
host: this.getElasticsearchUrl(),
host: Elasticsearch.getElasticsearchUrl(),
log: 'error',
};
@@ -86,18 +188,7 @@ export class Elasticsearch implements Database {
this.aggregationsSchema = buildAggregations(this.config.internal.aggregations);
this.getAliasMap();
const monitoringConfiguration = this.config.internal.monitoring;
if (typeof monitoringConfiguration !== 'undefined') {
if (typeof mailQueue === 'undefined') {
throw new Error('Monitoring is defined, but MailQueue is undefined. A MailQueue is obligatory for monitoring.');
}
// read all watches and schedule searches on the client
Monitoring.setUp(monitoringConfiguration, this.client, mailQueue);
}
this.mailQueue = mailQueue;
}
/**
@@ -105,7 +196,8 @@ export class Elasticsearch implements Database {
*
* Returns Elasticsearch Object if it exists
*/
private async doesItemExist(object: SCThings): Promise<{exists: boolean; object?: ElasticsearchObject<SCThings>}> {
// tslint:disable-next-line: completed-docs
private async doesItemExist(object: SCThings): Promise<{exists: boolean; object?: ElasticsearchObject<SCThings>; }> {
const searchResponse = await this.client.search<SCThings>({
body: {
query: {
@@ -117,7 +209,7 @@ export class Elasticsearch implements Database {
},
},
from: 0,
index: this.getListOfAllIndices(),
index: Elasticsearch.getListOfAllIndices(),
size: 1,
});
@@ -137,25 +229,30 @@ export class Elasticsearch implements Database {
* Gets a map which contains each alias and all indices that are associated with each alias
*/
private async getAliasMap() {
// delay after which alias map will be fetched again
const RETRY_INTERVAL = 5000;
// create a list of old indices that are not in use
const oldIndicesToDelete: string[] = [];
let aliases: {
[index: string]: {
/**
* Aliases of an index
*/
aliases: {
[K in SCThingType]: any
},
},
[K in SCThingType]: unknown
};
};
};
try {
aliases = await this.client.indices.getAlias({});
} catch (error) {
logger.error('Failed getting alias map:', error);
setTimeout(() => {
this.getAliasMap();
}, 5000); // retry in 5 seconds
await Logger.error('Failed getting alias map:', error);
setTimeout(async () => {
return this.getAliasMap();
}, RETRY_INTERVAL); // retry after a delay
return;
}
@@ -165,6 +262,7 @@ export class Elasticsearch implements Database {
const matches = indexRegex.exec(index);
if (matches !== null) {
const type = matches[1];
// tslint:disable-next-line: no-magic-numbers
const source = matches[2];
// check if there is an alias for the current index
@@ -189,78 +287,11 @@ export class Elasticsearch implements Database {
await this.client.indices.delete({
index: oldIndicesToDelete,
});
logger.warn('Deleted old indices: ' + oldIndicesToDelete);
Logger.warn(`Deleted old indices: oldIndicesToDelete`);
}
logger.ok('Read alias map from elasticsearch: ' + JSON.stringify(this.aliasMap, null, 2));
}
/**
* Get the url of elasticsearch
*/
private getElasticsearchUrl(): string {
// check if we have a docker link
if (process.env.ES_PORT_9200_TCP_ADDR !== undefined && process.env.ES_PORT_9200_TCP_PORT !== undefined) {
return process.env.ES_PORT_9200_TCP_ADDR + ':' + process.env.ES_PORT_9200_TCP_PORT;
}
// default
return 'localhost:9200';
}
/**
* Gets the index name in elasticsearch for one SCThingType
* @param type SCThingType of data in the index
* @param source source of data in the index
* @param bulk bulk process which created this index
*/
private getIndex(type: SCThingType, source: string, bulk: SCBulkResponse) {
return `stapps_${type.toLowerCase().replace(' ', '_')}_${source}_${bulk.uid.substring(0, 8)}`;
}
/**
* Generates a string which matches all indices
*/
private getListOfAllIndices(): string {
// map each SC type in upper camel case
return 'stapps_*_*_*';
}
/**
* Checks for invalid characters in alias names and removes them
* @param alias The alias name
* @param uid The UID of the current bulk (for debugging purposes)
*/
private removeAliasChars(alias: string, uid: string | undefined): string {
// spaces are included in some types, so throwing an error in this case would clutter up the log unnecessarily
alias = alias.replace(' ', '');
// List of invalid characters: https://www.elastic.co/guide/en/elasticsearch/reference/6.6/indices-create-index.html
['\\', '/', '*', '?', '"', '<', '>', '|', ',', '#'].forEach((value) => {
if (alias.includes(value)) {
alias = alias.replace(value, '');
logger.warn(`Type of the bulk ${uid} contains an invalid character '${value}'. This can lead to two bulks `
+ `having the same alias despite having different types, as invalid characters are removed automatically. ` +
`New alias name is "${alias}."`);
}
});
['-', '_', '+'].forEach((value) => {
if (alias.charAt(0) === value) {
alias = alias.substring(1);
logger.warn(`Type of the bulk ${uid} begins with '${value}'. This can lead to two bulks `
+ `having the same alias despite having different types, as invalid characters are removed automatically. ` +
`New alias name is "${alias}."`);
}
});
if (alias === '.' || alias === '..') {
logger.warn(`Type of the bulk ${uid} is ${alias}. This is an invalid name, please consider using another ` +
`one, as it will be replaced with 'alias_placeholder', which can lead to strange errors.`);
return 'alias_placeholder';
}
if (alias.includes(':')) {
logger.warn(`Type of the bulk ${uid} contains a ':'. This isn't an issue now, but will be in future ` +
`Elasticsearch versions!`);
}
return alias;
// tslint:disable-next-line: no-magic-numbers
Logger.ok(`Read alias map from elasticsearch: ${JSON.stringify(this.aliasMap, null, 2)}`);
}
/**
@@ -274,11 +305,11 @@ export class Elasticsearch implements Database {
}
// index name for elasticsearch
const index: string = this.getIndex(bulk.type, bulk.source, bulk);
const index: string = Elasticsearch.getIndex(bulk.type, bulk.source, bulk);
// there already is an index with this type and source. We will index the new one and switch the alias to it
// the old one is deleted
const alias = this.removeAliasChars(bulk.type, bulk.uid);
const alias = Elasticsearch.removeAliasChars(bulk.type, bulk.uid);
if (typeof this.aliasMap[alias] === 'undefined') {
this.aliasMap[alias] = {};
@@ -286,8 +317,8 @@ export class Elasticsearch implements Database {
if (!indexRegex.test(index)) {
throw new Error(
'Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.\n' +
'Make sure to set the bulk "source" and "type" to names consisting of the characters above.',
`Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.
Make sure to set the bulk "source" and "type" to names consisting of the characters above.`,
);
}
@@ -297,7 +328,7 @@ export class Elasticsearch implements Database {
index,
});
logger.info('Created index', index);
Logger.info('Created index', index);
}
/**
@@ -306,14 +337,15 @@ export class Elasticsearch implements Database {
*/
public async bulkExpired(bulk: Bulk): Promise<void> {
// index name for elasticsearch
const index: string = this.getIndex(bulk.type, bulk.source, bulk);
const index: string = Elasticsearch.getIndex(bulk.type, bulk.source, bulk);
logger.info('Bulk expired. Deleting index', index);
Logger.info('Bulk expired. Deleting index', index);
// don't delete indices that are in use already
if (bulk.state !== 'done') {
logger.info('deleting obsolete index', index);
return await this.client.indices.delete({index});
Logger.info('deleting obsolete index', index);
return this.client.indices.delete({index});
}
}
@@ -329,10 +361,10 @@ export class Elasticsearch implements Database {
}
// index name for elasticsearch
const index: string = this.getIndex(bulk.type, bulk.source, bulk);
const index: string = Elasticsearch.getIndex(bulk.type, bulk.source, bulk);
// alias for the indices
const alias = this.removeAliasChars(bulk.type, bulk.uid);
const alias = Elasticsearch.removeAliasChars(bulk.type, bulk.uid);
if (typeof this.aliasMap[alias] === 'undefined') {
this.aliasMap[alias] = {};
@@ -340,8 +372,8 @@ export class Elasticsearch implements Database {
if (!indexRegex.test(index)) {
throw new Error(
'Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.\n' +
'Make sure to set the bulk "source" and "type" to names consisting of the characters above.',
`Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.
Make sure to set the bulk "source" and "type" to names consisting of the characters above.`,
);
}
@@ -388,9 +420,9 @@ export class Elasticsearch implements Database {
if (typeof oldIndex === 'string') {
// delete the old index
await this.client.indices.delete({index: oldIndex});
logger.info('deleted old index', oldIndex);
Logger.info('deleted old index', oldIndex);
}
logger.info('swapped alias index alias', oldIndex, '=>', index);
Logger.info('swapped alias index alias', oldIndex, '=>', index);
}
/**
@@ -406,7 +438,7 @@ export class Elasticsearch implements Database {
},
},
},
index: this.getListOfAllIndices(),
index: Elasticsearch.getListOfAllIndices(),
});
// get data from response
@@ -414,9 +446,26 @@ export class Elasticsearch implements Database {
if (hits.length !== 1) {
throw new Error('No unique item found.');
} else {
return hits[0]._source as SCThings;
}
return hits[0]._source as SCThings;
}
/**
* Initialize the elasticsearch database (call all needed methods)
*/
public async init(): Promise<void> {
const monitoringConfiguration = this.config.internal.monitoring;
if (typeof monitoringConfiguration !== 'undefined') {
if (typeof this.mailQueue === 'undefined') {
throw new Error('Monitoring is defined, but MailQueue is undefined. A MailQueue is obligatory for monitoring.');
}
// read all watches and schedule searches on the client
Monitoring.setUp(monitoringConfiguration, this.client, this.mailQueue);
}
return this.getAliasMap();
}
/**
@@ -426,24 +475,27 @@ export class Elasticsearch implements Database {
*/
public async post(object: SCThings, bulk: Bulk): Promise<void> {
const obj: SCThings & {creation_date: string} = {
// tslint:disable-next-line: completed-docs
const obj: SCThings & {creation_date: string; } = {
...object,
creation_date: moment().format(),
creation_date: moment()
.format(),
};
const itemMeta = await this.doesItemExist(obj);
// we have to check that the item will get replaced if the index is rolled over
if (itemMeta.exists && typeof itemMeta.object !== 'undefined') {
const indexOfNew = this.getIndex(obj.type, bulk.source, bulk);
const indexOfNew = Elasticsearch.getIndex(obj.type, bulk.source, bulk);
const oldIndex = itemMeta.object._index;
// new item doesn't replace the old one
if (oldIndex.substring(0, oldIndex.length - 9) !== indexOfNew.substring(0, indexOfNew.length - 9)) {
throw new Error(
'Object \"' + obj.uid + '\" already exists. Object was: ' +
JSON.stringify(obj, null, 2),
);
if (oldIndex.substring(0, oldIndex.length - Elasticsearch.INDEX_UID_LENGTH + 1)
!== indexOfNew.substring(0, indexOfNew.length - Elasticsearch.INDEX_UID_LENGTH + 1)) {
throw new Error(
// tslint:disable-next-line: no-magic-numbers
`Object "${obj.uid}" already exists. Object was: ${JSON.stringify(obj, null, 2)}`,
);
}
}
@@ -451,13 +503,13 @@ export class Elasticsearch implements Database {
const searchResponse = await this.client.create({
body: obj,
id: obj.uid,
index: this.getIndex(obj.type, bulk.source, bulk),
index: Elasticsearch.getIndex(obj.type, bulk.source, bulk),
timeout: '90s',
type: obj.type,
});
if (!searchResponse.created) {
throw new Error('Object creation Error: Instance was: ' + JSON.stringify(obj));
throw new Error(`Object creation Error: Instance was: ${JSON.stringify(obj)}`);
}
}
@@ -470,7 +522,7 @@ export class Elasticsearch implements Database {
const itemMeta = await this.doesItemExist(object);
if (itemMeta.exists && typeof itemMeta.object !== 'undefined') {
return await this.client.update({
return this.client.update({
body: {
doc: object,
},
@@ -499,7 +551,7 @@ export class Elasticsearch implements Database {
query: buildQuery(params, this.config, this.config.internal.database as ElasticsearchConfig),
},
from: params.from,
index: this.getListOfAllIndices(),
index: Elasticsearch.getListOfAllIndices(),
size: params.size,
};

View File

@@ -20,10 +20,10 @@ import {
SCMonitoringMaximumLengthCondition,
SCMonitoringMinimumLengthCondition,
} from '@openstapps/core';
import {Logger} from '@openstapps/logger';
import * as ES from 'elasticsearch';
import * as cron from 'node-cron';
import {logger} from '../../common';
import {MailQueue} from '../../notification/MailQueue';
import {MailQueue} from '../../notification/mail-queue';
/**
* Check if the given condition fails on the given number of results and the condition
@@ -37,13 +37,14 @@ function conditionFails(
if (condition.type === 'MaximumLength') {
return maxConditionFails(condition.length, total);
}
return minConditionFails(condition.length, total);
}
/**
* Check if the min condition fails
* @param minimumLength
* @param total
* @param minimumLength Minimal length allowed
* @param total Number of results
*/
function minConditionFails(minimumLength: number, total: number) {
return typeof minimumLength === 'number' && minimumLength > total;
@@ -51,8 +52,8 @@ function minConditionFails(minimumLength: number, total: number) {
/**
* Check if the max condition fails
* @param maximumLength
* @param total
* @param maximumLength Maximal length allowed
* @param total Number of results
*/
function maxConditionFails(maximumLength: number, total: number) {
return typeof maximumLength === 'number' && maximumLength < total;
@@ -74,19 +75,18 @@ export function runActions(
mailQueue: MailQueue,
) {
actions.forEach((action) => {
actions.forEach(async (action) => {
if (action.type === 'log') {
logger.error(
await Logger.error(
action.prefix,
`Watcher '${watcherName}' failed. Watcher was triggered by '${triggerName}'`, `Found ${total} hits instead`,
action.message,
);
} else {
mailQueue.push({
await mailQueue.push({
subject: action.subject,
text: `Watcher '${watcherName}' failed. Watcher was triggered by '${triggerName}'\n` +
action.message +
`Found ${total} hits instead`,
text: `Watcher '${watcherName}' failed. Watcher was triggered by '${triggerName}'
${action.message} Found ${total} hits instead`,
to: action.recipients,
});
}
@@ -137,5 +137,5 @@ export function setUp(monitoringConfig: SCMonitoringConfiguration, esClient: ES.
});
logger.log('Scheduled ' + monitoringConfig.watchers.length + ' watches');
Logger.log(`Scheduled ${monitoringConfig.watchers.length} watches`);
}

View File

@@ -44,9 +44,9 @@ import {
/**
* Builds a boolean filter. Returns an elasticsearch boolean filter
*/
export function buildBooleanFilter(booleanFilter: SCSearchBooleanFilter): ESBooleanFilterArguments<any> {
export function buildBooleanFilter(booleanFilter: SCSearchBooleanFilter): ESBooleanFilterArguments<unknown> {
const result: ESBooleanFilterArguments<any> = {
const result: ESBooleanFilterArguments<unknown> = {
minimum_should_match: 0,
must: [],
must_not: [],
@@ -71,24 +71,39 @@ export function buildBooleanFilter(booleanFilter: SCSearchBooleanFilter): ESBool
/**
* Converts Array of Filters to elasticsearch query-syntax
* @param filter
* @param filter A search filter for the retrieval of the data
*/
export function buildFilter(filter: SCSearchFilter): ESTermFilter | ESGeoDistanceFilter | ESBooleanFilter<any> {
export function buildFilter(filter: SCSearchFilter): ESTermFilter | ESGeoDistanceFilter | ESBooleanFilter<unknown> {
switch (filter.type) {
case 'value':
const filterObj: { [field: string]: string } = {};
filterObj[filter.arguments.field + '.raw'] = filter.arguments.value;
const filterObj: { [field: string]: string; } = {};
filterObj[`${filter.arguments.field}.raw`] = filter.arguments.value;
return {
term: filterObj,
};
case 'availability':
const startRangeFilter: { [field: string]: { lte: string } } = {};
const startRangeFilter: {
[field: string]: {
/**
* Less than or equal
*/
lte: string;
};
} = {};
startRangeFilter[filter.arguments.fromField] = {
lte: 'now',
};
const endRangeFilter: { [field: string]: { gte: string } } = {};
const endRangeFilter: {
[field: string]: {
/**
* Greater than or equal
*/
gte: string;
};
} = {};
endRangeFilter[filter.arguments.toField] = {
gte: 'now',
};
@@ -129,12 +144,13 @@ export function buildFilter(filter: SCSearchFilter): ESTermFilter | ESGeoDistanc
};
case 'distance':
const geoObject: ESGeoDistanceFilterArguments = {
distance: filter.arguments.distanceInM + 'm',
distance: `${filter.arguments.distanceInM}m`,
};
geoObject[filter.arguments.field] = {
lat: filter.arguments.lat,
lon: filter.arguments.lon,
};
return {
geo_distance: geoObject,
};
@@ -171,7 +187,7 @@ function buildFunctions(
/**
* Creates boost functions for all type boost configurations
*
*
* @param boostingTypes Array of type boosting configurations
*/
function buildFunctionsForBoostingTypes(
@@ -195,33 +211,36 @@ function buildFunctionsForBoostingTypes(
const fields = boostingForOneSCType.fields;
Object.keys(boostingForOneSCType.fields).forEach((fieldName) => {
for (const fieldName in boostingForOneSCType.fields) {
if (boostingForOneSCType.fields.hasOwnProperty(fieldName)) {
const boostingForOneField = fields[fieldName];
const boostingForOneField = fields[fieldName];
for (const value in boostingForOneField) {
if (boostingForOneField.hasOwnProperty(value)) {
const factor = boostingForOneField[value];
Object.keys(boostingForOneField).forEach((value) => {
const factor = boostingForOneField[value];
// build term filter
const termFilter: ESTermFilter = {
term: {},
};
termFilter.term[`${fieldName}.raw`] = value;
// build term filter
const termFilter: ESTermFilter = {
term: {},
};
termFilter.term[fieldName + '.raw'] = value;
functions.push({
filter: {
bool: {
must: [
typeFilter,
termFilter,
],
should: [],
},
},
weight: factor,
});
});
});
functions.push({
filter: {
bool: {
must: [
typeFilter,
termFilter,
],
should: [],
},
},
weight: factor,
});
}
}
}
}
}
});
@@ -229,8 +248,8 @@ function buildFunctionsForBoostingTypes(
}
/**
* Builds body for Elasticsearch requests
* @param params
* @param defaultConfig
* @param params Parameters for querying the backend
* @param defaultConfig Default configuration of the backend
* @returns ElasticsearchQuery (body of a search-request)
*/
export function buildQuery(
@@ -355,13 +374,13 @@ export function buildQuery(
// add type filters for sorts
mustMatch.push.apply(mustMatch, typeFiltersToAppend);
}
return functionScoreQuery;
}
/**
* converts query to
* @param params
* @param sortableFields
* @param sorts Sorting rules to apply to the data that is being queried
* @returns an array of sort queries
*/
export function buildSort(
@@ -371,7 +390,8 @@ export function buildSort(
switch (sort.type) {
case 'ducet':
const ducetSort: ESDucetSort = {};
ducetSort[sort.arguments.field + '.sort'] = sort.order;
ducetSort[`${sort.arguments.field}.sort`] = sort.order;
return ducetSort;
case 'distance':
const args: ESGeoDistanceSortArguments = {
@@ -400,6 +420,12 @@ export function buildSort(
});
}
/**
* Provides a script for sorting search results by prices
*
* @param universityRole User group which consumes university services
* @param field Field in which wanted offers with prices are located
*/
export function buildPriceSortScript(universityRole: keyof SCSportCoursePriceGroup, field: SCThingsField): string {
return `
// initialize the sort value with the maximum

View File

@@ -13,23 +13,27 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {Logger} from '@openstapps/logger';
import {Client} from 'elasticsearch';
import {readdir, readFile} from 'fs-extra';
import {resolve} from 'path';
import {logger} from '../../common';
/**
* Assembles an elasticsearch template with all resolved subType-references
* @param templateType
* @param templates
* @param inline
* @returns
* @param templateType Type used in the elasticsearch mapping
* @param templates Templates (elasticsearch mappings)
* @param inline Level of hierarchy
* @deprecated
*/
function assembleElasticsearchTemplate(templateType: string, templates: {[key: string]: any}, inline: number): any {
function assembleElasticsearchTemplate(
templateType: string,
// tslint:disable-next-line: no-any
templates: {[key: string]: any; },
inline: number): object {
const templateBase = JSON.parse(JSON.stringify(templates[templateType]));
if (inline) {
if (typeof inline !== 'undefined') {
delete templateBase.dynamic_templates;
}
@@ -45,12 +49,12 @@ function assembleElasticsearchTemplate(templateType: string, templates: {[key: s
try {
// extend the template by the properties of the basetemplate
templateBase.properties = Object.assign(
templateBase.properties,
templates['base.template.json'].mappings._default_.properties,
);
templateBase.properties = {...templateBase.properties,
...templates['base.template.json'].mappings._default_.properties,
};
} catch (e) {
logger.error('Failed to merge properties on: ' + templateType);
// tslint:disable-next-line: no-floating-promises
Logger.error(`Failed to merge properties on: ${templateType}`);
throw e;
}
const fieldKeys = Object.keys(templateBase.properties);
@@ -70,7 +74,7 @@ function assembleElasticsearchTemplate(templateType: string, templates: {[key: s
if (Array.isArray(field._typeRef)) {
let obj = {};
field._typeRef.forEach((subType: string) => {
obj = Object.assign(obj, assembleElasticsearchTemplate(subType, templates, inline + 1));
obj = {...obj, ...assembleElasticsearchTemplate(subType, templates, inline + 1)};
});
templateBase.properties[fieldKey] = obj;
} else {
@@ -82,25 +86,29 @@ function assembleElasticsearchTemplate(templateType: string, templates: {[key: s
}
});
}
return templateBase;
}
/**
* Reads all template files and returns the assembled template
*/
export async function getElasticsearchTemplate(): Promise<any> {
// TODO: check if redundant
export async function getElasticsearchTemplate(): Promise<object> {
  // read in all templates
const elasticsearchFolder = resolve('.', 'src', 'storage', 'elasticsearch', 'templates');
const templates: {[key: string]: any} = {};
// tslint:disable-next-line: no-any
const templates: {[key: string]: any; } = {};
const fileNames = await readdir(elasticsearchFolder);
const availableTypes = fileNames.filter((fileName) => {
return Array.isArray(fileName.match(/\w*\.sc-type\.template\.json/i));
}).map((fileName) => {
return fileName.substring(0, fileName.indexOf('.sc-type.template.json'));
});
})
.map((fileName) => {
return fileName.substring(0, fileName.indexOf('.sc-type.template.json'));
});
const promises = fileNames.map(async (fileName) => {
const file = await readFile(resolve(elasticsearchFolder, fileName), 'utf8');
@@ -108,7 +116,7 @@ export async function getElasticsearchTemplate(): Promise<any> {
try {
templates[fileName] = JSON.parse(file.toString());
} catch (jsonParsingError) {
logger.error('Failed parsing file: ' + fileName);
await Logger.error(`Failed parsing file: ${fileName}`);
throw jsonParsingError;
}
});
@@ -119,23 +127,24 @@ export async function getElasticsearchTemplate(): Promise<any> {
availableTypes.forEach((configType) => {
template.mappings[configType.toLowerCase()] =
assembleElasticsearchTemplate(configType + '.sc-type.template.json', templates, 0);
assembleElasticsearchTemplate(`${configType}.sc-type.template.json`, templates, 0);
});
// this is like the base type (StappsCoreThing)
const baseProperties = template.mappings._default_.properties;
Object.keys(baseProperties).forEach((basePropertyName) => {
let field = baseProperties[basePropertyName];
field = templates[field._fieldRef];
template.mappings._default_.properties[basePropertyName] = field;
});
Object.keys(baseProperties)
.forEach((basePropertyName) => {
let field = baseProperties[basePropertyName];
field = templates[field._fieldRef];
template.mappings._default_.properties[basePropertyName] = field;
});
return template;
}
/**
* Puts a new global template
* @param client
* @param client An elasticsearch client to use
*/
export async function putTemplate(client: Client): Promise<void> {
return client.indices.putTemplate({