refactor: adjust code after updated dependencies

Closes #39
Jovan Krunić
2019-06-05 13:58:26 +02:00
committed by Rainer Killinger
parent 42c7350c36
commit 8b457c9911
24 changed files with 574 additions and 343 deletions

View File

@@ -2,7 +2,6 @@
# See https://stackoverflow.com/a/29932318
/*
# Except these files/folders
!docs
!lib
!LICENSE
!package.json

View File

@@ -16,16 +16,16 @@
],
"scripts": {
"build": "npm run tslint && npm run compile",
"compile": "rimraf lib && tsc && prepend lib/cli.js '#!/usr/bin/env node\n'",
"changelog": "conventional-changelog -p angular -i CHANGELOG.md -s -r 0 && git add CHANGELOG.md && git commit -m 'docs: update changelog'",
"check-configuration": "openstapps-configuration",
"compile": "rimraf lib && tsc && prepend lib/cli.js '#!/usr/bin/env node\n'",
"documentation": "typedoc --includeDeclarations --mode modules --out docs --readme README.md --listInvalidSymbolLinks src",
"start": "NODE_CONFIG_ENV=elasticsearch ALLOW_NO_TRANSPORT=true node ./lib/cli.js",
"tslint": "tslint -p tsconfig.json -c tslint.json 'src/**/*.ts'",
"postversion": "npm run changelog",
"prepublishOnly": "npm ci && npm run build",
"preversion": "npm run prepublishOnly",
"push": "git push && git push origin \"v$npm_package_version\""
"push": "git push && git push origin \"v$npm_package_version\"",
"start": "NODE_CONFIG_ENV=elasticsearch ALLOW_NO_TRANSPORT=true node ./lib/cli.js",
"tslint": "tslint -p tsconfig.json -c tslint.json 'src/**/*.ts'"
},
"dependencies": {
"@openstapps/core": "0.19.0",

View File

@@ -19,26 +19,33 @@ import {
SCSyntaxErrorResponse,
SCUnsupportedMediaTypeErrorResponse,
} from '@openstapps/core';
import {Logger} from '@openstapps/logger';
import * as config from 'config';
import * as cors from 'cors';
import * as express from 'express';
import * as morgan from 'morgan';
import {join} from 'path';
import {configFile, isTestEnvironment, logger, mailer, validator} from './common';
import {MailQueue} from './notification/MailQueue';
import {bulkAddRouter} from './routes/BulkAddRoute';
import {bulkDoneRouter} from './routes/BulkDoneRoute';
import {bulkRouter} from './routes/BulkRoute';
import {indexRouter} from './routes/IndexRoute';
import {multiSearchRouter} from './routes/MultiSearchRoute';
import {searchRouter} from './routes/SearchRoute';
import {thingUpdateRouter} from './routes/ThingUpdateRoute';
import {BulkStorage} from './storage/BulkStorage';
import {DatabaseConstructor} from './storage/Database';
import {Elasticsearch} from './storage/elasticsearch/Elasticsearch';
import {configFile, isTestEnvironment, mailer, validator} from './common';
import {MailQueue} from './notification/mail-queue';
import {bulkAddRouter} from './routes/bulk-add-route';
import {bulkDoneRouter} from './routes/bulk-done-route';
import {bulkRouter} from './routes/bulk-route';
import {indexRouter} from './routes/index-route';
import {multiSearchRouter} from './routes/multi-search-route';
import {searchRouter} from './routes/search-route';
import {thingUpdateRouter} from './routes/thing-update-route';
import {BulkStorage} from './storage/bulk-storage';
import {DatabaseConstructor} from './storage/database';
import {Elasticsearch} from './storage/elasticsearch/elasticsearch';
/**
* Created express application
*/
export const app = express();
/**
* Configure the backend
*/
async function configureApp() {
// request loggers have to be the first middleware to be set in express
app.use(morgan('dev'));
@@ -62,10 +69,11 @@ async function configureApp() {
const err = new SCUnsupportedMediaTypeErrorResponse(isTestEnvironment);
res.status(err.statusCode);
res.json(err);
return;
}
const bodyBuffer: any[] = [];
const bodyBuffer: Buffer[] = [];
// we don't know the full size, the only way we can get is by adding up all individual chunk sizes
let bodySize = 0;
const chunkGatherer = (chunk: Buffer) => {
@@ -78,6 +86,7 @@ async function configureApp() {
const err = new SCRequestBodyTooLargeErrorResponse(isTestEnvironment);
res.status(err.statusCode);
res.json(err);
return;
}
// push the chunk in the buffer
@@ -85,7 +94,8 @@ async function configureApp() {
};
const endCallback = () => {
req.body = Buffer.concat(bodyBuffer).toString();
req.body = Buffer.concat(bodyBuffer)
.toString();
try {
req.body = JSON.parse(req.body);
@@ -94,13 +104,15 @@ async function configureApp() {
const err = new SCSyntaxErrorResponse(catchErr.message, isTestEnvironment);
res.status(err.statusCode);
res.json(err);
return;
}
};
req.on('data', chunkGatherer).on('end', endCallback);
req.on('data', chunkGatherer)
.on('end', endCallback);
});
const databases: {[name: string]: DatabaseConstructor} = {
const databases: {[name: string]: DatabaseConstructor; } = {
elasticsearch: Elasticsearch,
};
@@ -113,8 +125,7 @@ async function configureApp() {
// validation failed
if (configValidation.errors.length > 0) {
throw new Error(
'Validation of config file failed. Errors were: ' +
JSON.stringify(configValidation.errors),
`Validation of config file failed. Errors were: ${JSON.stringify(configValidation.errors)}`,
);
}
@@ -130,11 +141,13 @@ async function configureApp() {
typeof mailer !== 'undefined' && config.has('internal.monitoring') ? new MailQueue(mailer) : undefined,
);
await database.init();
if (typeof database === 'undefined') {
throw new Error('No implementation for configured database found. Please check your configuration.');
}
logger.ok('Validated config file successfully');
Logger.ok('Validated config file successfully');
// treats /foo and /foo/ as two different routes
// see http://expressjs.com/en/api.html#app.set
@@ -194,8 +207,10 @@ async function configureApp() {
// TODO: implement a route to register plugins
}
configureApp().then(() => {
logger.ok('Successfully configured express server');
}).catch((err) => {
configureApp()
.then(() => {
Logger.ok('Successfully configured express server');
})
.catch((err) => {
throw err;
});
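
The chunk-gathering middleware above only appears as diff fragments. A rough standalone sketch of the pattern follows; the size limit, status codes, and error payloads are illustrative stand-ins for the configuration values and the @openstapps/core error response classes the backend actually uses.

import * as express from 'express';

// hypothetical limit for this sketch; the real backend reads it from its configuration
const MAX_BODY_SIZE_IN_BYTES = 1024 * 1024;

const sketchApp = express();

sketchApp.use((req, res, next) => {
  const bodyBuffer: Buffer[] = [];
  // the full size is not known up front, so add up the individual chunk sizes
  let bodySize = 0;

  req.on('data', (chunk: Buffer) => {
    bodySize += chunk.length;
    if (bodySize > MAX_BODY_SIZE_IN_BYTES) {
      // stand-in for SCRequestBodyTooLargeErrorResponse
      res.status(413).json({error: 'request body too large'});
      req.destroy();
      return;
    }
    // push the chunk into the buffer
    bodyBuffer.push(chunk);
  });

  req.on('end', () => {
    if (res.headersSent) {
      return;
    }
    try {
      req.body = JSON.parse(Buffer.concat(bodyBuffer).toString());
    } catch (parseError) {
      // stand-in for SCSyntaxErrorResponse
      res.status(400).json({error: (parseError as Error).message});
      return;
    }
    next();
  });
});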

View File

@@ -13,13 +13,14 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {Logger} from '@openstapps/logger';
import * as http from 'http';
import {app} from './app';
import {logger} from './common';
/**
* Get port from environment and store in Express.
*/
// tslint:disable-next-line: strict-boolean-expressions
const port = normalizePort(process.env.PORT || '3000');
// TODO: Can we remove that? It doesn't look like it is read at all.
app.set('port', port);
@@ -58,23 +59,24 @@ function normalizePort(value: string) {
/**
* Event listener for HTTP server "error" event.
*/
function onError(error: Error | any) {
// tslint:disable-next-line: completed-docs
async function onError(error: { code: string; syscall: string; }) {
if (error.syscall !== 'listen') {
throw error;
}
const bind = typeof port === 'string'
? 'Pipe ' + port
: 'Port ' + port;
? `Pipe ${port}`
: `Port ${port}`;
// handle specific listen errors with friendly messages
switch (error.code) {
case 'EACCES':
logger.error(bind + ' requires elevated privileges');
await Logger.error(`${bind} requires elevated privileges`);
process.exit(1);
break;
case 'EADDRINUSE':
logger.error(bind + ' is already in use');
await Logger.error(`${bind} is already in use`);
process.exit(1);
break;
default:
@@ -88,7 +90,7 @@ function onError(error: Error | any) {
function onListening() {
const addr = server.address();
const bind = typeof addr === 'string'
? 'pipe ' + addr
: 'port ' + addr.port;
logger.ok('Listening on ' + bind);
? `pipe ${addr}`
: `port ${addr.port}`;
Logger.ok(`Listening on ${bind}`);
}
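
The body of normalizePort is not part of this diff; the hunk header only shows its signature. A sketch of the conventional express-generator implementation it presumably follows:

// assumption: the standard express-generator port normalization
function normalizePort(value: string): number | string | false {
  const parsedPort = parseInt(value, 10);

  if (isNaN(parsedPort)) {
    // named pipe
    return value;
  }

  if (parsedPort >= 0) {
    // port number
    return parsedPort;
  }

  return false;
}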

View File

@@ -15,16 +15,25 @@
*/
import {SCConfigFile} from '@openstapps/core';
import {Validator} from '@openstapps/core-tools/lib/validate';
import {Logger} from '@openstapps/logger';
import * as config from 'config';
import {BackendTransport} from './notification/BackendTransport';
import {BackendTransport} from './notification/backend-transport';
/**
* Instance of the transport for sending mails
*/
export const mailer = BackendTransport.getTransportInstance();
export const logger = new Logger(mailer);
/**
* Config file content
*/
export const configFile: SCConfigFile = config.util.toObject();
/**
* A validator instance to check if something is a valid JSON object (e.g. a request or a thing)
*/
export const validator = new Validator();
/**
* Provides information if the backend is executed in the "test" (non-production) environment
*/
export const isTestEnvironment = process.env.NODE_ENV !== 'production';

View File

@@ -13,11 +13,16 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {SMTP} from '@openstapps/logger/lib/SMTP';
import {Transport, TransportWithVerification} from '@openstapps/logger/lib/Transport';
import {SMTP} from '@openstapps/logger/lib/smtp';
import {Transport, VerifiableTransport} from '@openstapps/logger/lib/transport';
export function isTransportWithVerification(instance: Transport): instance is TransportWithVerification {
return typeof (instance as TransportWithVerification).verify === 'function';
/**
* Provides information if a transport is a verifiable transport
*
* @param instance A transport that needs to be checked
*/
export function isTransportWithVerification(instance: Transport): instance is VerifiableTransport {
return typeof (instance as VerifiableTransport).verify === 'function';
}
/**
@@ -26,18 +31,32 @@ export function isTransportWithVerification(instance: Transport): instance is Tr
* In the future this may support more than loading SMTP as a transport.
*/
export class BackendTransport {
/**
* One (and only one) instance of the backend transport
*/
private static _instance: BackendTransport;
private waitingForVerification: boolean;
/**
* Stores information if transport is in state of waiting for the verification
*/
private readonly waitingForVerification: boolean;
/**
* A (SMTP) transport which includes settings for sending mails
*/
protected transport: SMTP | undefined;
/**
* Provides an instance of a transport
*/
public static getTransportInstance(): SMTP | undefined {
if (this._instance) {
return this._instance.transport;
if (typeof BackendTransport._instance !== 'undefined') {
return BackendTransport._instance.transport;
}
this._instance = new this();
return this._instance.transport;
BackendTransport._instance = new BackendTransport();
return BackendTransport._instance.transport;
}
private constructor() {
@@ -58,19 +77,24 @@ export class BackendTransport {
if (typeof this.transport !== 'undefined' && isTransportWithVerification(this.transport)) {
this.waitingForVerification = true;
this.transport.verify().then((message) => {
if (typeof message === 'string') {
// tslint:disable-next-line:no-console
console.log(message);
}
}).catch((err) => {
throw err;
});
this.transport.verify()
.then((message) => {
if (typeof message === 'string') {
// tslint:disable-next-line:no-console
console.log(message);
}
})
.catch((err) => {
throw err;
});
} else {
this.waitingForVerification = false;
}
}
/**
* Provides information if transport is in state of waiting for the verification
*/
public isWaitingForVerification(): boolean {
return this.waitingForVerification;
}
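
The isTransportWithVerification guard is a plain user-defined type guard. A minimal self-contained sketch with hypothetical stand-in types (the real Transport and VerifiableTransport come from @openstapps/logger):

// stand-in types for the sketch only
interface SketchTransport {
  sendMail(text: string): Promise<string>;
}

interface SketchVerifiableTransport extends SketchTransport {
  verify(): Promise<string>;
}

// same pattern as isTransportWithVerification: probe for the method and narrow the type
function isVerifiable(instance: SketchTransport): instance is SketchVerifiableTransport {
  return typeof (instance as SketchVerifiableTransport).verify === 'function';
}

async function initTransport(transport: SketchTransport): Promise<void> {
  if (isVerifiable(transport)) {
    // inside this branch TypeScript knows verify() exists
    await transport.verify();
  }
}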

View File

@@ -14,15 +14,25 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {SMTP} from '@openstapps/logger/lib/SMTP';
import {Logger} from '@openstapps/logger';
import {SMTP} from '@openstapps/logger/lib/smtp';
import {MailOptions} from 'nodemailer/lib/sendmail-transport';
import * as Queue from 'promise-queue';
import {logger} from '../common';
/**
* A queue that can send mails in serial
*/
export class MailQueue {
/**
* Number of allowed verification attempts after which the initialization of transport fails
*/
static readonly MAX_VERIFICATION_ATTEMPTS = 5;
/**
* Number of milliseconds after which verification check should be repeated
*/
static readonly VERIFICATION_TIMEOUT = 5000;
/**
* A queue that saves mails, before the transport is ready. When
* the transport gets ready this mails are getting pushed in to
@@ -42,9 +52,9 @@ export class MailQueue {
/**
* Creates a mail queue
* @param transport
* @param transport Transport which is used for sending mails
*/
constructor(private transport: SMTP) {
constructor(private readonly transport: SMTP) {
this.queue = new Queue(1);
@@ -62,35 +72,36 @@ export class MailQueue {
*/
private checkForVerification() {
if (this.verificationCounter > 5) {
if (this.verificationCounter > MailQueue.MAX_VERIFICATION_ATTEMPTS) {
throw new Error('Failed to initialize the SMTP transport for the mail queue');
}
if (!this.transport.isVerified()) {
this.verificationCounter++;
setTimeout(() => {
logger.warn('Transport not verified yet. Trying to send mails here...');
setTimeout(async () => {
Logger.warn('Transport not verified yet. Trying to send mails here...');
this.checkForVerification();
}, 5000);
}, MailQueue.VERIFICATION_TIMEOUT);
} else {
logger.ok('Transport for mail queue was verified. We can send mails now');
Logger.ok('Transport for mail queue was verified. We can send mails now');
// if the transport finally was verified send all our mails from the dry queue
this.dryQueue.forEach((mail) => {
this.queue.add<string>(() => (this.transport as SMTP).sendMail(mail));
this.dryQueue.forEach(async (mail) => {
await this.queue.add<string>(() => (this.transport as SMTP).sendMail(mail));
});
}
}
/**
* Push a mail into the queue so it gets sent when the queue is ready
* @param mail
*
* @param mail Information required for sending a mail
*/
public push(mail: MailOptions) {
public async push(mail: MailOptions) {
if (!this.transport.isVerified()) { // the transport has verification, but is not verified yet
// push to a dry queue which gets pushed to the real queue when the transport is verified
this.dryQueue.push(mail);
} else {
this.queue.add<string>(() => (this.transport as SMTP).sendMail(mail));
await this.queue.add<string>(() => (this.transport as SMTP).sendMail(mail));
}
}
}
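
Reduced to its core, MailQueue parks mails in a dry queue until the transport is verified and then flushes them. A rough sketch with simplified types (no promise-queue, no SMTP transport, string payloads instead of MailOptions):

class DryQueueSketch {
  private readonly dryQueue: string[] = [];
  private verified = false;

  // called once the transport reports itself as verified
  markVerified(): void {
    this.verified = true;
    // flush everything that accumulated while the transport was not ready
    for (const mail of this.dryQueue.splice(0)) {
      this.send(mail);
    }
  }

  push(mail: string): void {
    if (!this.verified) {
      // park the mail until the transport is verified
      this.dryQueue.push(mail);
      return;
    }
    this.send(mail);
  }

  private send(mail: string): void {
    // placeholder for transport.sendMail(mail)
    console.log('sending', mail);
  }
}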

View File

@@ -14,10 +14,14 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {SCBulkAddRequest, SCBulkAddResponse, SCBulkAddRoute, SCNotFoundErrorResponse} from '@openstapps/core';
import {isTestEnvironment, logger} from '../common';
import {BulkStorage} from '../storage/BulkStorage';
import {createRoute} from './Route';
import {Logger} from '@openstapps/logger';
import {isTestEnvironment} from '../common';
import {BulkStorage} from '../storage/bulk-storage';
import {createRoute} from './route';
/**
* Contains information for using the route for adding bulks
*/
const bulkRouteModel = new SCBulkAddRoute();
/**
@@ -27,7 +31,7 @@ export const bulkAddRouter = createRoute<SCBulkAddResponse>(
bulkRouteModel,
async (request: SCBulkAddRequest, app, params) => {
if (!params || typeof params.UID !== 'string') {
if (typeof params === 'undefined' || typeof params.UID !== 'string') {
throw new Error('UID of Bulk was not given, but route with obligatory parameter was called');
}
@@ -35,7 +39,7 @@ export const bulkAddRouter = createRoute<SCBulkAddResponse>(
const bulk = await bulkMemory.read(params.UID);
if (typeof bulk === 'undefined') {
logger.warn(`Bulk with ${params.UID} not found.`);
Logger.warn(`Bulk with ${params.UID} not found.`);
throw new SCNotFoundErrorResponse(isTestEnvironment);
}

View File

@@ -14,10 +14,14 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {SCBulkDoneRequest, SCBulkDoneResponse, SCBulkDoneRoute, SCNotFoundErrorResponse} from '@openstapps/core';
import {isTestEnvironment, logger} from '../common';
import {BulkStorage} from '../storage/BulkStorage';
import {createRoute} from './Route';
import {Logger} from '@openstapps/logger';
import {isTestEnvironment} from '../common';
import {BulkStorage} from '../storage/bulk-storage';
import {createRoute} from './route';
/**
* Contains information for using the route for closing bulks
*/
const bulkDoneRouteModel = new SCBulkDoneRoute();
/**
@@ -27,7 +31,7 @@ export const bulkDoneRouter = createRoute<SCBulkDoneResponse>(
bulkDoneRouteModel,
async (_request: SCBulkDoneRequest, app, params) => {
if (!params || typeof params.UID !== 'string') {
if (typeof params === 'undefined' || typeof params.UID !== 'string') {
throw new Error('UID of Bulk was not given, but route with obligatory parameter was called');
}
@@ -35,12 +39,13 @@ export const bulkDoneRouter = createRoute<SCBulkDoneResponse>(
const bulk = await bulkMemory.read(params.UID);
if (typeof bulk === 'undefined') {
logger.warn(`Bulk with ${params.UID} not found.`);
Logger.warn(`Bulk with ${params.UID} not found.`);
throw new SCNotFoundErrorResponse(isTestEnvironment);
}
bulk.state = 'done';
await bulkMemory.markAsDone(bulk);
return {};
},
);

View File

@@ -14,9 +14,12 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {SCBulkRequest, SCBulkResponse, SCBulkRoute} from '@openstapps/core';
import {BulkStorage} from '../storage/BulkStorage';
import {createRoute} from './Route';
import {BulkStorage} from '../storage/bulk-storage';
import {createRoute} from './route';
/**
* Contains information for using the route for creating bulks
*/
const bulkRouteModel = new SCBulkRoute();
/**
@@ -26,6 +29,7 @@ export const bulkRouter = createRoute<SCBulkResponse>(
bulkRouteModel,
async (request: SCBulkRequest, app) => {
const bulkMemory: BulkStorage = app.get('bulk');
return await bulkMemory.create(request);
return bulkMemory.create(request);
},
);

View File

@@ -13,6 +13,9 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
/**
* Strings that can be used as HTTP verbs (e.g. in requests)
*/
export type HTTPVerb = 'all' |
'get' |
'post' |
@@ -38,6 +41,11 @@ export type HTTPVerb = 'all' |
'unlock' |
'unsubscribe';
/**
* Provides information if a text (representing a method) is an HTTP verb
*
* @param method A text (representing a method) to check
*/
export function isHttpMethod(method: string): method is HTTPVerb {
return ['get', 'post', 'put'].indexOf(method) > -1;
}

View File

@@ -15,8 +15,11 @@
*/
import {SCIndexResponse, SCIndexRoute} from '@openstapps/core';
import {configFile} from '../common';
import {createRoute} from './Route';
import {createRoute} from './route';
/**
* Contains information for using the index route
*/
const indexRouteModel = new SCIndexRoute();
/**

View File

@@ -21,9 +21,11 @@ import {
SCTooManyRequestsErrorResponse,
} from '@openstapps/core';
import {configFile, isTestEnvironment} from '../common';
import {BulkStorage} from '../storage/BulkStorage';
import {createRoute} from './Route';
import {BulkStorage} from '../storage/bulk-storage';
import {createRoute} from './route';
/**
* Contains information for using the multi search route
*/
const multiSearchRouteModel = new SCMultiSearchRoute();
/**
@@ -41,13 +43,13 @@ export const multiSearchRouter = createRoute<SCMultiSearchResponse | SCTooManyRe
}
// get a map of promises for each query
const searchRequests = queryNames.map((queryName) => {
const searchRequests = queryNames.map(async (queryName) => {
return bulkMemory.database.search(request[queryName]);
});
const listOfSearchResponses = await Promise.all(searchRequests);
const response: { [queryName: string]: SCSearchResponse } = {};
const response: { [queryName: string]: SCSearchResponse; } = {};
queryNames.forEach((queryName, index) => {
response[queryName] = listOfSearchResponses[index];
});

View File

@@ -19,24 +19,26 @@ import {
SCRoute,
SCValidationErrorResponse,
} from '@openstapps/core';
import {Logger} from '@openstapps/logger';
import {Application, Router} from 'express';
import PromiseRouter from 'express-promise-router';
import {ValidationError} from 'jsonschema';
import {isTestEnvironment, logger, validator} from '../common';
import {isHttpMethod} from './HTTPTypes';
import {isTestEnvironment, validator} from '../common';
import {isHttpMethod} from './http-types';
/**
* Creates a router from a route class (model of a route) and a handler function which implements the logic
* Creates a router from a route class and a handler function which implements the logic
*
* The given router performs request and response validation, sets status codes and checks if the given handler
* only returns errors that are allowed for the client to see
*
* @param routeClass
* @param handler
* @param routeClass Model of a route
* @param handler Implements the logic of the route
*/
export function createRoute<RETURNTYPE>(
routeClass: SCRoute,
handler: (validatedBody: any, app: Application, params?: { [parameterName: string]: string }) => Promise<RETURNTYPE>,
// tslint:disable-next-line: no-any
handler: (validatedBody: any, app: Application, params?: { [parameterName: string]: string; }) => Promise<RETURNTYPE>,
): Router {
// create router
const router = PromiseRouter({mergeParams: true});
@@ -80,7 +82,8 @@ export function createRoute<RETURNTYPE>(
);
res.status(error.statusCode);
res.json(error);
logger.warn(error);
Logger.warn(error);
return;
}
@@ -103,7 +106,8 @@ export function createRoute<RETURNTYPE>(
);
res.status(internalServerError.statusCode);
res.json(internalServerError);
logger.warn(internalServerError);
Logger.warn(internalServerError);
return;
}
@@ -118,7 +122,7 @@ export function createRoute<RETURNTYPE>(
// respond with the error from the handler
res.status(error.statusCode);
res.json(error);
logger.warn(error);
Logger.warn(error);
} else {
// the error is not allowed so something went wrong
const internalServerError = new SCInternalServerErrorResponse(
@@ -127,7 +131,7 @@ export function createRoute<RETURNTYPE>(
);
res.status(internalServerError.statusCode);
res.json(internalServerError);
logger.error(error);
await Logger.error(error);
}
}
});
@@ -140,7 +144,7 @@ export function createRoute<RETURNTYPE>(
const error = new SCMethodNotAllowedErrorResponse(isTestEnvironment);
res.status(error.statusCode);
res.json(error);
logger.warn(error);
Logger.warn(error);
});
// return router

View File

@@ -14,9 +14,12 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {SCSearchRequest, SCSearchResponse, SCSearchRoute} from '@openstapps/core';
import {BulkStorage} from '../storage/BulkStorage';
import {createRoute} from './Route';
import {BulkStorage} from '../storage/bulk-storage';
import {createRoute} from './route';
/**
* Contains information for using the search route
*/
const searchRouteModel = new SCSearchRoute();
/**
@@ -24,5 +27,6 @@ const searchRouteModel = new SCSearchRoute();
*/
export const searchRouter = createRoute<SCSearchResponse>(searchRouteModel, async ( request: SCSearchRequest, app) => {
const bulkMemory: BulkStorage = app.get('bulk');
return await bulkMemory.database.search(request);
return bulkMemory.database.search(request);
});

View File

@@ -14,9 +14,12 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {SCThingUpdateRequest, SCThingUpdateResponse, SCThingUpdateRoute} from '@openstapps/core';
import {BulkStorage} from '../storage/BulkStorage';
import {createRoute} from './Route';
import {BulkStorage} from '../storage/bulk-storage';
import {createRoute} from './route';
/**
* Contains information for using the route for updating single things
*/
const thingUpdateRouteModel = new SCThingUpdateRoute();
/**
@@ -27,6 +30,7 @@ export const thingUpdateRouter = createRoute<SCThingUpdateResponse>(
async (request: SCThingUpdateRequest, app) => {
const bulkMemory: BulkStorage = app.get('bulk');
await bulkMemory.database.put(request);
return {};
},
);

View File

@@ -14,13 +14,16 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {SCBulkRequest, SCThingType} from '@openstapps/core';
import {Logger} from '@openstapps/logger';
import * as moment from 'moment';
import * as NodeCache from 'node-cache';
import {promisify} from 'util';
import {v4} from 'uuid';
import {logger} from '../common';
import {Database} from './Database';
import {Database} from './database';
/**
* Possible operations with a bulk
*/
export type BulkOperation = 'create' | 'expired' | 'update';
/**
@@ -68,7 +71,7 @@ export class Bulk implements SCBulkRequest {
/**
* Creates a new bulk process
* @param request
* @param request Data needed for requesting a bulk
*/
constructor(request: SCBulkRequest) {
this.uid = v4();
@@ -77,7 +80,9 @@ export class Bulk implements SCBulkRequest {
if (typeof request.expiration === 'string') {
this.expiration = request.expiration;
} else {
this.expiration = moment().add(1, 'hour').toISOString();
this.expiration = moment()
.add(1, 'hour')
.toISOString();
}
// when should this process be finished
// where does the process come from
@@ -91,8 +96,10 @@ export class Bulk implements SCBulkRequest {
* Cache for bulk-processes
*/
export class BulkStorage {
private cache: NodeCache;
/**
* Cache for temporary storage
*/
private readonly cache: NodeCache;
/**
* Creates a new BulkStorage
@@ -104,11 +111,11 @@ export class BulkStorage {
// the cache is checked every 60 seconds
this.cache = new NodeCache({stdTTL: 3600, checkperiod: 60});
this.cache.on('expired', (_key, bulk: Bulk) => {
this.cache.on('expired', async (_key, bulk: Bulk) => {
// if the bulk is not done
if (bulk.state !== 'done') {
// the database can delete the data associated with this bulk
this.database.bulkExpired(bulk);
await this.database.bulkExpired(bulk);
}
});
}
@@ -119,11 +126,14 @@ export class BulkStorage {
* @returns the bulk process that was saved
*/
private async save(bulk: Bulk): Promise<Bulk> {
const expirationInSeconds = moment(bulk.expiration).diff(moment.now()) / 1000;
logger.info('Bulk expires in ', expirationInSeconds, 'seconds');
const expirationInSeconds = moment(bulk.expiration)
// tslint:disable-next-line: no-magic-numbers
.diff(moment.now()) / 1000;
Logger.info('Bulk expires in ', expirationInSeconds, 'seconds');
// save the item in the cache with it's expected expiration
await promisify<string, Bulk, number>(this.cache.set)(bulk.uid, bulk, expirationInSeconds);
return bulk;
}
@@ -141,6 +151,7 @@ export class BulkStorage {
// tell the database that the bulk was created
await this.database.bulkCreated(bulk);
return bulk;
}
@@ -175,7 +186,8 @@ export class BulkStorage {
await this.save(bulk);
// tell the database that this is the new bulk
this.database.bulkUpdated(bulk);
await this.database.bulkUpdated(bulk);
return;
}
@@ -185,7 +197,7 @@ export class BulkStorage {
* @returns a promise that contains a bulk
*/
public async read(uid: string): Promise<Bulk | undefined> {
return await promisify<string, any>(this.cache.get)(uid);
return promisify<string, Bulk | undefined>(this.cache.get)(uid);
}
}
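
BulkStorage is essentially a TTL cache. A minimal sketch of the node-cache calls it relies on, using the synchronous API for brevity (the class itself promisifies the callback variants):

import * as NodeCache from 'node-cache';

// entries default to a one-hour TTL and expiry is checked every 60 seconds,
// matching the BulkStorage constructor above
const cache = new NodeCache({stdTTL: 3600, checkperiod: 60});

// expired entries trigger a cleanup callback, which BulkStorage uses to drop
// the database data of bulks that never reached the "done" state
cache.on('expired', (key, value) => {
  console.log(`entry ${key} expired`, value);
});

// store an entry with an explicit per-entry TTL (in seconds) and read it back
cache.set('some-bulk-uid', {state: 'pending'}, 60);
const pendingBulk = cache.get('some-bulk-uid');
console.log(pendingBulk);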

View File

@@ -13,18 +13,25 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {SCSearchQuery, SCSearchResponse, SCThings, SCUuid} from '@openstapps/core';
import {Bulk} from './BulkStorage';
import {SCConfigFile, SCSearchQuery, SCSearchResponse, SCThings, SCUuid} from '@openstapps/core';
import {MailQueue} from '../notification/mail-queue';
import {Bulk} from './bulk-storage';
export type DatabaseConstructor = new (...args: any) => Database;
/**
* Creates an instance of a database
*/
export type DatabaseConstructor = new (config: SCConfigFile, mailQueue?: MailQueue) => Database;
/**
* Defines what one database class needs to have defined
*/
export interface Database {
/**
* Gets called if a bulk was created
*
* The database should
* @param bulk
* @param bulk A bulk to be created
*/
bulkCreated(bulk: Bulk): Promise<void>;
@@ -32,7 +39,7 @@ export interface Database {
* Gets called if a bulk expires
*
* The database should delete all data that is associated with this bulk
* @param bulk
* @param bulk A bulk which data needs to be removed
*/
bulkExpired(bulk: Bulk): Promise<void>;
@@ -42,20 +49,25 @@ export interface Database {
* If the database holds a bulk with the same type and source as the given
* bulk it should be replaced by the given one
*
* @param bulk
* @param bulk A new bulk whose data should be saved instead of the data of the old bulk
*/
bulkUpdated(bulk: Bulk): Promise<void>;
/**
* Get a single document
* @param uid
* @param uid Unique identifier of the document
*/
get(uid: SCUuid): Promise<SCThings>;
/**
* Initialize the database (call and wait for all needed methods)
*/
init(): Promise<void>;
/**
* Add a thing to an existing bulk
* @param object
* @param bulkId
* @param thing A StAppsCore thing to be added
* @param bulk A bulk to which the thing should be added
*/
post(thing: SCThings, bulk: Bulk): Promise<void>;
@@ -64,13 +76,13 @@ export interface Database {
*
* Currently it is not possible to put a non-existing object
*
* @param thing
* @param thing A StAppsCore thing to be added to a bulk
*/
put(thing: SCThings): Promise<void>;
/**
* Search for things
* @param params
* @param params Parameters which form a search query to search the backend data
*/
search(params: SCSearchQuery): Promise<SCSearchResponse>;
}

View File

@@ -16,6 +16,9 @@
import {SCBackendAggregationConfiguration, SCFacet, SCThingType} from '@openstapps/core';
import {AggregationSchema} from './common';
/**
* Provide information on which type (or on all) an aggregation happens
*/
export type aggregationType = SCThingType | '@all';
/**
@@ -30,7 +33,7 @@ export function buildAggregations(aggsConfig: SCBackendAggregationConfiguration[
result[aggregation.fieldName] = {
terms: {
field: aggregation.fieldName + '.raw',
field: `${aggregation.fieldName}.raw`,
size: 1000,
},
};
@@ -43,7 +46,14 @@ export function buildAggregations(aggsConfig: SCBackendAggregationConfiguration[
* An elasticsearch aggregation bucket
*/
interface Bucket {
/**
* Number of documents in the aggregation bucket
*/
doc_count: number;
/**
* Text representing the documents in the bucket
*/
key: string;
}
@@ -52,7 +62,14 @@ interface Bucket {
*/
interface AggregationResponse {
[field: string]: {
/**
* Buckets in an aggregation
*/
buckets: Bucket[];
/**
* Number of documents in an aggregation
*/
doc_count?: number;
};
}
@@ -79,10 +96,11 @@ export function parseAggregations(
key: bucket.key,
};
}),
field: aggregationSchema[aggregationName].terms.field + '.raw',
field: `${aggregationSchema[aggregationName].terms.field}.raw`,
};
facets.push(facet);
});
return facets;
}
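
For context, the helpers above translate between a terms aggregation response and facets. Hand-written sample data (not from the repo) showing that mapping; the exact SCFacet shape is defined in @openstapps/core, so this only approximates it:

// a terms aggregation on "<field>.raw" comes back as buckets
const aggregationResponse = {
  categories: {
    doc_count: 42,
    buckets: [
      {key: 'lecture', doc_count: 30},
      {key: 'seminar', doc_count: 12},
    ],
  },
};

// every bucket becomes one value/count pair of the facet for its field
const facets = Object.keys(aggregationResponse).map((field) => ({
  field: `${field}.raw`,
  values: aggregationResponse[field as keyof typeof aggregationResponse].buckets.map((bucket) => ({
    count: bucket.doc_count,
    key: bucket.key,
  })),
}));

console.log(JSON.stringify(facets, null, 2));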

View File

@@ -15,7 +15,9 @@
*/
import {SCThingType} from '@openstapps/core';
import {SCThing} from '@openstapps/core';
import {NameList} from 'elasticsearch';
/* tslint:disable:completed-docs */ // TODO: document properties of interfaces
/**
* An elasticsearch bucket aggregation
* @see https://www.elastic.co/guide/en/elasticsearch/reference/5.5/search-aggregations-bucket.html
@@ -60,9 +62,11 @@ export interface ElasticsearchObject<T extends SCThing> {
_source: T;
_type: string;
_version?: number;
fields?: any;
fields?: NameList;
// tslint:disable: no-any
highlight?: any;
inner_hits?: any;
// tslint:enable: no-any
matched_queries?: string[];
sort?: string[];
}
@@ -156,7 +160,7 @@ export interface ESBooleanFilter<T> {
export interface ESFunctionScoreQuery {
function_score: {
functions: ESFunctionScoreQueryFunction[];
query: ESBooleanFilter<any>;
query: ESBooleanFilter<unknown>;
score_mode: 'multiply';
};
}

View File

@@ -23,19 +23,21 @@ import {
SCThingType,
SCUuid,
} from '@openstapps/core';
import {Logger} from '@openstapps/logger';
import * as ES from 'elasticsearch';
import * as moment from 'moment';
import {logger} from '../../common';
import {MailQueue} from '../../notification/MailQueue';
import {Bulk} from '../BulkStorage';
import {Database} from '../Database';
import {MailQueue} from '../../notification/mail-queue';
import {Bulk} from '../bulk-storage';
import {Database} from '../database';
import {buildAggregations, parseAggregations} from './aggregations';
import {AggregationSchema, ElasticsearchConfig, ElasticsearchObject} from './common';
import * as Monitoring from './monitoring';
import {buildQuery, buildSort} from './query';
import {putTemplate} from './templating';
// this will match index names such as stapps_<type>_<source>_<random suffix>
/**
* Matches index names such as stapps_<type>_<source>_<random suffix>
*/
const indexRegex = /^stapps_([A-z0-9_]+)_([a-z0-9-_]+)_([-a-z0-9^_]+)$/;
/**
@@ -43,6 +45,14 @@ const indexRegex = /^stapps_([A-z0-9_]+)_([a-z0-9-_]+)_([-a-z0-9^_]+)$/;
*/
export class Elasticsearch implements Database {
/**
* Length of the index UID used for generation of its name
*/
static readonly INDEX_UID_LENGTH = 8;
/**
* Holds aggregations
*/
aggregationsSchema: AggregationSchema;
/**
@@ -53,25 +63,117 @@ export class Elasticsearch implements Database {
[scType: string]: {
// each source is assigned an index name in elasticsearch
[source: string]: string;
},
};
};
/**
* Elasticsearch client
*/
client: ES.Client;
/**
* Queue of mails to be sent
*/
mailQueue: MailQueue | undefined;
/**
* Stores information if elasticsearch is ready (connection to it has been established)
*/
ready: boolean;
/**
* Get the url of elasticsearch
*/
static getElasticsearchUrl(): string {
// check if we have a docker link
if (process.env.ES_PORT_9200_TCP_ADDR !== undefined && process.env.ES_PORT_9200_TCP_PORT !== undefined) {
return `${process.env.ES_PORT_9200_TCP_ADDR}:${process.env.ES_PORT_9200_TCP_PORT}`;
}
// default
return 'localhost:9200';
}
/**
* Gets the index name in elasticsearch for one SCThingType
* @param type SCThingType of data in the index
* @param source source of data in the index
* @param bulk bulk process which created this index
*/
static getIndex(type: SCThingType, source: string, bulk: SCBulkResponse) {
return `stapps_${type.toLowerCase()
.replace(' ', '_')}_${source}_${Elasticsearch.getIndexUID(bulk.uid)}`;
}
/**
* Provides the index UID (for its name) from the bulk UID
* @param uid Bulk UID
*/
static getIndexUID(uid: SCUuid) {
return uid.substring(0, Elasticsearch.INDEX_UID_LENGTH);
}
/**
* Generates a string which matches all indices
*/
static getListOfAllIndices(): string {
// map each SC type in upper camel case
return 'stapps_*_*_*';
}
/**
* Checks for invalid character in alias names and removes them
* @param alias The alias name
* @param uid The UID of the current bulk (for debugging purposes)
*/
static removeAliasChars(alias: string, uid: string | undefined): string {
// spaces are included in some types, so throwing an error in this case would clutter up the log unnecessarily
let formattedAlias = alias.replace(' ', '');
// List of invalid characters: https://www.elastic.co/guide/en/elasticsearch/reference/6.6/indices-create-index.html
['\\', '/', '*', '?', '"', '<', '>', '|', ',', '#'].forEach((value) => {
if (formattedAlias.includes(value)) {
formattedAlias = formattedAlias.replace(value, '');
Logger.warn(`Type of the bulk ${uid} contains an invalid character '${value}'. This can lead to two bulks `
+ `having the same alias despite having different types, as invalid characters are removed automatically. ` +
`New alias name is "${formattedAlias}."`);
}
});
['-', '_', '+'].forEach((value) => {
if (formattedAlias.charAt(0) === value) {
formattedAlias = formattedAlias.substring(1);
Logger.warn(`Type of the bulk ${uid} begins with '${value}'. This can lead to two bulks `
+ `having the same alias despite having different types, as invalid characters are removed automatically. ` +
`New alias name is "${formattedAlias}."`);
}
});
if (formattedAlias === '.' || formattedAlias === '..') {
Logger.warn(`Type of the bulk ${uid} is ${formattedAlias}. This is an invalid name, please consider using ` +
`another one, as it will be replaced with 'alias_placeholder', which can lead to strange errors.`);
return 'alias_placeholder';
}
if (formattedAlias.includes(':')) {
Logger.warn(`Type of the bulk ${uid} contains a ':'. This isn't an issue now, but will be in future ` +
`Elasticsearch versions!`);
}
return formattedAlias;
}
/**
* Create a new interface for elasticsearch
* @param config an assembled config file
* @param mailQueue a mailqueue for monitoring
*/
constructor(private config: SCConfigFile, mailQueue?: MailQueue) {
constructor(private readonly config: SCConfigFile, mailQueue?: MailQueue) {
if (!config.internal.database || typeof config.internal.database.version === 'undefined') {
if (typeof config.internal.database === 'undefined' || typeof config.internal.database.version === 'undefined') {
throw new Error('Database version is undefined. Check you config file');
}
const options = {
apiVersion: config.internal.database.version,
host: this.getElasticsearchUrl(),
host: Elasticsearch.getElasticsearchUrl(),
log: 'error',
};
@@ -86,18 +188,7 @@ export class Elasticsearch implements Database {
this.aggregationsSchema = buildAggregations(this.config.internal.aggregations);
this.getAliasMap();
const monitoringConfiguration = this.config.internal.monitoring;
if (typeof monitoringConfiguration !== 'undefined') {
if (typeof mailQueue === 'undefined') {
throw new Error('Monitoring is defined, but MailQueue is undefined. A MailQueue is obligatory for monitoring.');
}
// read all watches and schedule searches on the client
Monitoring.setUp(monitoringConfiguration, this.client, mailQueue);
}
this.mailQueue = mailQueue;
}
/**
@@ -105,7 +196,8 @@ export class Elasticsearch implements Database {
*
* Returns Elasticsearch Object if it exists
*/
private async doesItemExist(object: SCThings): Promise<{exists: boolean; object?: ElasticsearchObject<SCThings>}> {
// tslint:disable-next-line: completed-docs
private async doesItemExist(object: SCThings): Promise<{exists: boolean; object?: ElasticsearchObject<SCThings>; }> {
const searchResponse = await this.client.search<SCThings>({
body: {
query: {
@@ -117,7 +209,7 @@ export class Elasticsearch implements Database {
},
},
from: 0,
index: this.getListOfAllIndices(),
index: Elasticsearch.getListOfAllIndices(),
size: 1,
});
@@ -137,25 +229,30 @@ export class Elasticsearch implements Database {
* Gets a map which contains each alias and all indices that are associated with each alias
*/
private async getAliasMap() {
// delay after which alias map will be fetched again
const RETRY_INTERVAL = 5000;
// create a list of old indices that are not in use
const oldIndicesToDelete: string[] = [];
let aliases: {
[index: string]: {
/**
* Aliases of an index
*/
aliases: {
[K in SCThingType]: any
},
},
[K in SCThingType]: unknown
};
};
};
try {
aliases = await this.client.indices.getAlias({});
} catch (error) {
logger.error('Failed getting alias map:', error);
setTimeout(() => {
this.getAliasMap();
}, 5000); // retry in 5 seconds
await Logger.error('Failed getting alias map:', error);
setTimeout(async () => {
return this.getAliasMap();
}, RETRY_INTERVAL); // retry after a delay
return;
}
@@ -165,6 +262,7 @@ export class Elasticsearch implements Database {
const matches = indexRegex.exec(index);
if (matches !== null) {
const type = matches[1];
// tslint:disable-next-line: no-magic-numbers
const source = matches[2];
// check if there is an alias for the current index
@@ -189,78 +287,11 @@ export class Elasticsearch implements Database {
await this.client.indices.delete({
index: oldIndicesToDelete,
});
logger.warn('Deleted old indices: ' + oldIndicesToDelete);
Logger.warn(`Deleted old indices: ${oldIndicesToDelete}`);
}
logger.ok('Read alias map from elasticsearch: ' + JSON.stringify(this.aliasMap, null, 2));
}
/**
* Get the url of elasticsearch
*/
private getElasticsearchUrl(): string {
// check if we have a docker link
if (process.env.ES_PORT_9200_TCP_ADDR !== undefined && process.env.ES_PORT_9200_TCP_PORT !== undefined) {
return process.env.ES_PORT_9200_TCP_ADDR + ':' + process.env.ES_PORT_9200_TCP_PORT;
}
// default
return 'localhost:9200';
}
/**
* Gets the index name in elasticsearch for one SCThingType
* @param type SCThingType of data in the index
* @param source source of data in the index
* @param bulk bulk process which created this index
*/
private getIndex(type: SCThingType, source: string, bulk: SCBulkResponse) {
return `stapps_${type.toLowerCase().replace(' ', '_')}_${source}_${bulk.uid.substring(0, 8)}`;
}
/**
* Generates a string which matches all indices
*/
private getListOfAllIndices(): string {
// map each SC type in upper camel case
return 'stapps_*_*_*';
}
/**
* Checks for invalid character in alias names and removes them
* @param alias The alias name
* @param uid The UID of the current bulk (for debugging purposes)
*/
private removeAliasChars(alias: string, uid: string | undefined): string {
// spaces are included in some types, so throwing an error in this case would clutter up the log unnecessarily
alias = alias.replace(' ', '');
// List of invalid characters: https://www.elastic.co/guide/en/elasticsearch/reference/6.6/indices-create-index.html
['\\', '/', '*', '?', '"', '<', '>', '|', ',', '#'].forEach((value) => {
if (alias.includes(value)) {
alias = alias.replace(value, '');
logger.warn(`Type of the bulk ${uid} contains an invalid character '${value}'. This can lead to two bulks `
+ `having the same alias despite having different types, as invalid characters are removed automatically. ` +
`New alias name is "${alias}."`);
}
});
['-', '_', '+'].forEach((value) => {
if (alias.charAt(0) === value) {
alias = alias.substring(1);
logger.warn(`Type of the bulk ${uid} begins with '${value}'. This can lead to two bulks `
+ `having the same alias despite having different types, as invalid characters are removed automatically. ` +
`New alias name is "${alias}."`);
}
});
if (alias === '.' || alias === '..') {
logger.warn(`Type of the bulk ${uid} is ${alias}. This is an invalid name, please consider using another ` +
`one, as it will be replaced with 'alias_placeholder', which can lead to strange errors.`);
return 'alias_placeholder';
}
if (alias.includes(':')) {
logger.warn(`Type of the bulk ${uid} contains a ':'. This isn't an issue now, but will be in future ` +
`Elasticsearch versions!`);
}
return alias;
// tslint:disable-next-line: no-magic-numbers
Logger.ok(`Read alias map from elasticsearch: ${JSON.stringify(this.aliasMap, null, 2)}`);
}
/**
@@ -274,11 +305,11 @@ export class Elasticsearch implements Database {
}
// index name for elasticsearch
const index: string = this.getIndex(bulk.type, bulk.source, bulk);
const index: string = Elasticsearch.getIndex(bulk.type, bulk.source, bulk);
// there already is an index with this type and source. We will index the new one and switch the alias to it
// the old one is deleted
const alias = this.removeAliasChars(bulk.type, bulk.uid);
const alias = Elasticsearch.removeAliasChars(bulk.type, bulk.uid);
if (typeof this.aliasMap[alias] === 'undefined') {
this.aliasMap[alias] = {};
@@ -286,8 +317,8 @@ export class Elasticsearch implements Database {
if (!indexRegex.test(index)) {
throw new Error(
'Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.\n' +
'Make sure to set the bulk "source" and "type" to names consisting of the characters above.',
`Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.
Make sure to set the bulk "source" and "type" to names consisting of the characters above.`,
);
}
@@ -297,7 +328,7 @@ export class Elasticsearch implements Database {
index,
});
logger.info('Created index', index);
Logger.info('Created index', index);
}
/**
@@ -306,14 +337,15 @@ export class Elasticsearch implements Database {
*/
public async bulkExpired(bulk: Bulk): Promise<void> {
// index name for elasticsearch
const index: string = this.getIndex(bulk.type, bulk.source, bulk);
const index: string = Elasticsearch.getIndex(bulk.type, bulk.source, bulk);
logger.info('Bulk expired. Deleting index', index);
Logger.info('Bulk expired. Deleting index', index);
// don't delete indices that are in use already
if (bulk.state !== 'done') {
logger.info('deleting obsolete index', index);
return await this.client.indices.delete({index});
Logger.info('deleting obsolete index', index);
return this.client.indices.delete({index});
}
}
@@ -329,10 +361,10 @@ export class Elasticsearch implements Database {
}
// index name for elasticsearch
const index: string = this.getIndex(bulk.type, bulk.source, bulk);
const index: string = Elasticsearch.getIndex(bulk.type, bulk.source, bulk);
// alias for the indices
const alias = this.removeAliasChars(bulk.type, bulk.uid);
const alias = Elasticsearch.removeAliasChars(bulk.type, bulk.uid);
if (typeof this.aliasMap[alias] === 'undefined') {
this.aliasMap[alias] = {};
@@ -340,8 +372,8 @@ export class Elasticsearch implements Database {
if (!indexRegex.test(index)) {
throw new Error(
'Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.\n' +
'Make sure to set the bulk "source" and "type" to names consisting of the characters above.',
`Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.
Make sure to set the bulk "source" and "type" to names consisting of the characters above.`,
);
}
@@ -388,9 +420,9 @@ export class Elasticsearch implements Database {
if (typeof oldIndex === 'string') {
// delete the old index
await this.client.indices.delete({index: oldIndex});
logger.info('deleted old index', oldIndex);
Logger.info('deleted old index', oldIndex);
}
logger.info('swapped alias index alias', oldIndex, '=>', index);
Logger.info('swapped alias index alias', oldIndex, '=>', index);
}
/**
@@ -406,7 +438,7 @@ export class Elasticsearch implements Database {
},
},
},
index: this.getListOfAllIndices(),
index: Elasticsearch.getListOfAllIndices(),
});
// get data from response
@@ -414,9 +446,26 @@ export class Elasticsearch implements Database {
if (hits.length !== 1) {
throw new Error('No unique item found.');
} else {
return hits[0]._source as SCThings;
}
return hits[0]._source as SCThings;
}
/**
* Initialize the elasticsearch database (call all needed methods)
*/
public async init(): Promise<void> {
const monitoringConfiguration = this.config.internal.monitoring;
if (typeof monitoringConfiguration !== 'undefined') {
if (typeof this.mailQueue === 'undefined') {
throw new Error('Monitoring is defined, but MailQueue is undefined. A MailQueue is obligatory for monitoring.');
}
// read all watches and schedule searches on the client
Monitoring.setUp(monitoringConfiguration, this.client, this.mailQueue);
}
return this.getAliasMap();
}
/**
@@ -426,24 +475,27 @@ export class Elasticsearch implements Database {
*/
public async post(object: SCThings, bulk: Bulk): Promise<void> {
const obj: SCThings & {creation_date: string} = {
// tslint:disable-next-line: completed-docs
const obj: SCThings & {creation_date: string; } = {
...object,
creation_date: moment().format(),
creation_date: moment()
.format(),
};
const itemMeta = await this.doesItemExist(obj);
// we have to check that the item will get replaced if the index is rolled over
if (itemMeta.exists && typeof itemMeta.object !== 'undefined') {
const indexOfNew = this.getIndex(obj.type, bulk.source, bulk);
const indexOfNew = Elasticsearch.getIndex(obj.type, bulk.source, bulk);
const oldIndex = itemMeta.object._index;
// new item doesn't replace the old one
if (oldIndex.substring(0, oldIndex.length - 9) !== indexOfNew.substring(0, indexOfNew.length - 9)) {
throw new Error(
'Object \"' + obj.uid + '\" already exists. Object was: ' +
JSON.stringify(obj, null, 2),
);
if (oldIndex.substring(0, oldIndex.length - Elasticsearch.INDEX_UID_LENGTH + 1)
!== indexOfNew.substring(0, indexOfNew.length - Elasticsearch.INDEX_UID_LENGTH + 1)) {
throw new Error(
// tslint:disable-next-line: no-magic-numbers
`Object "${obj.uid}" already exists. Object was: ${JSON.stringify(obj, null, 2)}`,
);
}
}
@@ -451,13 +503,13 @@ export class Elasticsearch implements Database {
const searchResponse = await this.client.create({
body: obj,
id: obj.uid,
index: this.getIndex(obj.type, bulk.source, bulk),
index: Elasticsearch.getIndex(obj.type, bulk.source, bulk),
timeout: '90s',
type: obj.type,
});
if (!searchResponse.created) {
throw new Error('Object creation Error: Instance was: ' + JSON.stringify(obj));
throw new Error(`Object creation Error: Instance was: ${JSON.stringify(obj)}`);
}
}
@@ -470,7 +522,7 @@ export class Elasticsearch implements Database {
const itemMeta = await this.doesItemExist(object);
if (itemMeta.exists && typeof itemMeta.object !== 'undefined') {
return await this.client.update({
return this.client.update({
body: {
doc: object,
},
@@ -499,7 +551,7 @@ export class Elasticsearch implements Database {
query: buildQuery(params, this.config, this.config.internal.database as ElasticsearchConfig),
},
from: params.from,
index: this.getListOfAllIndices(),
index: Elasticsearch.getListOfAllIndices(),
size: params.size,
};
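
The static helpers moved around in this file encode an index naming convention. A standalone sketch with plain string parameters (the real signatures take SCThingType and SCBulkResponse):

const INDEX_UID_LENGTH = 8;

function sketchGetIndex(type: string, source: string, bulkUid: string): string {
  // "sport course" -> "sport_course"; the index UID is the first 8 characters of the bulk UID
  return `stapps_${type.toLowerCase().replace(' ', '_')}_${source}_${bulkUid.substring(0, INDEX_UID_LENGTH)}`;
}

// "stapps_sport_course_some-source_123e4567"
console.log(sketchGetIndex('sport course', 'some-source', '123e4567-e89b-12d3-a456-426614174000'));

// such names are matched by indexRegex above and by the catch-all pattern "stapps_*_*_*"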

View File

@@ -20,10 +20,10 @@ import {
SCMonitoringMaximumLengthCondition,
SCMonitoringMinimumLengthCondition,
} from '@openstapps/core';
import {Logger} from '@openstapps/logger';
import * as ES from 'elasticsearch';
import * as cron from 'node-cron';
import {logger} from '../../common';
import {MailQueue} from '../../notification/MailQueue';
import {MailQueue} from '../../notification/mail-queue';
/**
* Check if the given condition fails on the given number of results and the condition
@@ -37,13 +37,14 @@ function conditionFails(
if (condition.type === 'MaximumLength') {
return maxConditionFails(condition.length, total);
}
return minConditionFails(condition.length, total);
}
/**
* Check if the min condition fails
* @param minimumLength
* @param total
* @param minimumLength Minimal length allowed
* @param total Number of results
*/
function minConditionFails(minimumLength: number, total: number) {
return typeof minimumLength === 'number' && minimumLength > total;
@@ -51,8 +52,8 @@ function minConditionFails(minimumLength: number, total: number) {
/**
* Check if the max condition fails
* @param maximumLength
* @param total
* @param maximumLength Maximal length allowed
* @param total Number of results
*/
function maxConditionFails(maximumLength: number, total: number) {
return typeof maximumLength === 'number' && maximumLength < total;
@@ -74,19 +75,18 @@ export function runActions(
mailQueue: MailQueue,
) {
actions.forEach((action) => {
actions.forEach(async (action) => {
if (action.type === 'log') {
logger.error(
await Logger.error(
action.prefix,
`Watcher '${watcherName}' failed. Watcher was triggered by '${triggerName}'`, `Found ${total} hits instead`,
action.message,
);
} else {
mailQueue.push({
await mailQueue.push({
subject: action.subject,
text: `Watcher '${watcherName}' failed. Watcher was triggered by '${triggerName}'\n` +
action.message +
`Found ${total} hits instead`,
text: `Watcher '${watcherName}' failed. Watcher was triggered by '${triggerName}'
${action.message} Found ${total} hits instead`,
to: action.recipients,
});
}
@@ -137,5 +137,5 @@ export function setUp(monitoringConfig: SCMonitoringConfiguration, esClient: ES.
});
logger.log('Scheduled ' + monitoringConfig.watchers.length + ' watches');
Logger.log(`Scheduled ${monitoringConfig.watchers.length} watches`);
}
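
For reference, the two condition helpers shown in this file behave as follows (the numbers are made up):

function sketchMinConditionFails(minimumLength: number, total: number): boolean {
  return typeof minimumLength === 'number' && minimumLength > total;
}

function sketchMaxConditionFails(maximumLength: number, total: number): boolean {
  return typeof maximumLength === 'number' && maximumLength < total;
}

// a watcher expecting between 1 and 100 hits
console.log(sketchMinConditionFails(1, 0));    // true: too few results, actions run
console.log(sketchMaxConditionFails(100, 250)); // true: too many results, actions run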

View File

@@ -44,9 +44,9 @@ import {
/**
* Builds a boolean filter. Returns an elasticsearch boolean filter
*/
export function buildBooleanFilter(booleanFilter: SCSearchBooleanFilter): ESBooleanFilterArguments<any> {
export function buildBooleanFilter(booleanFilter: SCSearchBooleanFilter): ESBooleanFilterArguments<unknown> {
const result: ESBooleanFilterArguments<any> = {
const result: ESBooleanFilterArguments<unknown> = {
minimum_should_match: 0,
must: [],
must_not: [],
@@ -71,24 +71,39 @@ export function buildBooleanFilter(booleanFilter: SCSearchBooleanFilter): ESBool
/**
* Converts Array of Filters to elasticsearch query-syntax
* @param filter
* @param filter A search filter for the retrieval of the data
*/
export function buildFilter(filter: SCSearchFilter): ESTermFilter | ESGeoDistanceFilter | ESBooleanFilter<any> {
export function buildFilter(filter: SCSearchFilter): ESTermFilter | ESGeoDistanceFilter | ESBooleanFilter<unknown> {
switch (filter.type) {
case 'value':
const filterObj: { [field: string]: string } = {};
filterObj[filter.arguments.field + '.raw'] = filter.arguments.value;
const filterObj: { [field: string]: string; } = {};
filterObj[`${filter.arguments.field}.raw`] = filter.arguments.value;
return {
term: filterObj,
};
case 'availability':
const startRangeFilter: { [field: string]: { lte: string } } = {};
const startRangeFilter: {
[field: string]: {
/**
* Less than or equal
*/
lte: string;
};
} = {};
startRangeFilter[filter.arguments.fromField] = {
lte: 'now',
};
const endRangeFilter: { [field: string]: { gte: string } } = {};
const endRangeFilter: {
[field: string]: {
/**
* Greater than or equal
*/
gte: string;
};
} = {};
endRangeFilter[filter.arguments.toField] = {
gte: 'now',
};
@@ -129,12 +144,13 @@ export function buildFilter(filter: SCSearchFilter): ESTermFilter | ESGeoDistanc
};
case 'distance':
const geoObject: ESGeoDistanceFilterArguments = {
distance: filter.arguments.distanceInM + 'm',
distance: `${filter.arguments.distanceInM}m`,
};
geoObject[filter.arguments.field] = {
lat: filter.arguments.lat,
lon: filter.arguments.lon,
};
return {
geo_distance: geoObject,
};
@@ -171,7 +187,7 @@ function buildFunctions(
/**
* Creates boost functions for all type boost configurations
*
*
* @param boostingTypes Array of type boosting configurations
*/
function buildFunctionsForBoostingTypes(
@@ -195,33 +211,36 @@ function buildFunctionsForBoostingTypes(
const fields = boostingForOneSCType.fields;
Object.keys(boostingForOneSCType.fields).forEach((fieldName) => {
for (const fieldName in boostingForOneSCType.fields) {
if (boostingForOneSCType.fields.hasOwnProperty(fieldName)) {
const boostingForOneField = fields[fieldName];
const boostingForOneField = fields[fieldName];
for (const value in boostingForOneField) {
if (boostingForOneField.hasOwnProperty(value)) {
const factor = boostingForOneField[value];
Object.keys(boostingForOneField).forEach((value) => {
const factor = boostingForOneField[value];
// build term filter
const termFilter: ESTermFilter = {
term: {},
};
termFilter.term[`${fieldName}.raw`] = value;
// build term filter
const termFilter: ESTermFilter = {
term: {},
};
termFilter.term[fieldName + '.raw'] = value;
functions.push({
filter: {
bool: {
must: [
typeFilter,
termFilter,
],
should: [],
},
},
weight: factor,
});
});
});
functions.push({
filter: {
bool: {
must: [
typeFilter,
termFilter,
],
should: [],
},
},
weight: factor,
});
}
}
}
}
}
});
@@ -229,8 +248,8 @@ function buildFunctionsForBoostingTypes(
}
/**
* Builds body for Elasticsearch requests
* @param params
* @param defaultConfig
* @param params Parameters for querying the backend
* @param defaultConfig Default configuration of the backend
* @returns ElasticsearchQuery (body of a search-request)
*/
export function buildQuery(
@@ -355,13 +374,13 @@ export function buildQuery(
// add type filters for sorts
mustMatch.push.apply(mustMatch, typeFiltersToAppend);
}
return functionScoreQuery;
}
/**
* converts query to
* @param params
* @param sortableFields
* @param sorts Sorting rules to apply to the data that is being queried
* @returns an array of sort queries
*/
export function buildSort(
@@ -371,7 +390,8 @@ export function buildSort(
switch (sort.type) {
case 'ducet':
const ducetSort: ESDucetSort = {};
ducetSort[sort.arguments.field + '.sort'] = sort.order;
ducetSort[`${sort.arguments.field}.sort`] = sort.order;
return ducetSort;
case 'distance':
const args: ESGeoDistanceSortArguments = {
@@ -400,6 +420,12 @@ export function buildSort(
});
}
/**
* Provides a script for sorting search results by prices
*
* @param universityRole User group which consumes university services
* @param field Field in which wanted offers with prices are located
*/
export function buildPriceSortScript(universityRole: keyof SCSportCoursePriceGroup, field: SCThingsField): string {
return `
// initialize the sort value with the maximum

View File

@@ -13,23 +13,27 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {Logger} from '@openstapps/logger';
import {Client} from 'elasticsearch';
import {readdir, readFile} from 'fs-extra';
import {resolve} from 'path';
import {logger} from '../../common';
/**
* Assembles an elasticsearch template with all resolved subType-references
* @param templateType
* @param templates
* @param inline
* @returns
* @param templateType Type used in the elasticsearch mapping
* @param templates Templates (elasticsearch mappings)
* @param inline Level of hierarchy
* @deprecated
*/
function assembleElasticsearchTemplate(templateType: string, templates: {[key: string]: any}, inline: number): any {
function assembleElasticsearchTemplate(
templateType: string,
// tslint:disable-next-line: no-any
templates: {[key: string]: any; },
inline: number): object {
const templateBase = JSON.parse(JSON.stringify(templates[templateType]));
if (inline) {
if (typeof inline !== 'undefined') {
delete templateBase.dynamic_templates;
}
@@ -45,12 +49,12 @@ function assembleElasticsearchTemplate(templateType: string, templates: {[key: s
try {
// extend the template by the properties of the basetemplate
templateBase.properties = Object.assign(
templateBase.properties,
templates['base.template.json'].mappings._default_.properties,
);
templateBase.properties = {...templateBase.properties,
...templates['base.template.json'].mappings._default_.properties,
};
} catch (e) {
logger.error('Failed to merge properties on: ' + templateType);
// tslint:disable-next-line: no-floating-promises
Logger.error(`Failed to merge properties on: ${templateType}`);
throw e;
}
const fieldKeys = Object.keys(templateBase.properties);
@@ -70,7 +74,7 @@ function assembleElasticsearchTemplate(templateType: string, templates: {[key: s
if (Array.isArray(field._typeRef)) {
let obj = {};
field._typeRef.forEach((subType: string) => {
obj = Object.assign(obj, assembleElasticsearchTemplate(subType, templates, inline + 1));
obj = {...obj, ...assembleElasticsearchTemplate(subType, templates, inline + 1)};
});
templateBase.properties[fieldKey] = obj;
} else {
@@ -82,25 +86,29 @@ function assembleElasticsearchTemplate(templateType: string, templates: {[key: s
}
});
}
return templateBase;
}
/**
* Reads all template files and returns the assembled template
*/
export async function getElasticsearchTemplate(): Promise<any> {
// TODO: check if redundant
export async function getElasticsearchTemplate(): Promise<object> {
// read in all templates
const elasticsearchFolder = resolve('.', 'src', 'storage', 'elasticsearch', 'templates');
const templates: {[key: string]: any} = {};
// tslint:disable-next-line: no-any
const templates: {[key: string]: any; } = {};
const fileNames = await readdir(elasticsearchFolder);
const availableTypes = fileNames.filter((fileName) => {
return Array.isArray(fileName.match(/\w*\.sc-type\.template\.json/i));
}).map((fileName) => {
return fileName.substring(0, fileName.indexOf('.sc-type.template.json'));
});
})
.map((fileName) => {
return fileName.substring(0, fileName.indexOf('.sc-type.template.json'));
});
const promises = fileNames.map(async (fileName) => {
const file = await readFile(resolve(elasticsearchFolder, fileName), 'utf8');
@@ -108,7 +116,7 @@ export async function getElasticsearchTemplate(): Promise<any> {
try {
templates[fileName] = JSON.parse(file.toString());
} catch (jsonParsingError) {
logger.error('Failed parsing file: ' + fileName);
await Logger.error(`Failed parsing file: ${fileName}`);
throw jsonParsingError;
}
});
@@ -119,23 +127,24 @@ export async function getElasticsearchTemplate(): Promise<any> {
availableTypes.forEach((configType) => {
template.mappings[configType.toLowerCase()] =
assembleElasticsearchTemplate(configType + '.sc-type.template.json', templates, 0);
assembleElasticsearchTemplate(`${configType}.sc-type.template.json`, templates, 0);
});
// this is like the base type (StappsCoreThing)
const baseProperties = template.mappings._default_.properties;
Object.keys(baseProperties).forEach((basePropertyName) => {
let field = baseProperties[basePropertyName];
field = templates[field._fieldRef];
template.mappings._default_.properties[basePropertyName] = field;
});
Object.keys(baseProperties)
.forEach((basePropertyName) => {
let field = baseProperties[basePropertyName];
field = templates[field._fieldRef];
template.mappings._default_.properties[basePropertyName] = field;
});
return template;
}
/**
* Puts a new global template
* @param client
* @param client An elasticsearch client to use
*/
export async function putTemplate(client: Client): Promise<void> {
return client.indices.putTemplate({