Mirror of https://gitlab.com/openstapps/openstapps.git (synced 2026-01-23 18:12:43 +00:00)

Commit 8b457c9911 (parent 42c7350c36), committed by Rainer Killinger
@@ -2,7 +2,6 @@
 # See https://stackoverflow.com/a/29932318
 /*
 # Except these files/folders
-!docs
 !lib
 !LICENSE
 !package.json
@@ -16,16 +16,16 @@
 ],
 "scripts": {
 "build": "npm run tslint && npm run compile",
-"compile": "rimraf lib && tsc && prepend lib/cli.js '#!/usr/bin/env node\n'",
 "changelog": "conventional-changelog -p angular -i CHANGELOG.md -s -r 0 && git add CHANGELOG.md && git commit -m 'docs: update changelog'",
 "check-configuration": "openstapps-configuration",
+"compile": "rimraf lib && tsc && prepend lib/cli.js '#!/usr/bin/env node\n'",
 "documentation": "typedoc --includeDeclarations --mode modules --out docs --readme README.md --listInvalidSymbolLinks src",
-"start": "NODE_CONFIG_ENV=elasticsearch ALLOW_NO_TRANSPORT=true node ./lib/cli.js",
-"tslint": "tslint -p tsconfig.json -c tslint.json 'src/**/*.ts'",
 "postversion": "npm run changelog",
 "prepublishOnly": "npm ci && npm run build",
 "preversion": "npm run prepublishOnly",
-"push": "git push && git push origin \"v$npm_package_version\""
+"push": "git push && git push origin \"v$npm_package_version\"",
+"start": "NODE_CONFIG_ENV=elasticsearch ALLOW_NO_TRANSPORT=true node ./lib/cli.js",
+"tslint": "tslint -p tsconfig.json -c tslint.json 'src/**/*.ts'"
 },
 "dependencies": {
 "@openstapps/core": "0.19.0",
src/app.ts (59 changed lines)

@@ -19,26 +19,33 @@ import {
 SCSyntaxErrorResponse,
 SCUnsupportedMediaTypeErrorResponse,
 } from '@openstapps/core';
+import {Logger} from '@openstapps/logger';
 import * as config from 'config';
 import * as cors from 'cors';
 import * as express from 'express';
 import * as morgan from 'morgan';
 import {join} from 'path';
-import {configFile, isTestEnvironment, logger, mailer, validator} from './common';
+import {configFile, isTestEnvironment, mailer, validator} from './common';
-import {MailQueue} from './notification/MailQueue';
+import {MailQueue} from './notification/mail-queue';
-import {bulkAddRouter} from './routes/BulkAddRoute';
+import {bulkAddRouter} from './routes/bulk-add-route';
-import {bulkDoneRouter} from './routes/BulkDoneRoute';
+import {bulkDoneRouter} from './routes/bulk-done-route';
-import {bulkRouter} from './routes/BulkRoute';
+import {bulkRouter} from './routes/bulk-route';
-import {indexRouter} from './routes/IndexRoute';
+import {indexRouter} from './routes/index-route';
-import {multiSearchRouter} from './routes/MultiSearchRoute';
+import {multiSearchRouter} from './routes/multi-search-route';
-import {searchRouter} from './routes/SearchRoute';
+import {searchRouter} from './routes/search-route';
-import {thingUpdateRouter} from './routes/ThingUpdateRoute';
+import {thingUpdateRouter} from './routes/thing-update-route';
-import {BulkStorage} from './storage/BulkStorage';
+import {BulkStorage} from './storage/bulk-storage';
-import {DatabaseConstructor} from './storage/Database';
+import {DatabaseConstructor} from './storage/database';
-import {Elasticsearch} from './storage/elasticsearch/Elasticsearch';
+import {Elasticsearch} from './storage/elasticsearch/elasticsearch';

+/**
+* Created express application
+*/
 export const app = express();

+/**
+* Configure the backend
+*/
 async function configureApp() {
 // request loggers have to be the first middleware to be set in express
 app.use(morgan('dev'));
@@ -62,10 +69,11 @@ async function configureApp() {
 const err = new SCUnsupportedMediaTypeErrorResponse(isTestEnvironment);
 res.status(err.statusCode);
 res.json(err);
+
 return;
 }

-const bodyBuffer: any[] = [];
+const bodyBuffer: Buffer[] = [];
 // we don't know the full size, the only way we can get is by adding up all individual chunk sizes
 let bodySize = 0;
 const chunkGatherer = (chunk: Buffer) => {
@@ -78,6 +86,7 @@ async function configureApp() {
 const err = new SCRequestBodyTooLargeErrorResponse(isTestEnvironment);
 res.status(err.statusCode);
 res.json(err);
+
 return;
 }
 // push the chunk in the buffer
@@ -85,7 +94,8 @@ async function configureApp() {
 };

 const endCallback = () => {
-req.body = Buffer.concat(bodyBuffer).toString();
+req.body = Buffer.concat(bodyBuffer)
+.toString();

 try {
 req.body = JSON.parse(req.body);
@@ -94,13 +104,15 @@ async function configureApp() {
 const err = new SCSyntaxErrorResponse(catchErr.message, isTestEnvironment);
 res.status(err.statusCode);
 res.json(err);
+
 return;
 }
 };
-req.on('data', chunkGatherer).on('end', endCallback);
+req.on('data', chunkGatherer)
+.on('end', endCallback);
 });

-const databases: {[name: string]: DatabaseConstructor} = {
+const databases: {[name: string]: DatabaseConstructor; } = {
 elasticsearch: Elasticsearch,
 };

@@ -113,8 +125,7 @@ async function configureApp() {
 // validation failed
 if (configValidation.errors.length > 0) {
 throw new Error(
-'Validation of config file failed. Errors were: ' +
-JSON.stringify(configValidation.errors),
+`Validation of config file failed. Errors were: ${JSON.stringify(configValidation.errors)}`,
 );
 }

@@ -130,11 +141,13 @@ async function configureApp() {
 typeof mailer !== 'undefined' && config.has('internal.monitoring') ? new MailQueue(mailer) : undefined,
 );

+await database.init();
+
 if (typeof database === 'undefined') {
 throw new Error('No implementation for configured database found. Please check your configuration.');
 }

-logger.ok('Validated config file sucessfully');
+Logger.ok('Validated config file sucessfully');

 // treats /foo and /foo/ as two different routes
 // see http://expressjs.com/en/api.html#app.set
@@ -194,8 +207,10 @@ async function configureApp() {
 // TODO: implement a route to register plugins
 }

-configureApp().then(() => {
-logger.ok('Sucessfully configured express server');
-}).catch((err) => {
+configureApp()
+.then(() => {
+Logger.ok('Sucessfully configured express server');
+})
+.catch((err) => {
 throw err;
 });
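Most of the changes in src/app.ts and the files below follow one pattern: the shared logger instance exported from ./common is dropped, and the static Logger class from '@openstapps/logger' is called directly, with an await added where the call returns a promise (Logger.error in this diff). A minimal sketch of the before/after call style, assuming only the static ok/warn/error methods that appear in this commit:

import {Logger} from '@openstapps/logger';

// before (old code, for comparison only):
//   import {logger} from './common';
//   logger.ok('Listening on ' + bind);
//   logger.error(bind + ' is already in use');

// after: static methods on Logger, template literals, and an awaited error()
async function reportBinding(bind: string): Promise<void> {
  Logger.ok(`Listening on ${bind}`);
  await Logger.error(`${bind} is already in use`);
}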
src/cli.ts (20 changed lines)

@@ -13,13 +13,14 @@
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */
+import {Logger} from '@openstapps/logger';
 import * as http from 'http';
 import {app} from './app';
-import {logger} from './common';

 /**
 * Get port from environment and store in Express.
 */
+// tslint:disable-next-line: strict-boolean-expressions
 const port = normalizePort(process.env.PORT || '3000');
 // TODO: Can we remove that? It doesn't look like it is read at all.
 app.set('port', port);
@@ -58,23 +59,24 @@ function normalizePort(value: string) {
 /**
 * Event listener for HTTP server "error" event.
 */
-function onError(error: Error | any) {
+// tslint:disable-next-line: completed-docs
+async function onError(error: { code: string; syscall: string; }) {
 if (error.syscall !== 'listen') {
 throw error;
 }

 const bind = typeof port === 'string'
-? 'Pipe ' + port
-: 'Port ' + port;
+? `Pipe ${port}`
+: `Port ${port}`;

 // handle specific listen errors with friendly messages
 switch (error.code) {
 case 'EACCES':
-logger.error(bind + ' requires elevated privileges');
+await Logger.error(`${bind} requires elevated privileges`);
 process.exit(1);
 break;
 case 'EADDRINUSE':
-logger.error(bind + ' is already in use');
+await Logger.error(`${bind} is already in use`);
 process.exit(1);
 break;
 default:
@@ -88,7 +90,7 @@ function onError(error: Error | any) {
 function onListening() {
 const addr = server.address();
 const bind = typeof addr === 'string'
-? 'pipe ' + addr
-: 'port ' + addr.port;
-logger.ok('Listening on ' + bind);
+? `pipe ${addr}`
+: `port ${addr.port}`;
+Logger.ok(`Listening on ${bind}`);
 }
@@ -15,16 +15,25 @@
 */
 import {SCConfigFile} from '@openstapps/core';
 import {Validator} from '@openstapps/core-tools/lib/validate';
-import {Logger} from '@openstapps/logger';
 import * as config from 'config';
-import {BackendTransport} from './notification/BackendTransport';
+import {BackendTransport} from './notification/backend-transport';

+/**
+* Instance of the transport for sending mails
+*/
 export const mailer = BackendTransport.getTransportInstance();

-export const logger = new Logger(mailer);
+/**
+* Config file content
+*/
 export const configFile: SCConfigFile = config.util.toObject();

+/**
+* A validator instance to check if something is a valid JSON object (e.g. a request or a thing)
+*/
 export const validator = new Validator();

+/**
+* Provides information if the backend is executed in the "test" (non-production) environment
+*/
 export const isTestEnvironment = process.env.NODE_ENV !== 'production';
@@ -13,11 +13,16 @@
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */
-import {SMTP} from '@openstapps/logger/lib/SMTP';
+import {SMTP} from '@openstapps/logger/lib/smtp';
-import {Transport, TransportWithVerification} from '@openstapps/logger/lib/Transport';
+import {Transport, VerifiableTransport} from '@openstapps/logger/lib/transport';

-export function isTransportWithVerification(instance: Transport): instance is TransportWithVerification {
-return typeof (instance as TransportWithVerification).verify === 'function';
+/**
+* Provides information if a transport is a verifiable transport
+*
+* @param instance A transport that needs to be checked
+*/
+export function isTransportWithVerification(instance: Transport): instance is VerifiableTransport {
+return typeof (instance as VerifiableTransport).verify === 'function';
 }

 /**
@@ -26,18 +31,32 @@ export function isTransportWithVerification(instance: Transport): instance is Tr
 * In the future this may support more than loading SMTP as a transport.
 */
 export class BackendTransport {
+/**
+* One (and only one) instance of the backend transport
+*/
 private static _instance: BackendTransport;
-private waitingForVerification: boolean;
+
+/**
+* Stores information if transport is in state of waiting for the verification
+*/
+private readonly waitingForVerification: boolean;

+/**
+* A (SMTP) transport which includes settings for sending mails
+*/
 protected transport: SMTP | undefined;

+/**
+* Provides an instance of a transport
+*/
 public static getTransportInstance(): SMTP | undefined {
-if (this._instance) {
+if (typeof BackendTransport._instance !== 'undefined') {
-return this._instance.transport;
+return BackendTransport._instance.transport;
 }

-this._instance = new this();
-return this._instance.transport;
+BackendTransport._instance = new BackendTransport();
+
+return BackendTransport._instance.transport;
 }

 private constructor() {
@@ -58,19 +77,24 @@ export class BackendTransport {
 if (typeof this.transport !== 'undefined' && isTransportWithVerification(this.transport)) {
 this.waitingForVerification = true;

-this.transport.verify().then((message) => {
-if (typeof message === 'string') {
-// tslint:disable-next-line:no-console
-console.log(message);
-}
-}).catch((err) => {
-throw err;
-});
+this.transport.verify()
+.then((message) => {
+if (typeof message === 'string') {
+// tslint:disable-next-line:no-console
+console.log(message);
+}
+})
+.catch((err) => {
+throw err;
+});
 } else {
 this.waitingForVerification = false;
 }
 }

+/**
+* Provides information if transport is in state of waiting for the verification
+*/
 public isWaitingForVerification(): boolean {
 return this.waitingForVerification;
 }
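The getTransportInstance() change above swaps `this._instance` and `new this()` inside the static method for explicit references to the class name. A minimal, hypothetical sketch of that pattern (not code from this repository), showing why naming the class keeps the static lookup unambiguous even if the method is detached or called on a subclass:

class ExampleTransport {
  private static _instance: ExampleTransport;

  static getInstance(): ExampleTransport {
    // reference the class by name instead of `this` inside a static member
    if (typeof ExampleTransport._instance === 'undefined') {
      ExampleTransport._instance = new ExampleTransport();
    }

    return ExampleTransport._instance;
  }

  private constructor() {}
}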
@@ -14,15 +14,25 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.nse along with
 * this program. If not, see <https://www.gnu.org/licenses/>.
 */
-import {SMTP} from '@openstapps/logger/lib/SMTP';
+import {Logger} from '@openstapps/logger';
+import {SMTP} from '@openstapps/logger/lib/smtp';
 import {MailOptions} from 'nodemailer/lib/sendmail-transport';
 import * as Queue from 'promise-queue';
-import {logger} from '../common';
 /**
 * A queue that can send mails in serial
 */
 export class MailQueue {

+/**
+* Number of allowed verification attempts after which the initialization of transport fails
+*/
+static readonly MAX_VERIFICATION_ATTEMPTS = 5;
+
+/**
+* Number of milliseconds after which verification check should be repeated
+*/
+static readonly VERIFICATION_TIMEOUT = 5000;
+
 /**
 * A queue that saves mails, before the transport is ready. When
 * the transport gets ready this mails are getting pushed in to
@@ -42,9 +52,9 @@ export class MailQueue {

 /**
 * Creates a mail queue
-* @param transport
+* @param transport Transport which is used for sending mails
 */
-constructor(private transport: SMTP) {
+constructor(private readonly transport: SMTP) {

 this.queue = new Queue(1);

@@ -62,35 +72,36 @@ export class MailQueue {
 */
 private checkForVerification() {

-if (this.verificationCounter > 5) {
+if (this.verificationCounter > MailQueue.MAX_VERIFICATION_ATTEMPTS) {
 throw new Error('Failed to initialize the SMTP transport for the mail queue');
 }

 if (!this.transport.isVerified()) {
 this.verificationCounter++;
-setTimeout(() => {
+setTimeout(async () => {
-logger.warn('Transport not verified yet. Trying to send mails here...');
+Logger.warn('Transport not verified yet. Trying to send mails here...');
 this.checkForVerification();
-}, 5000);
+}, MailQueue.VERIFICATION_TIMEOUT);
 } else {
-logger.ok('Transport for mail queue was verified. We can send mails now');
+Logger.ok('Transport for mail queue was verified. We can send mails now');
 // if the transport finally was verified send all our mails from the dry queue
-this.dryQueue.forEach((mail) => {
+this.dryQueue.forEach(async (mail) => {
-this.queue.add<string>(() => (this.transport as SMTP).sendMail(mail));
+await this.queue.add<string>(() => (this.transport as SMTP).sendMail(mail));
 });
 }
 }

 /**
 * Push a mail into the queue so it gets send when the queue is ready
-* @param mail
+*
+* @param mail Information required for sending a mail
 */
-public push(mail: MailOptions) {
+public async push(mail: MailOptions) {
 if (!this.transport.isVerified()) { // the transport has verification, but is not verified yet
 // push to a dry queue which gets pushed to the real queue when the transport is verified
 this.dryQueue.push(mail);
 } else {
-this.queue.add<string>(() => (this.transport as SMTP).sendMail(mail));
+await this.queue.add<string>(() => (this.transport as SMTP).sendMail(mail));
 }
 }
 }
@@ -14,10 +14,14 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */
 import {SCBulkAddRequest, SCBulkAddResponse, SCBulkAddRoute, SCNotFoundErrorResponse} from '@openstapps/core';
-import {isTestEnvironment, logger} from '../common';
-import {BulkStorage} from '../storage/BulkStorage';
-import {createRoute} from './Route';
+import {Logger} from '@openstapps/logger';
+import {isTestEnvironment} from '../common';
+import {BulkStorage} from '../storage/bulk-storage';
+import {createRoute} from './route';

+/**
+* Contains information for using the route for adding bulks
+*/
 const bulkRouteModel = new SCBulkAddRoute();

 /**

@@ -27,7 +31,7 @@ export const bulkAddRouter = createRoute<SCBulkAddResponse>(
 bulkRouteModel,
 async (request: SCBulkAddRequest, app, params) => {

-if (!params || typeof params.UID !== 'string') {
+if (typeof params === 'undefined' || typeof params.UID !== 'string') {
 throw new Error('UID of Bulk was not given, but route with obligatory parameter was called');
 }

@@ -35,7 +39,7 @@ export const bulkAddRouter = createRoute<SCBulkAddResponse>(
 const bulk = await bulkMemory.read(params.UID);

 if (typeof bulk === 'undefined') {
-logger.warn(`Bulk with ${params.UID} not found.`);
+Logger.warn(`Bulk with ${params.UID} not found.`);
 throw new SCNotFoundErrorResponse(isTestEnvironment);
 }

@@ -14,10 +14,14 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */
 import {SCBulkDoneRequest, SCBulkDoneResponse, SCBulkDoneRoute, SCNotFoundErrorResponse} from '@openstapps/core';
-import {isTestEnvironment, logger} from '../common';
-import {BulkStorage} from '../storage/BulkStorage';
-import {createRoute} from './Route';
+import {Logger} from '@openstapps/logger';
+import {isTestEnvironment} from '../common';
+import {BulkStorage} from '../storage/bulk-storage';
+import {createRoute} from './route';

+/**
+* Contains information for using the route for closing bulks
+*/
 const bulkDoneRouteModel = new SCBulkDoneRoute();

 /**

@@ -27,7 +31,7 @@ export const bulkDoneRouter = createRoute<SCBulkDoneResponse>(
 bulkDoneRouteModel,
 async (_request: SCBulkDoneRequest, app, params) => {

-if (!params || typeof params.UID !== 'string') {
+if (typeof params === 'undefined' || typeof params.UID !== 'string') {
 throw new Error('UID of Bulk was not given, but route with obligatory parameter was called');
 }

@@ -35,12 +39,13 @@ export const bulkDoneRouter = createRoute<SCBulkDoneResponse>(
 const bulk = await bulkMemory.read(params.UID);

 if (typeof bulk === 'undefined') {
-logger.warn(`Bulk with ${params.UID} not found.`);
+Logger.warn(`Bulk with ${params.UID} not found.`);
 throw new SCNotFoundErrorResponse(isTestEnvironment);
 }

 bulk.state = 'done';
 await bulkMemory.markAsDone(bulk);
+
 return {};
 },
 );
@@ -14,9 +14,12 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */
 import {SCBulkRequest, SCBulkResponse, SCBulkRoute} from '@openstapps/core';
-import {BulkStorage} from '../storage/BulkStorage';
+import {BulkStorage} from '../storage/bulk-storage';
-import {createRoute} from './Route';
+import {createRoute} from './route';

+/**
+* Contains information for using the route for creating bulks
+*/
 const bulkRouteModel = new SCBulkRoute();

 /**

@@ -26,6 +29,7 @@ export const bulkRouter = createRoute<SCBulkResponse>(
 bulkRouteModel,
 async (request: SCBulkRequest, app) => {
 const bulkMemory: BulkStorage = app.get('bulk');
-return await bulkMemory.create(request);
+
+return bulkMemory.create(request);
 },
 );
@@ -13,6 +13,9 @@
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */
+/**
+* Strings that can be used as HTTP verbs (e.g. in requests)
+*/
 export type HTTPVerb = 'all' |
 'get' |
 'post' |

@@ -38,6 +41,11 @@ export type HTTPVerb = 'all' |
 'unlock' |
 'unsubscribe';

+/**
+* Provides information if a text (representing a method) is an HTTP verb
+*
+* @param method A text (representing a method) to check
+*/
 export function isHttpMethod(method: string): method is HTTPVerb {
 return ['get', 'post', 'put'].indexOf(method) > -1;
 }
@@ -15,8 +15,11 @@
 */
 import {SCIndexResponse, SCIndexRoute} from '@openstapps/core';
 import {configFile} from '../common';
-import {createRoute} from './Route';
+import {createRoute} from './route';

+/**
+* Contains information for using the index route
+*/
 const indexRouteModel = new SCIndexRoute();

 /**
@@ -21,9 +21,11 @@ import {
 SCTooManyRequestsErrorResponse,
 } from '@openstapps/core';
 import {configFile, isTestEnvironment} from '../common';
-import {BulkStorage} from '../storage/BulkStorage';
+import {BulkStorage} from '../storage/bulk-storage';
-import {createRoute} from './Route';
+import {createRoute} from './route';
+/**
+* Contains information for using the multi search route
+*/
 const multiSearchRouteModel = new SCMultiSearchRoute();

 /**

@@ -41,13 +43,13 @@ export const multiSearchRouter = createRoute<SCMultiSearchResponse | SCTooManyRe
 }

 // get a map of promises for each query
-const searchRequests = queryNames.map((queryName) => {
+const searchRequests = queryNames.map(async (queryName) => {
 return bulkMemory.database.search(request[queryName]);
 });

 const listOfSearchResponses = await Promise.all(searchRequests);

-const response: { [queryName: string]: SCSearchResponse } = {};
+const response: { [queryName: string]: SCSearchResponse; } = {};
 queryNames.forEach((queryName, index) => {
 response[queryName] = listOfSearchResponses[index];
 });
@@ -19,24 +19,26 @@ import {
 SCRoute,
 SCValidationErrorResponse,
 } from '@openstapps/core';
+import {Logger} from '@openstapps/logger';
 import {Application, Router} from 'express';
 import PromiseRouter from 'express-promise-router';
 import {ValidationError} from 'jsonschema';
-import {isTestEnvironment, logger, validator} from '../common';
+import {isTestEnvironment, validator} from '../common';
-import {isHttpMethod} from './HTTPTypes';
+import {isHttpMethod} from './http-types';

 /**
-* Creates a router from a route class (model of a route) and a handler function which implements the logic
+* Creates a router from a route class and a handler function which implements the logic
 *
 * The given router performs a request and respone validation, sets status codes and checks if the given handler
 * only returns errors that are allowed for the client to see
 *
-* @param routeClass
-* @param handler
+* @param routeClass Model of a route
+* @param handler Implements the logic of the route
 */
 export function createRoute<RETURNTYPE>(
 routeClass: SCRoute,
-handler: (validatedBody: any, app: Application, params?: { [parameterName: string]: string }) => Promise<RETURNTYPE>,
+// tslint:disable-next-line: no-any
+handler: (validatedBody: any, app: Application, params?: { [parameterName: string]: string; }) => Promise<RETURNTYPE>,
 ): Router {
 // create router
 const router = PromiseRouter({mergeParams: true});

@@ -80,7 +82,8 @@ export function createRoute<RETURNTYPE>(
 );
 res.status(error.statusCode);
 res.json(error);
-logger.warn(error);
+Logger.warn(error);
+
 return;
 }

@@ -103,7 +106,8 @@ export function createRoute<RETURNTYPE>(
 );
 res.status(internalServerError.statusCode);
 res.json(internalServerError);
-logger.warn(internalServerError);
+Logger.warn(internalServerError);
+
 return;
 }

@@ -118,7 +122,7 @@ export function createRoute<RETURNTYPE>(
 // respond with the error from the handler
 res.status(error.statusCode);
 res.json(error);
-logger.warn(error);
+Logger.warn(error);
 } else {
 // the error is not allowed so something went wrong
 const internalServerError = new SCInternalServerErrorResponse(

@@ -127,7 +131,7 @@ export function createRoute<RETURNTYPE>(
 );
 res.status(internalServerError.statusCode);
 res.json(internalServerError);
-logger.error(error);
+await Logger.error(error);
 }
 }
 });

@@ -140,7 +144,7 @@ export function createRoute<RETURNTYPE>(
 const error = new SCMethodNotAllowedErrorResponse(isTestEnvironment);
 res.status(error.statusCode);
 res.json(error);
-logger.warn(error);
+Logger.warn(error);
 });

 // return router
@@ -14,9 +14,12 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */
 import {SCSearchRequest, SCSearchResponse, SCSearchRoute} from '@openstapps/core';
-import {BulkStorage} from '../storage/BulkStorage';
+import {BulkStorage} from '../storage/bulk-storage';
-import {createRoute} from './Route';
+import {createRoute} from './route';

+/**
+* Contains information for using the search route
+*/
 const searchRouteModel = new SCSearchRoute();

 /**

@@ -24,5 +27,6 @@ const searchRouteModel = new SCSearchRoute();
 */
 export const searchRouter = createRoute<SCSearchResponse>(searchRouteModel, async ( request: SCSearchRequest, app) => {
 const bulkMemory: BulkStorage = app.get('bulk');
-return await bulkMemory.database.search(request);
+
+return bulkMemory.database.search(request);
 });
@@ -14,9 +14,12 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */
 import {SCThingUpdateRequest, SCThingUpdateResponse, SCThingUpdateRoute} from '@openstapps/core';
-import {BulkStorage} from '../storage/BulkStorage';
+import {BulkStorage} from '../storage/bulk-storage';
-import {createRoute} from './Route';
+import {createRoute} from './route';

+/**
+* Contains information for using the route for updating single things
+*/
 const thingUpdateRouteModel = new SCThingUpdateRoute();

 /**

@@ -27,6 +30,7 @@ export const thingUpdateRouter = createRoute<SCThingUpdateResponse>(
 async (request: SCThingUpdateRequest, app) => {
 const bulkMemory: BulkStorage = app.get('bulk');
 await bulkMemory.database.put(request);
+
 return {};
 },
 );
@@ -14,13 +14,16 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */
 import {SCBulkRequest, SCThingType} from '@openstapps/core';
+import {Logger} from '@openstapps/logger';
 import * as moment from 'moment';
 import * as NodeCache from 'node-cache';
 import {promisify} from 'util';
 import {v4} from 'uuid';
-import {logger} from '../common';
-import {Database} from './Database';
+import {Database} from './database';

+/**
+* Possible operations with a bulk
+*/
 export type BulkOperation = 'create' | 'expired' | 'update';

 /**

@@ -68,7 +71,7 @@ export class Bulk implements SCBulkRequest {

 /**
 * Creates a new bulk process
-* @param request
+* @param request Data needed for requesting a bulk
 */
 constructor(request: SCBulkRequest) {
 this.uid = v4();

@@ -77,7 +80,9 @@ export class Bulk implements SCBulkRequest {
 if (typeof request.expiration === 'string') {
 this.expiration = request.expiration;
 } else {
-this.expiration = moment().add(1, 'hour').toISOString();
+this.expiration = moment()
+.add(1, 'hour')
+.toISOString();
 }
 // when should this process be finished
 // where does the process come from

@@ -91,8 +96,10 @@
 * Cache for bulk-processes
 */
 export class BulkStorage {
-private cache: NodeCache;
+/**
+* Cache for temporary storage
+*/
+private readonly cache: NodeCache;

 /**
 * Creates a new BulkStorage

@@ -104,11 +111,11 @@ export class BulkStorage {
 // the cache is checked every 60 seconds
 this.cache = new NodeCache({stdTTL: 3600, checkperiod: 60});

-this.cache.on('expired', (_key, bulk: Bulk) => {
+this.cache.on('expired', async (_key, bulk: Bulk) => {
 // if the bulk is not done
 if (bulk.state !== 'done') {
 // the database can delete the data associated with this bulk
-this.database.bulkExpired(bulk);
+await this.database.bulkExpired(bulk);
 }
 });
 }

@@ -119,11 +126,14 @@ export class BulkStorage {
 * @returns the bulk process that was saved
 */
 private async save(bulk: Bulk): Promise<Bulk> {
-const expirationInSeconds = moment(bulk.expiration).diff(moment.now()) / 1000;
-logger.info('Bulk expires in ', expirationInSeconds, 'seconds');
+const expirationInSeconds = moment(bulk.expiration)
+// tslint:disable-next-line: no-magic-numbers
+.diff(moment.now()) / 1000;
+Logger.info('Bulk expires in ', expirationInSeconds, 'seconds');

 // save the item in the cache with it's expected expiration
 await promisify<string, Bulk, number>(this.cache.set)(bulk.uid, bulk, expirationInSeconds);

 return bulk;
 }

@@ -141,6 +151,7 @@ export class BulkStorage {

 // tell the database that the bulk was created
 await this.database.bulkCreated(bulk);
+
 return bulk;
 }

@@ -175,7 +186,8 @@ export class BulkStorage {
 await this.save(bulk);

 // tell the database that this is the new bulk
-this.database.bulkUpdated(bulk);
+await this.database.bulkUpdated(bulk);
+
 return;
 }

@@ -185,7 +197,7 @@ export class BulkStorage {
 * @returns a promise that contains a bulk
 */
 public async read(uid: string): Promise<Bulk | undefined> {
-return await promisify<string, any>(this.cache.get)(uid);
+return promisify<string, Bulk | undefined>(this.cache.get)(uid);
 }

 }
@@ -13,18 +13,25 @@
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */
-import {SCSearchQuery, SCSearchResponse, SCThings, SCUuid} from '@openstapps/core';
+import {SCConfigFile, SCSearchQuery, SCSearchResponse, SCThings, SCUuid} from '@openstapps/core';
-import {Bulk} from './BulkStorage';
+import {MailQueue} from '../notification/mail-queue';
+import {Bulk} from './bulk-storage';

-export type DatabaseConstructor = new (...args: any) => Database;
+/**
+* Creates an instance of a database
+*/
+export type DatabaseConstructor = new (config: SCConfigFile, mailQueue?: MailQueue) => Database;
+
+/**
+* Defines what one database class needs to have defined
+*/
 export interface Database {

 /**
 * Gets called if a bulk was created
 *
 * The database should
-* @param bulk
+* @param bulk A bulk to be created
 */
 bulkCreated(bulk: Bulk): Promise<void>;

@@ -32,7 +39,7 @@ export interface Database {
 * Gets called if a bulk expires
 *
 * The database should delete all data that is associtated with this bulk
-* @param bulk
+* @param bulk A bulk which data needs to be removed
 */
 bulkExpired(bulk: Bulk): Promise<void>;

@@ -42,20 +49,25 @@ export interface Database {
 * If the database holds a bulk with the same type and source as the given
 * bulk it should be replaced by the given one
 *
-* @param bulk
+* @param bulk A new bulk whose data should be saved instead of the data of the old bulk
 */
 bulkUpdated(bulk: Bulk): Promise<void>;

 /**
 * Get a single document
-* @param uid
+* @param uid Unique identifier of the document
 */
 get(uid: SCUuid): Promise<SCThings>;

+/**
+* Initialize the database (call and wait for all needed methods)
+*/
+init(): Promise<void>;
+
 /**
 * Add a thing to an existing bulk
-* @param object
-* @param bulkId
+* @param thing A StAppsCore thing to be added
+* @param bulk A bulk to which the thing should be added
 */
 post(thing: SCThings, bulk: Bulk): Promise<void>;

@@ -64,13 +76,13 @@ export interface Database {
 *
 * Currently it is not possible to put an non-existing object
 *
-* @param thing
+* @param thing A StAppsCore thing to be added to a bulk
 */
 put(thing: SCThings): Promise<void>;

 /**
 * Search for things
-* @param params
+* @param params Parameters which form a search query to search the backend data
 */
 search(params: SCSearchQuery): Promise<SCSearchResponse>;
 }
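For illustration only, a hypothetical skeleton that would satisfy the Database interface as it stands after this change, including the new init() method and the narrowed DatabaseConstructor signature (config plus optional mail queue); the class name and the placeholder bodies are not part of the repository:

import {SCConfigFile, SCSearchQuery, SCSearchResponse, SCThings, SCUuid} from '@openstapps/core';
import {MailQueue} from '../notification/mail-queue';
import {Bulk} from './bulk-storage';
import {Database} from './database';

export class InMemoryDatabase implements Database {
  // matches DatabaseConstructor: new (config: SCConfigFile, mailQueue?: MailQueue) => Database
  constructor(private readonly config: SCConfigFile, private readonly mailQueue?: MailQueue) {}

  async init(): Promise<void> { /* connect to / prepare the storage backend */ }

  async bulkCreated(bulk: Bulk): Promise<void> { /* allocate storage for the bulk */ }

  async bulkExpired(bulk: Bulk): Promise<void> { /* delete data associated with the bulk */ }

  async bulkUpdated(bulk: Bulk): Promise<void> { /* replace data of the previous bulk of same type and source */ }

  async get(uid: SCUuid): Promise<SCThings> { throw new Error('Not implemented'); }

  async post(thing: SCThings, bulk: Bulk): Promise<void> { /* add the thing to the given bulk */ }

  async put(thing: SCThings): Promise<void> { /* update an already existing thing */ }

  async search(params: SCSearchQuery): Promise<SCSearchResponse> { throw new Error('Not implemented'); }
}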
@@ -16,6 +16,9 @@
 import {SCBackendAggregationConfiguration, SCFacet, SCThingType} from '@openstapps/core';
 import {AggregationSchema} from './common';

+/**
+* Provide information on which type (or on all) an aggregation happens
+*/
 export type aggregationType = SCThingType | '@all';

 /**

@@ -30,7 +33,7 @@ export function buildAggregations(aggsConfig: SCBackendAggregationConfiguration[

 result[aggregation.fieldName] = {
 terms: {
-field: aggregation.fieldName + '.raw',
+field: `${aggregation.fieldName}.raw`,
 size: 1000,
 },
 };

@@ -43,7 +46,14 @@
 * An elasticsearch aggregation bucket
 */
 interface Bucket {
+/**
+* Number of documents in the agregation bucket
+*/
 doc_count: number;
+
+/**
+* Text representing the documents in the bucket
+*/
 key: string;
 }

@@ -52,7 +62,14 @@ interface Bucket {
 */
 interface AggregationResponse {
 [field: string]: {
+/**
+* Buckets in an aggregation
+*/
 buckets: Bucket[];
+
+/**
+* Number of documents in an aggregation
+*/
 doc_count?: number;
 };
 }

@@ -79,10 +96,11 @@ export function parseAggregations(
 key: bucket.key,
 };
 }),
-field: aggregationSchema[aggregationName].terms.field + '.raw',
+field: `${aggregationSchema[aggregationName].terms.field}.raw`,
 };

 facets.push(facet);
 });
+
 return facets;
 }
@@ -15,7 +15,9 @@
 */
 import {SCThingType} from '@openstapps/core';
 import {SCThing} from '@openstapps/core';
+import {NameList} from 'elasticsearch';

+/* tslint:disable:completed-docs */ // TODO: document properties of interfaces
 /**
 * An elasticsearch bucket aggregation
 * @see https://www.elastic.co/guide/en/elasticsearch/reference/5.5/search-aggregations-bucket.html

@@ -60,9 +62,11 @@ export interface ElasticsearchObject<T extends SCThing> {
 _source: T;
 _type: string;
 _version?: number;
-fields?: any;
+fields?: NameList;
+// tslint:disable: no-any
 highlight?: any;
 inner_hits?: any;
+// tslint:enable: no-any
 matched_queries?: string[];
 sort?: string[];
 }

@@ -156,7 +160,7 @@ export interface ESBooleanFilter<T> {
 export interface ESFunctionScoreQuery {
 function_score: {
 functions: ESFunctionScoreQueryFunction[];
-query: ESBooleanFilter<any>;
+query: ESBooleanFilter<unknown>;
 score_mode: 'multiply';
 };
 }
@@ -23,19 +23,21 @@ import {
 SCThingType,
 SCUuid,
 } from '@openstapps/core';
+import {Logger} from '@openstapps/logger';
 import * as ES from 'elasticsearch';
 import * as moment from 'moment';
-import {logger} from '../../common';
-import {MailQueue} from '../../notification/MailQueue';
-import {Bulk} from '../BulkStorage';
-import {Database} from '../Database';
+import {MailQueue} from '../../notification/mail-queue';
+import {Bulk} from '../bulk-storage';
+import {Database} from '../database';
 import {buildAggregations, parseAggregations} from './aggregations';
 import {AggregationSchema, ElasticsearchConfig, ElasticsearchObject} from './common';
 import * as Monitoring from './monitoring';
 import {buildQuery, buildSort} from './query';
 import {putTemplate} from './templating';

-// this will match index names such as stapps_<type>_<source>_<random suffix>
+/**
+* Matches index names such as stapps_<type>_<source>_<random suffix>
+*/
 const indexRegex = /^stapps_([A-z0-9_]+)_([a-z0-9-_]+)_([-a-z0-9^_]+)$/;

 /**
@@ -43,6 +45,14 @@ const indexRegex = /^stapps_([A-z0-9_]+)_([a-z0-9-_]+)_([-a-z0-9^_]+)$/;
 */
 export class Elasticsearch implements Database {
+
+/**
+* Length of the index UID used for generation of its name
+*/
+static readonly INDEX_UID_LENGTH = 8;
+
+/**
+* Holds aggregations
+*/
 aggregationsSchema: AggregationSchema;

 /**
@@ -53,25 +63,117 @@ export class Elasticsearch implements Database {
 [scType: string]: {
 // each source is assigned a index name in elasticsearch
 [source: string]: string;
-},
+};
 };

+/**
+* Elasticsearch client
+*/
 client: ES.Client;
+
+/**
+* Queue of mails to be sent
+*/
+mailQueue: MailQueue | undefined;
+
+/**
+* Stores information if elasticsearch is ready (connection to it has been established)
+*/
 ready: boolean;
+
+/**
+* Get the url of elasticsearch
+*/
+static getElasticsearchUrl(): string {
+// check if we have a docker link
+if (process.env.ES_PORT_9200_TCP_ADDR !== undefined && process.env.ES_PORT_9200_TCP_PORT !== undefined) {
+return `${process.env.ES_PORT_9200_TCP_ADDR}:${process.env.ES_PORT_9200_TCP_PORT}`;
+}
+
+// default
+return 'localhost:9200';
+}
+
+/**
+* Gets the index name in elasticsearch for one SCThingType
+* @param type SCThingType of data in the index
+* @param source source of data in the index
+* @param bulk bulk process which created this index
+*/
+static getIndex(type: SCThingType, source: string, bulk: SCBulkResponse) {
+return `stapps_${type.toLowerCase()
+.replace(' ', '_')}_${source}_${Elasticsearch.getIndexUID(bulk.uid)}`;
+}
+
+/**
+* Provides the index UID (for its name) from the bulk UID
+* @param uid Bulk UID
+*/
+static getIndexUID(uid: SCUuid) {
+return uid.substring(0, Elasticsearch.INDEX_UID_LENGTH);
+}
+
+/**
+* Generates a string which matches all indices
+*/
+static getListOfAllIndices(): string {
+// map each SC type in upper camel case
+return 'stapps_*_*_*';
+}
+
+/**
+* Checks for invalid character in alias names and removes them
+* @param alias The alias name
+* @param uid The UID of the current bulk (for debugging purposes)
+*/
+static removeAliasChars(alias: string, uid: string | undefined): string {
+// spaces are included in some types, so throwing an error in this case would clutter up the log unnecessarily
+let formattedAlias = alias.replace(' ', '');
+// List of invalid characters: https://www.elastic.co/guide/en/elasticsearch/reference/6.6/indices-create-index.html
+['\\', '/', '*', '?', '"', '<', '>', '|', ',', '#'].forEach((value) => {
+if (formattedAlias.includes(value)) {
+formattedAlias = formattedAlias.replace(value, '');
+Logger.warn(`Type of the bulk ${uid} contains an invalid character '${value}'. This can lead to two bulks `
++ `having the same alias despite having different types, as invalid characters are removed automatically. ` +
+`New alias name is "${formattedAlias}."`);
+}
+});
+['-', '_', '+'].forEach((value) => {
+if (formattedAlias.charAt(0) === value) {
+formattedAlias = formattedAlias.substring(1);
+Logger.warn(`Type of the bulk ${uid} begins with '${value}'. This can lead to two bulks `
++ `having the same alias despite having different types, as invalid characters are removed automatically. ` +
+`New alias name is "${formattedAlias}."`);
+}
+});
+if (formattedAlias === '.' || formattedAlias === '..') {
+Logger.warn(`Type of the bulk ${uid} is ${formattedAlias}. This is an invalid name, please consider using ` +
+`another one, as it will be replaced with 'alias_placeholder', which can lead to strange errors.`);
+
+return 'alias_placeholder';
+}
+if (formattedAlias.includes(':')) {
|
||||||
|
Logger.warn(`Type of the bulk ${uid} contains a ':'. This isn't an issue now, but will be in future ` +
|
||||||
|
`Elasticsearch versions!`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return formattedAlias;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create a new interface for elasticsearch
|
* Create a new interface for elasticsearch
|
||||||
* @param config an assembled config file
|
* @param config an assembled config file
|
||||||
* @param mailQueue a mailqueue for monitoring
|
* @param mailQueue a mailqueue for monitoring
|
||||||
*/
|
*/
|
||||||
constructor(private config: SCConfigFile, mailQueue?: MailQueue) {
|
constructor(private readonly config: SCConfigFile, mailQueue?: MailQueue) {
|
||||||
|
|
||||||
if (!config.internal.database || typeof config.internal.database.version === 'undefined') {
|
if (typeof config.internal.database === 'undefined' || typeof config.internal.database.version === 'undefined') {
|
||||||
throw new Error('Database version is undefined. Check you config file');
|
throw new Error('Database version is undefined. Check you config file');
|
||||||
}
|
}
|
||||||
|
|
||||||
const options = {
|
const options = {
|
||||||
apiVersion: config.internal.database.version,
|
apiVersion: config.internal.database.version,
|
||||||
host: this.getElasticsearchUrl(),
|
host: Elasticsearch.getElasticsearchUrl(),
|
||||||
log: 'error',
|
log: 'error',
|
||||||
};
|
};
|
||||||
|
|
||||||
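For orientation (not part of the commit): a minimal standalone sketch of the index-naming scheme handled by the static helpers added in the hunk above, using the validation regex copied verbatim from the diff. The type, source and UID values are made-up examples.

// TypeScript sketch; values are hypothetical
const indexRegex = /^stapps_([A-z0-9_]+)_([a-z0-9-_]+)_([-a-z0-9^_]+)$/;

const type = 'dish';               // an SCThingType, lower-cased, spaces replaced by '_'
const source = 'mensa-frankfurt';  // a data source name
const uid8 = 'a1b2c3d4';           // first INDEX_UID_LENGTH (8) characters of a bulk UID

const index = `stapps_${type}_${source}_${uid8}`;
console.log(index, indexRegex.test(index)); // stapps_dish_mensa-frankfurt_a1b2c3d4 true

Names that contain characters outside the regex (for example upper-case sources) would fail the test and trigger the error thrown later in this file.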
@@ -86,18 +188,7 @@ export class Elasticsearch implements Database {

     this.aggregationsSchema = buildAggregations(this.config.internal.aggregations);

-    this.getAliasMap();
+    this.mailQueue = mailQueue;

-    const monitoringConfiguration = this.config.internal.monitoring;
-
-    if (typeof monitoringConfiguration !== 'undefined') {
-      if (typeof mailQueue === 'undefined') {
-        throw new Error('Monitoring is defined, but MailQueue is undefined. A MailQueue is obligatory for monitoring.');
-      }
-      // read all watches and schedule searches on the client
-      Monitoring.setUp(monitoringConfiguration, this.client, mailQueue);
-    }
-
   }

   /**
@@ -105,7 +196,8 @@ export class Elasticsearch implements Database {
    *
    * Returns Elasticsearch Object if it exists
    */
-  private async doesItemExist(object: SCThings): Promise<{exists: boolean; object?: ElasticsearchObject<SCThings>}> {
+  // tslint:disable-next-line: completed-docs
+  private async doesItemExist(object: SCThings): Promise<{exists: boolean; object?: ElasticsearchObject<SCThings>; }> {
     const searchResponse = await this.client.search<SCThings>({
       body: {
         query: {
@@ -117,7 +209,7 @@ export class Elasticsearch implements Database {
         },
       },
       from: 0,
-      index: this.getListOfAllIndices(),
+      index: Elasticsearch.getListOfAllIndices(),
       size: 1,
     });

@@ -137,25 +229,30 @@ export class Elasticsearch implements Database {
    * Gets a map which contains each alias and all indices that are associated with each alias
    */
   private async getAliasMap() {
+    // delay after which alias map will be fetched again
+    const RETRY_INTERVAL = 5000;
     // create a list of old indices that are not in use
     const oldIndicesToDelete: string[] = [];

     let aliases: {
       [index: string]: {
+        /**
+         * Aliases of an index
+         */
         aliases: {
-          [K in SCThingType]: any
+          [K in SCThingType]: unknown
-        },
-      },
+        };
+      };
     };

     try {
       aliases = await this.client.indices.getAlias({});
     } catch (error) {
-      logger.error('Failed getting alias map:', error);
-      setTimeout(() => {
-        this.getAliasMap();
-      }, 5000); // retry in 5 seconds
+      await Logger.error('Failed getting alias map:', error);
+      setTimeout(async () => {
+        return this.getAliasMap();
+      }, RETRY_INTERVAL); // retry after a delay

       return;
     }

@@ -165,6 +262,7 @@ export class Elasticsearch implements Database {
       const matches = indexRegex.exec(index);
       if (matches !== null) {
         const type = matches[1];
+        // tslint:disable-next-line: no-magic-numbers
         const source = matches[2];

         // check if there is an alias for the current index
@@ -189,78 +287,11 @@ export class Elasticsearch implements Database {
       await this.client.indices.delete({
         index: oldIndicesToDelete,
       });
-      logger.warn('Deleted old indices: ' + oldIndicesToDelete);
+      Logger.warn(`Deleted old indices: oldIndicesToDelete`);
     }

-    logger.ok('Read alias map from elasticsearch: ' + JSON.stringify(this.aliasMap, null, 2));
-  }
+    // tslint:disable-next-line: no-magic-numbers
+    Logger.ok(`Read alias map from elasticsearch: ${JSON.stringify(this.aliasMap, null, 2)}`);

-  /**
-   * Get the url of elasticsearch
-   */
-  private getElasticsearchUrl(): string {
-    // check if we have a docker link
-    if (process.env.ES_PORT_9200_TCP_ADDR !== undefined && process.env.ES_PORT_9200_TCP_PORT !== undefined) {
-      return process.env.ES_PORT_9200_TCP_ADDR + ':' + process.env.ES_PORT_9200_TCP_PORT;
-    }
-
-    // default
-    return 'localhost:9200';
-  }
-
-  /**
-   * Gets the index name in elasticsearch for one SCThingType
-   * @param type SCThingType of data in the index
-   * @param source source of data in the index
-   * @param bulk bulk process which created this index
-   */
-  private getIndex(type: SCThingType, source: string, bulk: SCBulkResponse) {
-    return `stapps_${type.toLowerCase().replace(' ', '_')}_${source}_${bulk.uid.substring(0, 8)}`;
-  }
-
-  /**
-   * Generates a string which matches all indices
-   */
-  private getListOfAllIndices(): string {
-    // map each SC type in upper camel case
-    return 'stapps_*_*_*';
-  }
-
-  /**
-   * Checks for invalid character in alias names and removes them
-   * @param alias The alias name
-   * @param uid The UID of the current bulk (for debugging purposes)
-   */
-  private removeAliasChars(alias: string, uid: string | undefined): string {
-    // spaces are included in some types, so throwing an error in this case would clutter up the log unnecessarily
-    alias = alias.replace(' ', '');
-    // List of invalid characters: https://www.elastic.co/guide/en/elasticsearch/reference/6.6/indices-create-index.html
-    ['\\', '/', '*', '?', '"', '<', '>', '|', ',', '#'].forEach((value) => {
-      if (alias.includes(value)) {
-        alias = alias.replace(value, '');
-        logger.warn(`Type of the bulk ${uid} contains an invalid character '${value}'. This can lead to two bulks `
-          + `having the same alias despite having different types, as invalid characters are removed automatically. ` +
-          `New alias name is "${alias}."`);
-      }
-    });
-    ['-', '_', '+'].forEach((value) => {
-      if (alias.charAt(0) === value) {
-        alias = alias.substring(1);
-        logger.warn(`Type of the bulk ${uid} begins with '${value}'. This can lead to two bulks `
-          + `having the same alias despite having different types, as invalid characters are removed automatically. ` +
-          `New alias name is "${alias}."`);
-      }
-    });
-    if (alias === '.' || alias === '..') {
-      logger.warn(`Type of the bulk ${uid} is ${alias}. This is an invalid name, please consider using another ` +
-        `one, as it will be replaced with 'alias_placeholder', which can lead to strange errors.`);
-      return 'alias_placeholder';
-    }
-    if (alias.includes(':')) {
-      logger.warn(`Type of the bulk ${uid} contains a ':'. This isn't an issue now, but will be in future ` +
-        `Elasticsearch versions!`);
-    }
-    return alias;
   }

   /**
@@ -274,11 +305,11 @@ export class Elasticsearch implements Database {
     }

     // index name for elasticsearch
-    const index: string = this.getIndex(bulk.type, bulk.source, bulk);
+    const index: string = Elasticsearch.getIndex(bulk.type, bulk.source, bulk);

     // there already is an index with this type and source. We will index the new one and switch the alias to it
     // the old one is deleted
-    const alias = this.removeAliasChars(bulk.type, bulk.uid);
+    const alias = Elasticsearch.removeAliasChars(bulk.type, bulk.uid);

     if (typeof this.aliasMap[alias] === 'undefined') {
       this.aliasMap[alias] = {};
@@ -286,8 +317,8 @@ export class Elasticsearch implements Database {

     if (!indexRegex.test(index)) {
       throw new Error(
-        'Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.\n' +
-        'Make sure to set the bulk "source" and "type" to names consisting of the characters above.',
+        `Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.
+        Make sure to set the bulk "source" and "type" to names consisting of the characters above.`,
       );
     }

@@ -297,7 +328,7 @@ export class Elasticsearch implements Database {
       index,
     });

-    logger.info('Created index', index);
+    Logger.info('Created index', index);
   }

   /**
@@ -306,14 +337,15 @@ export class Elasticsearch implements Database {
    */
   public async bulkExpired(bulk: Bulk): Promise<void> {
     // index name for elasticsearch
-    const index: string = this.getIndex(bulk.type, bulk.source, bulk);
+    const index: string = Elasticsearch.getIndex(bulk.type, bulk.source, bulk);

-    logger.info('Bulk expired. Deleting index', index);
+    Logger.info('Bulk expired. Deleting index', index);

     // don't delete indices that are in use already
     if (bulk.state !== 'done') {
-      logger.info('deleting obsolete index', index);
-      return await this.client.indices.delete({index});
+      Logger.info('deleting obsolete index', index);
+
+      return this.client.indices.delete({index});
     }
   }

@@ -329,10 +361,10 @@ export class Elasticsearch implements Database {
     }

     // index name for elasticsearch
-    const index: string = this.getIndex(bulk.type, bulk.source, bulk);
+    const index: string = Elasticsearch.getIndex(bulk.type, bulk.source, bulk);

     // alias for the indices
-    const alias = this.removeAliasChars(bulk.type, bulk.uid);
+    const alias = Elasticsearch.removeAliasChars(bulk.type, bulk.uid);

     if (typeof this.aliasMap[alias] === 'undefined') {
       this.aliasMap[alias] = {};
@@ -340,8 +372,8 @@ export class Elasticsearch implements Database {

     if (!indexRegex.test(index)) {
       throw new Error(
-        'Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.\n' +
-        'Make sure to set the bulk "source" and "type" to names consisting of the characters above.',
+        `Index names can only consist of lowercase letters from a-z, "-", "_" and integer numbers.
+        Make sure to set the bulk "source" and "type" to names consisting of the characters above.`,
       );
     }

@@ -388,9 +420,9 @@ export class Elasticsearch implements Database {
     if (typeof oldIndex === 'string') {
       // delete the old index
       await this.client.indices.delete({index: oldIndex});
-      logger.info('deleted old index', oldIndex);
+      Logger.info('deleted old index', oldIndex);
     }
-    logger.info('swapped alias index alias', oldIndex, '=>', index);
+    Logger.info('swapped alias index alias', oldIndex, '=>', index);
   }

   /**
@@ -406,7 +438,7 @@ export class Elasticsearch implements Database {
           },
         },
       },
-      index: this.getListOfAllIndices(),
+      index: Elasticsearch.getListOfAllIndices(),
     });

     // get data from response
@@ -414,9 +446,26 @@ export class Elasticsearch implements Database {

     if (hits.length !== 1) {
       throw new Error('No unique item found.');
-    } else {
-      return hits[0]._source as SCThings;
     }
+
+    return hits[0]._source as SCThings;
+  }
+
+  /**
+   * Initialize the elasticsearch database (call all needed methods)
+   */
+  public async init(): Promise<void> {
+    const monitoringConfiguration = this.config.internal.monitoring;
+
+    if (typeof monitoringConfiguration !== 'undefined') {
+      if (typeof this.mailQueue === 'undefined') {
+        throw new Error('Monitoring is defined, but MailQueue is undefined. A MailQueue is obligatory for monitoring.');
+      }
+      // read all watches and schedule searches on the client
+      Monitoring.setUp(monitoringConfiguration, this.client, this.mailQueue);
+    }
+
+    return this.getAliasMap();
   }

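For orientation (not part of the commit): a hypothetical wiring of the new two-step setup. The commit only shows that monitoring setup and the initial alias-map read moved from the constructor into init(); the import paths and helper name below are assumptions.

import {SCConfigFile} from '@openstapps/core';
import {MailQueue} from '../../notification/mail-queue';
import {Elasticsearch} from './elasticsearch'; // file name assumed

async function createDatabase(configFile: SCConfigFile, mailQueue?: MailQueue): Promise<Elasticsearch> {
  // the constructor now only validates the config, creates the client and stores the mail queue
  const database = new Elasticsearch(configFile, mailQueue);
  // init() schedules the monitoring watchers (if configured) and reads the alias map
  await database.init();

  return database;
}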
   /**
@@ -426,24 +475,27 @@ export class Elasticsearch implements Database {
    */
   public async post(object: SCThings, bulk: Bulk): Promise<void> {

-    const obj: SCThings & {creation_date: string} = {
+    // tslint:disable-next-line: completed-docs
+    const obj: SCThings & {creation_date: string; } = {
       ...object,
-      creation_date: moment().format(),
+      creation_date: moment()
+        .format(),
     };

     const itemMeta = await this.doesItemExist(obj);

     // we have to check that the item will get replaced if the index is rolled over
     if (itemMeta.exists && typeof itemMeta.object !== 'undefined') {
-      const indexOfNew = this.getIndex(obj.type, bulk.source, bulk);
+      const indexOfNew = Elasticsearch.getIndex(obj.type, bulk.source, bulk);
       const oldIndex = itemMeta.object._index;

       // new item doesn't replace the old one
-      if (oldIndex.substring(0, oldIndex.length - 9) !== indexOfNew.substring(0, indexOfNew.length - 9)) {
-        throw new Error(
-          'Object \"' + obj.uid + '\" already exists. Object was: ' +
-          JSON.stringify(obj, null, 2),
-        );
+      if (oldIndex.substring(0, oldIndex.length - Elasticsearch.INDEX_UID_LENGTH + 1)
+        !== indexOfNew.substring(0, indexOfNew.length - Elasticsearch.INDEX_UID_LENGTH + 1)) {
+        throw new Error(
+          // tslint:disable-next-line: no-magic-numbers
+          `Object "${obj.uid}" already exists. Object was: ${JSON.stringify(obj, null, 2)}`,
+        );
       }
     }

@@ -451,13 +503,13 @@ export class Elasticsearch implements Database {
     const searchResponse = await this.client.create({
       body: obj,
       id: obj.uid,
-      index: this.getIndex(obj.type, bulk.source, bulk),
+      index: Elasticsearch.getIndex(obj.type, bulk.source, bulk),
       timeout: '90s',
       type: obj.type,
     });

     if (!searchResponse.created) {
-      throw new Error('Object creation Error: Instance was: ' + JSON.stringify(obj));
+      throw new Error(`Object creation Error: Instance was: ${JSON.stringify(obj)}`);
     }
   }

@@ -470,7 +522,7 @@ export class Elasticsearch implements Database {
     const itemMeta = await this.doesItemExist(object);

     if (itemMeta.exists && typeof itemMeta.object !== 'undefined') {
-      return await this.client.update({
+      return this.client.update({
         body: {
           doc: object,
         },
@@ -499,7 +551,7 @@ export class Elasticsearch implements Database {
         query: buildQuery(params, this.config, this.config.internal.database as ElasticsearchConfig),
       },
       from: params.from,
-      index: this.getListOfAllIndices(),
+      index: Elasticsearch.getListOfAllIndices(),
       size: params.size,
     };

@@ -20,10 +20,10 @@ import {
   SCMonitoringMaximumLengthCondition,
   SCMonitoringMinimumLengthCondition,
 } from '@openstapps/core';
+import {Logger} from '@openstapps/logger';
 import * as ES from 'elasticsearch';
 import * as cron from 'node-cron';
-import {logger} from '../../common';
-import {MailQueue} from '../../notification/MailQueue';
+import {MailQueue} from '../../notification/mail-queue';

 /**
  * Check if the given condition fails on the given number of results and the condition
@@ -37,13 +37,14 @@ function conditionFails(
   if (condition.type === 'MaximumLength') {
     return maxConditionFails(condition.length, total);
   }
+
   return minConditionFails(condition.length, total);
 }

 /**
  * Check if the min condition fails
- * @param minimumLength
- * @param total
+ * @param minimumLength Minimal length allowed
+ * @param total Number of results
  */
 function minConditionFails(minimumLength: number, total: number) {
   return typeof minimumLength === 'number' && minimumLength > total;
@@ -51,8 +52,8 @@ function minConditionFails(minimumLength: number, total: number) {

 /**
  * Check if the max condition fails
- * @param maximumLength
- * @param total
+ * @param maximumLength Maximal length allowed
+ * @param total Number of results
  */
 function maxConditionFails(maximumLength: number, total: number) {
   return typeof maximumLength === 'number' && maximumLength < total;
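For orientation (not part of the commit): a standalone sketch of the two condition checks shown in the hunks above, with the function bodies taken verbatim from the diff and example values made up.

function minConditionFails(minimumLength: number, total: number) {
  return typeof minimumLength === 'number' && minimumLength > total;
}

function maxConditionFails(maximumLength: number, total: number) {
  return typeof maximumLength === 'number' && maximumLength > 0 && false || typeof maximumLength === 'number' && maximumLength < total;
}

console.log(minConditionFails(10, 3));  // true  – fewer hits than the required minimum
console.log(maxConditionFails(100, 3)); // false – the total stays below the allowed maximum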
@@ -74,19 +75,18 @@ export function runActions(
   mailQueue: MailQueue,
 ) {

-  actions.forEach((action) => {
+  actions.forEach(async (action) => {
     if (action.type === 'log') {
-      logger.error(
+      await Logger.error(
         action.prefix,
         `Watcher '${watcherName}' failed. Watcher was triggered by '${triggerName}'`, `Found ${total} hits instead`,
         action.message,
       );
     } else {
-      mailQueue.push({
+      await mailQueue.push({
         subject: action.subject,
-        text: `Watcher '${watcherName}' failed. Watcher was triggered by '${triggerName}'\n` +
-          action.message +
-          `Found ${total} hits instead`,
+        text: `Watcher '${watcherName}' failed. Watcher was triggered by '${triggerName}'
+        ${action.message} Found ${total} hits instead`,
         to: action.recipients,
       });
     }
@@ -137,5 +137,5 @@ export function setUp(monitoringConfig: SCMonitoringConfiguration, esClient: ES.
   });

-  logger.log('Scheduled ' + monitoringConfig.watchers.length + ' watches');
+  Logger.log(`Scheduled ${monitoringConfig.watchers.length} watches`);
 }

@@ -44,9 +44,9 @@ import {
 /**
  * Builds a boolean filter. Returns an elasticsearch boolean filter
  */
-export function buildBooleanFilter(booleanFilter: SCSearchBooleanFilter): ESBooleanFilterArguments<any> {
+export function buildBooleanFilter(booleanFilter: SCSearchBooleanFilter): ESBooleanFilterArguments<unknown> {

-  const result: ESBooleanFilterArguments<any> = {
+  const result: ESBooleanFilterArguments<unknown> = {
     minimum_should_match: 0,
     must: [],
     must_not: [],
@@ -71,24 +71,39 @@ export function buildBooleanFilter(booleanFilter: SCSearchBooleanFilter): ESBool

 /**
  * Converts Array of Filters to elasticsearch query-syntax
- * @param filter
+ * @param filter A search filter for the retrieval of the data
  */
-export function buildFilter(filter: SCSearchFilter): ESTermFilter | ESGeoDistanceFilter | ESBooleanFilter<any> {
+export function buildFilter(filter: SCSearchFilter): ESTermFilter | ESGeoDistanceFilter | ESBooleanFilter<unknown> {

   switch (filter.type) {
     case 'value':
-      const filterObj: { [field: string]: string } = {};
-      filterObj[filter.arguments.field + '.raw'] = filter.arguments.value;
+      const filterObj: { [field: string]: string; } = {};
+      filterObj[`${filter.arguments.field}.raw`] = filter.arguments.value;

       return {
         term: filterObj,
       };
     case 'availability':
-      const startRangeFilter: { [field: string]: { lte: string } } = {};
+      const startRangeFilter: {
+        [field: string]: {
+          /**
+           * Less than or equal
+           */
+          lte: string;
+        };
+      } = {};
       startRangeFilter[filter.arguments.fromField] = {
         lte: 'now',
       };

-      const endRangeFilter: { [field: string]: { gte: string } } = {};
+      const endRangeFilter: {
+        [field: string]: {
+          /**
+           * Greater than or equal
+           */
+          gte: string;
+        };
+      } = {};
       endRangeFilter[filter.arguments.toField] = {
         gte: 'now',
       };
@@ -129,12 +144,13 @@ export function buildFilter(filter: SCSearchFilter): ESTermFilter | ESGeoDistanc
       };
     case 'distance':
       const geoObject: ESGeoDistanceFilterArguments = {
-        distance: filter.arguments.distanceInM + 'm',
+        distance: `${filter.arguments.distanceInM}m`,
       };
       geoObject[filter.arguments.field] = {
         lat: filter.arguments.lat,
         lon: filter.arguments.lon,
       };

       return {
         geo_distance: geoObject,
       };
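For orientation (not part of the commit): a standalone sketch of the geo_distance clause built by the 'distance' case above. The field name and coordinates are made-up examples; only the shape of the resulting object mirrors the diff.

const field = 'geo.point';
const args = {distanceInM: 500, lat: 50.11, lon: 8.68};

const geoObject: {[key: string]: unknown} = {distance: `${args.distanceInM}m`};
geoObject[field] = {lat: args.lat, lon: args.lon};

const esFilter = {geo_distance: geoObject};
console.log(JSON.stringify(esFilter));
// {"geo_distance":{"distance":"500m","geo.point":{"lat":50.11,"lon":8.68}}}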
@@ -195,33 +211,36 @@ function buildFunctionsForBoostingTypes(

     const fields = boostingForOneSCType.fields;

-    Object.keys(boostingForOneSCType.fields).forEach((fieldName) => {
-      const boostingForOneField = fields[fieldName];
-
-      Object.keys(boostingForOneField).forEach((value) => {
-        const factor = boostingForOneField[value];
-
-        // build term filter
-        const termFilter: ESTermFilter = {
-          term: {},
-        };
-        termFilter.term[fieldName + '.raw'] = value;
-
-        functions.push({
-          filter: {
-            bool: {
-              must: [
-                typeFilter,
-                termFilter,
-              ],
-              should: [],
-            },
-          },
-          weight: factor,
-        });
-      });
-    });
+    for (const fieldName in boostingForOneSCType.fields) {
+      if (boostingForOneSCType.fields.hasOwnProperty(fieldName)) {
+        const boostingForOneField = fields[fieldName];
+
+        for (const value in boostingForOneField) {
+          if (boostingForOneField.hasOwnProperty(value)) {
+            const factor = boostingForOneField[value];
+
+            // build term filter
+            const termFilter: ESTermFilter = {
+              term: {},
+            };
+            termFilter.term[`${fieldName}.raw`] = value;
+
+            functions.push({
+              filter: {
+                bool: {
+                  must: [
+                    typeFilter,
+                    termFilter,
+                  ],
+                  should: [],
+                },
+              },
+              weight: factor,
+            });
+          }
+        }
+      }
+    }
   }
   });

@@ -229,8 +248,8 @@ function buildFunctionsForBoostingTypes(
 }
 /**
  * Builds body for Elasticsearch requests
- * @param params
- * @param defaultConfig
+ * @param params Parameters for querying the backend
+ * @param defaultConfig Default configuration of the backend
  * @returns ElasticsearchQuery (body of a search-request)
  */
 export function buildQuery(
@@ -355,13 +374,13 @@ export function buildQuery(
     // add type filters for sorts
     mustMatch.push.apply(mustMatch, typeFiltersToAppend);
   }

   return functionScoreQuery;
 }

 /**
  * converts query to
- * @param params
- * @param sortableFields
+ * @param sorts Sorting rules to apply to the data that is being queried
  * @returns an array of sort queries
  */
 export function buildSort(
@@ -371,7 +390,8 @@ export function buildSort(
     switch (sort.type) {
       case 'ducet':
         const ducetSort: ESDucetSort = {};
-        ducetSort[sort.arguments.field + '.sort'] = sort.order;
+        ducetSort[`${sort.arguments.field}.sort`] = sort.order;

         return ducetSort;
       case 'distance':
         const args: ESGeoDistanceSortArguments = {
@@ -400,6 +420,12 @@ export function buildSort(
   });
 }

+/**
+ * Provides a script for sorting search results by prices
+ *
+ * @param universityRole User group which consumes university services
+ * @param field Field in which wanted offers with prices are located
+ */
 export function buildPriceSortScript(universityRole: keyof SCSportCoursePriceGroup, field: SCThingsField): string {
   return `
     // initialize the sort value with the maximum

@@ -13,23 +13,27 @@
  * You should have received a copy of the GNU Affero General Public License
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
+import {Logger} from '@openstapps/logger';
 import {Client} from 'elasticsearch';
 import {readdir, readFile} from 'fs-extra';
 import {resolve} from 'path';
-import {logger} from '../../common';

 /**
  * Assembles an elasticsearch template with all resolved subType-references
- * @param templateType
- * @param templates
- * @param inline
- * @returns
+ * @param templateType Type used in the elasticsearch mapping
+ * @param templates Templates (elasticsearch mappings)
+ * @param inline Level of hierarchy
+ * @deprecated
  */
-function assembleElasticsearchTemplate(templateType: string, templates: {[key: string]: any}, inline: number): any {
+function assembleElasticsearchTemplate(
+  templateType: string,
+  // tslint:disable-next-line: no-any
+  templates: {[key: string]: any; },
+  inline: number): object {

   const templateBase = JSON.parse(JSON.stringify(templates[templateType]));

-  if (inline) {
+  if (typeof inline !== 'undefined') {
     delete templateBase.dynamic_templates;
   }

@@ -45,12 +49,12 @@ function assembleElasticsearchTemplate(templateType: string, templates: {[key: s

     try {
       // extend the template by the properties of the basetemplate
-      templateBase.properties = Object.assign(
-        templateBase.properties,
-        templates['base.template.json'].mappings._default_.properties,
-      );
+      templateBase.properties = {...templateBase.properties,
+        ...templates['base.template.json'].mappings._default_.properties,
+      };
     } catch (e) {
-      logger.error('Failed to merge properties on: ' + templateType);
+      // tslint:disable-next-line: no-floating-promises
+      Logger.error(`Failed to merge properties on: ${templateType}`);
       throw e;
     }
     const fieldKeys = Object.keys(templateBase.properties);
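For orientation (not part of the commit): a standalone sketch of the merge performed above, showing that object spread behaves like the Object.assign call it replaces, with later sources winning on key collisions. The property names are made-up examples.

const ownProperties = {name: {type: 'text'}, uid: {type: 'keyword'}};
const baseProperties = {uid: {type: 'keyword'}, type: {type: 'keyword'}};

const merged = {...ownProperties, ...baseProperties};
console.log(Object.keys(merged)); // [ 'name', 'uid', 'type' ]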
@@ -70,7 +74,7 @@ function assembleElasticsearchTemplate(templateType: string, templates: {[key: s
       if (Array.isArray(field._typeRef)) {
         let obj = {};
         field._typeRef.forEach((subType: string) => {
-          obj = Object.assign(obj, assembleElasticsearchTemplate(subType, templates, inline + 1));
+          obj = {...obj, ...assembleElasticsearchTemplate(subType, templates, inline + 1)};
         });
         templateBase.properties[fieldKey] = obj;
       } else {
@@ -82,25 +86,29 @@ function assembleElasticsearchTemplate(templateType: string, templates: {[key: s
       }
     });
   }

   return templateBase;
 }

 /**
  * Reads all template files and returns the assembled template
  */
-export async function getElasticsearchTemplate(): Promise<any> {
+// TODO: check if redundant
+export async function getElasticsearchTemplate(): Promise<object> {

   // readIM all templates
   const elasticsearchFolder = resolve('.', 'src', 'storage', 'elasticsearch', 'templates');
-  const templates: {[key: string]: any} = {};
+  // tslint:disable-next-line: no-any
+  const templates: {[key: string]: any; } = {};

   const fileNames = await readdir(elasticsearchFolder);

   const availableTypes = fileNames.filter((fileName) => {
     return Array.isArray(fileName.match(/\w*\.sc-type\.template\.json/i));
-  }).map((fileName) => {
-    return fileName.substring(0, fileName.indexOf('.sc-type.template.json'));
-  });
+  })
+    .map((fileName) => {
+      return fileName.substring(0, fileName.indexOf('.sc-type.template.json'));
+    });

   const promises = fileNames.map(async (fileName) => {
     const file = await readFile(resolve(elasticsearchFolder, fileName), 'utf8');
@@ -108,7 +116,7 @@ export async function getElasticsearchTemplate(): Promise<any> {
     try {
       templates[fileName] = JSON.parse(file.toString());
     } catch (jsonParsingError) {
-      logger.error('Failed parsing file: ' + fileName);
+      await Logger.error(`Failed parsing file: ${fileName}`);
       throw jsonParsingError;
     }
   });
@@ -119,23 +127,24 @@ export async function getElasticsearchTemplate(): Promise<any> {

   availableTypes.forEach((configType) => {
     template.mappings[configType.toLowerCase()] =
-      assembleElasticsearchTemplate(configType + '.sc-type.template.json', templates, 0);
+      assembleElasticsearchTemplate(`${configType}.sc-type.template.json`, templates, 0);
   });

   // this is like the base type (StappsCoreThing)
   const baseProperties = template.mappings._default_.properties;
-  Object.keys(baseProperties).forEach((basePropertyName) => {
-    let field = baseProperties[basePropertyName];
-    field = templates[field._fieldRef];
-    template.mappings._default_.properties[basePropertyName] = field;
-  });
+  Object.keys(baseProperties)
+    .forEach((basePropertyName) => {
+      let field = baseProperties[basePropertyName];
+      field = templates[field._fieldRef];
+      template.mappings._default_.properties[basePropertyName] = field;
+    });

   return template;
 }

 /**
  * Puts a new global template
- * @param client
+ * @param client An elasticsearch client to use
  */
 export async function putTemplate(client: Client): Promise<void> {
   return client.indices.putTemplate({