Typescript migration: Major refactoring.

This commit is contained in:
baldo 2020-04-10 00:43:15 +02:00
parent 31ecc0cf4f
commit 52822207a5
31 changed files with 2068 additions and 2268 deletions

View file

@ -1,18 +1,25 @@
# Refactoring ideas # Refactoring ideas
## TODO:
* Test email rendering!
## Short term ## Short term
* Integrate typescript in the build and start migrating the server code. * Integrate typescript in the build and start migrating the server code.
* Find a nice way to integrate typescript with grunt. * Find a nice way to integrate typescript with grunt.
* Replace logging framework. * Replace logging framework.
* Bluebird for promises?
## Mid term ## Mid term
* Typesafe db queries.
* Port complete server to typescript. * Port complete server to typescript.
* Port the server code to promises and `async` / `await`. * Port the server code to promises and `async` / `await`.
* Use ES6 style imports instead of `require`. * Use ES6 style imports instead of `require`.
* Store node data in database and export it for gateways. * Store node data in database and export it for gateways.
* Write tests (especially testing quirky node data). * Write tests (especially testing quirky node data).
* Allow terminating running tasks via bluebirds cancellation.
## Long term ## Long term

88
package-lock.json generated
View file

@ -26,6 +26,12 @@
"integrity": "sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow==", "integrity": "sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow==",
"dev": true "dev": true
}, },
"@types/async": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/@types/async/-/async-3.2.0.tgz",
"integrity": "sha512-7dhGj2u7hS+Y/NPxFDaTL/kbTvVjOKvZmD+GZp0jGGOLvnakomncrqSReX+xPAGGZuCUSUsXXy9I9pEpSwxpKA==",
"dev": true
},
"@types/babel-types": { "@types/babel-types": {
"version": "7.0.7", "version": "7.0.7",
"resolved": "https://registry.npmjs.org/@types/babel-types/-/babel-types-7.0.7.tgz", "resolved": "https://registry.npmjs.org/@types/babel-types/-/babel-types-7.0.7.tgz",
@ -53,6 +59,12 @@
"@types/node": "*" "@types/node": "*"
} }
}, },
"@types/caseless": {
"version": "0.12.2",
"resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz",
"integrity": "sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==",
"dev": true
},
"@types/command-line-args": { "@types/command-line-args": {
"version": "5.0.0", "version": "5.0.0",
"resolved": "https://registry.npmjs.org/@types/command-line-args/-/command-line-args-5.0.0.tgz", "resolved": "https://registry.npmjs.org/@types/command-line-args/-/command-line-args-5.0.0.tgz",
@ -137,6 +149,12 @@
"@types/node": "*" "@types/node": "*"
} }
}, },
"@types/html-to-text": {
"version": "1.4.31",
"resolved": "https://registry.npmjs.org/@types/html-to-text/-/html-to-text-1.4.31.tgz",
"integrity": "sha512-9vTFw6vYZNnjPOep9WRXs7cw0vg04pAZgcX9bqx70q1BNT7y9sOJovpbiNIcSNyHF/6LscLvGhtb5Og1T0UEvA==",
"dev": true
},
"@types/lodash": { "@types/lodash": {
"version": "4.14.149", "version": "4.14.149",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.149.tgz", "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.149.tgz",
@ -197,6 +215,31 @@
"integrity": "sha512-ewFXqrQHlFsgc09MK5jP5iR7vumV/BYayNC6PgJO2LPe8vrnNFyjQjSppfEngITi0qvfKtzFvgKymGheFM9UOA==", "integrity": "sha512-ewFXqrQHlFsgc09MK5jP5iR7vumV/BYayNC6PgJO2LPe8vrnNFyjQjSppfEngITi0qvfKtzFvgKymGheFM9UOA==",
"dev": true "dev": true
}, },
"@types/request": {
"version": "2.48.4",
"resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.4.tgz",
"integrity": "sha512-W1t1MTKYR8PxICH+A4HgEIPuAC3sbljoEVfyZbeFJJDbr30guDspJri2XOaM2E+Un7ZjrihaDi7cf6fPa2tbgw==",
"dev": true,
"requires": {
"@types/caseless": "*",
"@types/node": "*",
"@types/tough-cookie": "*",
"form-data": "^2.5.0"
},
"dependencies": {
"form-data": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz",
"integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==",
"dev": true,
"requires": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.6",
"mime-types": "^2.1.12"
}
}
}
},
"@types/serve-static": { "@types/serve-static": {
"version": "1.13.3", "version": "1.13.3",
"resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.3.tgz", "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.3.tgz",
@ -216,6 +259,12 @@
"@types/node": "*" "@types/node": "*"
} }
}, },
"@types/tough-cookie": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.0.tgz",
"integrity": "sha512-I99sngh224D0M7XgW1s120zxCt3VYQ3IQsuw3P3jbq5GG4yc79+ZjyKznyOGIQrflfylLgcfekeZW/vk0yng6A==",
"dev": true
},
"@types/tz-offset": { "@types/tz-offset": {
"version": "0.0.0", "version": "0.0.0",
"resolved": "https://registry.npmjs.org/@types/tz-offset/-/tz-offset-0.0.0.tgz", "resolved": "https://registry.npmjs.org/@types/tz-offset/-/tz-offset-0.0.0.tgz",
@ -278,9 +327,9 @@
"integrity": "sha512-vHiZIDK4QysLXKOGrRJ9IZlUfJuvSUQUELJz4oaj0o70KD7v+Fsate1dMVQd+1LS1VbN73BZe1aEPw8mEjeDcw==" "integrity": "sha512-vHiZIDK4QysLXKOGrRJ9IZlUfJuvSUQUELJz4oaj0o70KD7v+Fsate1dMVQd+1LS1VbN73BZe1aEPw8mEjeDcw=="
}, },
"ajv": { "ajv": {
"version": "6.11.0", "version": "6.12.0",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.11.0.tgz", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.0.tgz",
"integrity": "sha512-nCprB/0syFYy9fVYU1ox1l2KN8S9I+tziH8D4zdZuLT3N6RMlGSGt5FSTpAiHB/Whv8Qs1cWHma1aMKZyaHRKA==", "integrity": "sha512-D6gFiFA0RRLyUbvijN74DWAjXSFxWKaWP7mldxkVhyhAV3+SWA9HEJPHQ2c9soIeTFJqcSdFDGFgdqs1iUU2Hw==",
"requires": { "requires": {
"fast-deep-equal": "^3.1.1", "fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0", "fast-json-stable-stringify": "^2.0.0",
@ -7502,9 +7551,9 @@
"dev": true "dev": true
}, },
"psl": { "psl": {
"version": "1.7.0", "version": "1.8.0",
"resolved": "https://registry.npmjs.org/psl/-/psl-1.7.0.tgz", "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz",
"integrity": "sha512-5NsSEDv8zY70ScRnOTn7bK7eanl2MvFrOrS/R6x+dBt5g1ghnj9Zv90kO8GwT8gxcu2ANyFprnFYB85IogIJOQ==" "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ=="
}, },
"pug": { "pug": {
"version": "2.0.4", "version": "2.0.4",
@ -7931,9 +7980,9 @@
"dev": true "dev": true
}, },
"request": { "request": {
"version": "2.88.0", "version": "2.88.2",
"resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
"integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
"requires": { "requires": {
"aws-sign2": "~0.7.0", "aws-sign2": "~0.7.0",
"aws4": "^1.8.0", "aws4": "^1.8.0",
@ -7942,7 +7991,7 @@
"extend": "~3.0.2", "extend": "~3.0.2",
"forever-agent": "~0.6.1", "forever-agent": "~0.6.1",
"form-data": "~2.3.2", "form-data": "~2.3.2",
"har-validator": "~5.1.0", "har-validator": "~5.1.3",
"http-signature": "~1.2.0", "http-signature": "~1.2.0",
"is-typedarray": "~1.0.0", "is-typedarray": "~1.0.0",
"isstream": "~0.1.2", "isstream": "~0.1.2",
@ -7952,7 +8001,7 @@
"performance-now": "^2.1.0", "performance-now": "^2.1.0",
"qs": "~6.5.2", "qs": "~6.5.2",
"safe-buffer": "^5.1.2", "safe-buffer": "^5.1.2",
"tough-cookie": "~2.4.3", "tough-cookie": "~2.5.0",
"tunnel-agent": "^0.6.0", "tunnel-agent": "^0.6.0",
"uuid": "^3.3.2" "uuid": "^3.3.2"
}, },
@ -9194,12 +9243,19 @@
"optional": true "optional": true
}, },
"tough-cookie": { "tough-cookie": {
"version": "2.4.3", "version": "2.5.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
"integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
"requires": { "requires": {
"psl": "^1.1.24", "psl": "^1.1.28",
"punycode": "^1.4.1" "punycode": "^2.1.1"
},
"dependencies": {
"punycode": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
}
} }
}, },
"trim-newlines": { "trim-newlines": {

View file

@ -35,6 +35,7 @@
"express": "^4.17.1", "express": "^4.17.1",
"glob": "^7.1.6", "glob": "^7.1.6",
"graceful-fs": "^4.2.3", "graceful-fs": "^4.2.3",
"html-to-text": "^5.1.1",
"http-auth": "^3.2.4", "http-auth": "^3.2.4",
"http-errors": "^1.7.3", "http-errors": "^1.7.3",
"lodash": "^4.17.15", "lodash": "^4.17.15",
@ -43,7 +44,7 @@
"node-cron": "^2.0.1", "node-cron": "^2.0.1",
"nodemailer": "^6.4.2", "nodemailer": "^6.4.2",
"nodemailer-html-to-text": "^3.1.0", "nodemailer-html-to-text": "^3.1.0",
"request": "^2.88.0", "request": "^2.88.2",
"scribe-js": "^2.0.4", "scribe-js": "^2.0.4",
"serve-static": "^1.14.1", "serve-static": "^1.14.1",
"sparkson": "^1.3.3", "sparkson": "^1.3.3",
@ -51,6 +52,7 @@
"sqlite3": "^4.1.1" "sqlite3": "^4.1.1"
}, },
"devDependencies": { "devDependencies": {
"@types/async": "^3.2.0",
"@types/command-line-args": "^5.0.0", "@types/command-line-args": "^5.0.0",
"@types/command-line-usage": "^5.0.1", "@types/command-line-usage": "^5.0.1",
"@types/compression": "^1.7.0", "@types/compression": "^1.7.0",
@ -58,10 +60,12 @@
"@types/express": "^4.17.4", "@types/express": "^4.17.4",
"@types/glob": "^7.1.1", "@types/glob": "^7.1.1",
"@types/graceful-fs": "^4.1.3", "@types/graceful-fs": "^4.1.3",
"@types/html-to-text": "^1.4.31",
"@types/lodash": "^4.14.149", "@types/lodash": "^4.14.149",
"@types/node": "^13.11.0", "@types/node": "^13.11.0",
"@types/node-cron": "^2.0.3", "@types/node-cron": "^2.0.3",
"@types/nodemailer": "^6.4.0", "@types/nodemailer": "^6.4.0",
"@types/request": "^2.48.4",
"@types/sqlite3": "^3.1.6", "@types/sqlite3": "^3.1.6",
"bower": "^1.8.8", "bower": "^1.8.8",
"escape-string-regexp": "^2.0.0", "escape-string-regexp": "^2.0.0",

View file

@ -0,0 +1,5 @@
/**
 * Ambient module declaration for the "nodemailer-html-to-text" plugin,
 * which ships without its own TypeScript types.
 */
declare module "nodemailer-html-to-text" {
// Plugin factory: returns a nodemailer PluginFunction built from the
// given html-to-text options.
// NOTE(review): HtmlToTextOptions is assumed to be provided globally by
// @types/html-to-text — confirm it resolves in this project's setup.
import {PluginFunction} from "nodemailer/lib/mailer";
export function htmlToText(options: HtmlToTextOptions): PluginFunction;
}

View file

@ -2,12 +2,9 @@ import util from "util";
import fs from "graceful-fs"; import fs from "graceful-fs";
import glob from "glob"; import glob from "glob";
import path from "path"; import path from "path";
import sqlite from "sqlite";
import sqlite3 from "sqlite3";
import {config} from "../config"; import {config} from "../config";
import Logger from "../logger"; import Logger from "../logger";
import sqlite, {Database, Statement} from "sqlite";
const pglob = util.promisify(glob); const pglob = util.promisify(glob);
const pReadFile = util.promisify(fs.readFile); const pReadFile = util.promisify(fs.readFile);
@ -51,13 +48,15 @@ async function applyMigrations(db: sqlite.Database): Promise<void> {
} }
} }
const file = config.server.databaseFile;
const dbPromise = sqlite.open(file);
export async function init(): Promise<void> { export async function init(): Promise<void> {
const file = config.server.databaseFile;
Logger.tag('database').info('Setting up database: %s', file); Logger.tag('database').info('Setting up database: %s', file);
let db: sqlite.Database; let db: Database;
try { try {
db = await sqlite.open(file); db = await dbPromise;
} }
catch (error) { catch (error) {
Logger.tag('database').error('Error initialzing database:', error); Logger.tag('database').error('Error initialzing database:', error);
@ -73,19 +72,74 @@ export async function init(): Promise<void> {
Logger.tag('database').error('Error migrating database:', error); Logger.tag('database').error('Error migrating database:', error);
throw error; throw error;
} }
await db.close()
} }
Logger.tag('database').info('Setting up legacy database: %s', config.server.databaseFile); /**
* Wrapper around a Promise<Database> providing the same interface as the Database itself.
*/
class DatabasePromiseWrapper implements Database {
constructor(private db: Promise<Database>) {}
let legacyDB: sqlite3.Database; async close() {
try { const db = await this.db;
legacyDB = new sqlite3.Database(config.server.databaseFile); // @ts-ignore
} return await db.close.apply(db, arguments);
catch (error) { }
Logger.tag('database').error('Error initialzing legacy database lib:', error);
throw error; async run() {
const db = await this.db;
// @ts-ignore
return await db.run.apply(db, arguments);
}
async get() {
const db = await this.db;
// @ts-ignore
return await db.get.apply(db, arguments);
}
async all() {
const db = await this.db;
// @ts-ignore
return await db.all.apply(db, arguments);
}
async exec() {
const db = await this.db;
// @ts-ignore
return await db.exec.apply(db, arguments);
}
async each() {
const db = await this.db;
// @ts-ignore
return await db.each.apply(db, arguments);
}
async prepare() {
const db = await this.db;
// @ts-ignore
return await db.prepare.apply(db, arguments);
}
async configure() {
const db = await this.db;
// @ts-ignore
return await db.configure.apply(db, arguments);
}
async migrate() {
const db = await this.db;
// @ts-ignore
return await db.migrate.apply(db, arguments);
}
async on() {
const db = await this.db;
// @ts-ignore
return await db.on.apply(db, arguments);
}
} }
export const db = legacyDB; export const db: Database = new DatabasePromiseWrapper(dbPromise);
export {Database, Statement};

View file

@ -1,22 +1,8 @@
import Logger from "../logger"; import {fixNodeFilenames} from "../services/nodeService";
import NodeService from "../services/nodeService";
export default { export default {
name: 'FixNodeFilenamesJob', name: 'FixNodeFilenamesJob',
description: 'Makes sure node files (holding fastd key, name, etc.) are correctly named.', description: 'Makes sure node files (holding fastd key, name, etc.) are correctly named.',
run: (): Promise<void> => { run: fixNodeFilenames
return new Promise<void>(
(resolve, reject) => {
NodeService.fixNodeFilenames((err: any): void => {
if (err) {
Logger.tag('nodes', 'fix-filenames').error('Error fixing filenames:', err);
return reject(err);
}
resolve();
});
}
);
}
} }

View file

@ -1,22 +1,8 @@
import Logger from "../logger" import * as MailService from "../services/mailService"
import MailService from "../services/mailService"
export default { export default {
name: 'MailQueueJob', name: 'MailQueueJob',
description: 'Send pending emails (up to 5 attempts in case of failures).', description: 'Send pending emails (up to 5 attempts in case of failures).',
run: (): Promise<void> => { run: MailService.sendPendingMails,
return new Promise<void>(
(resolve, reject) => {
MailService.sendPendingMails((err: any): void => {
if (err) {
Logger.tag('mail', 'queue').error('Error sending pending mails:', err);
return reject(err);
}
resolve();
});
}
)
}
} }

View file

@ -1,20 +1,8 @@
import Logger from "../logger"; import * as MonitoringService from "../services/monitoringService";
import MonitoringService from "../services/monitoringService";
export default { export default {
name: 'MonitoringMailsSendingJob', name: 'MonitoringMailsSendingJob',
description: 'Sends monitoring emails depending on the monitoring state of nodes retrieved by the NodeInformationRetrievalJob.', description: 'Sends monitoring emails depending on the monitoring state of nodes retrieved by the NodeInformationRetrievalJob.',
run: (): Promise<void> => { run: MonitoringService.sendMonitoringMails,
return new Promise<void>((resolve, reject) => {
MonitoringService.sendMonitoringMails((err: any): void => {
if (err) {
Logger.tag('monitoring', 'mail-sending').error('Error sending monitoring mails:', err);
return reject(err);
}
resolve();
});
});
}
}; };

View file

@ -1,22 +1,8 @@
import Logger from "../logger"; import * as MonitoringService from "../services/monitoringService";
import MonitoringService from "../services/monitoringService";
export default { export default {
name: 'NodeInformationRetrievalJob', name: 'NodeInformationRetrievalJob',
description: 'Fetches the nodes.json and calculates and stores the monitoring / online status for registered nodes.', description: 'Fetches the nodes.json and calculates and stores the monitoring / online status for registered nodes.',
run: (): Promise<void> => { run: MonitoringService.retrieveNodeInformation,
return new Promise<void>(
(resolve, reject) => {
MonitoringService.retrieveNodeInformation((err: any): void => {
if (err) {
Logger.tag('monitoring', 'information-retrieval').error('Error retrieving node data:', err);
return reject(err);
}
resolve();
});
}
);
}
}; };

View file

@ -1,20 +1,8 @@
import MonitoringService from "../services/monitoringService"; import * as MonitoringService from "../services/monitoringService";
import Logger from "../logger";
export default { export default {
name: 'OfflineNodesDeletionJob', name: 'OfflineNodesDeletionJob',
description: 'Delete nodes that are offline for more than 100 days.', description: 'Delete nodes that are offline for more than 100 days.',
run: (): Promise<void> => { run: MonitoringService.deleteOfflineNodes,
return new Promise<void>((resolve, reject) => {
MonitoringService.deleteOfflineNodes((err: any): void => {
if (err) {
Logger.tag('nodes', 'delete-offline').error('Error deleting offline nodes:', err);
return reject(err);
}
resolve();
});
});
}
}; };

View file

@ -1,99 +0,0 @@
'use strict';
const Constraints = require('../validation/constraints')
const ErrorTypes = require('../utils/errorTypes')
const Logger = require('../logger')
const MailService = require('../services/mailService')
const Resources = require('../utils/resources')
const Strings = require('../utils/strings')
const Validator = require('../validation/validator')
const isValidId = Validator.forConstraint(Constraints.id);
function withValidMailId(req, res, callback) {
const id = Strings.normalizeString(Resources.getData(req).id);
if (!isValidId(id)) {
return callback({data: 'Invalid mail id.', type: ErrorTypes.badRequest});
}
callback(null, id);
}
module.exports = {
get (req, res) {
withValidMailId(req, res, function (err, id) {
if (err) {
return Resources.error(res, err);
}
MailService.getMail(id, function (err, mail) {
if (err) {
Logger.tag('mails', 'admin').error('Error getting mail:', err);
return Resources.error(res, {data: 'Internal error.', type: ErrorTypes.internalError});
}
if (!mail) {
return Resources.error(res, {data: 'Mail not found.', type: ErrorTypes.notFound});
}
return Resources.success(res, mail);
});
});
},
getAll (req, res) {
Resources.getValidRestParams('list', null, req, function (err, restParams) {
if (err) {
return Resources.error(res, err);
}
return MailService.getPendingMails(
restParams,
function (err, mails, total) {
if (err) {
Logger.tag('mails', 'admin').error('Could not get pending mails:', err);
return Resources.error(res, {data: 'Internal error.', type: ErrorTypes.internalError});
}
res.set('X-Total-Count', total);
return Resources.success(res, mails);
}
);
});
},
delete (req, res) {
withValidMailId(req, res, function (err, id) {
if (err) {
return Resources.error(res, err);
}
MailService.deleteMail(id, function (err) {
if (err) {
Logger.tag('mails', 'admin').error('Error deleting mail:', err);
return Resources.error(res, {data: 'Internal error.', type: ErrorTypes.internalError});
}
return Resources.success(res);
});
});
},
resetFailures (req, res) {
withValidMailId(req, res, function (err, id) {
if (err) {
return Resources.error(res, err);
}
MailService.resetFailures(id, function (err, mail) {
if (err) {
Logger.tag('mails', 'admin').error('Error resetting failure count:', err);
return Resources.error(res, {data: 'Internal error.', type: ErrorTypes.internalError});
}
return Resources.success(res, mail);
});
});
}
}

View file

@ -0,0 +1,67 @@
import CONSTRAINTS from "../validation/constraints";
import ErrorTypes from "../utils/errorTypes";
import * as MailService from "../services/mailService";
import * as Resources from "../utils/resources";
import {normalizeString} from "../utils/strings";
import {forConstraint} from "../validation/validator";
import {Request, Response} from "express";
import {Mail, MailId} from "../types";
// Validator for mail ids, built from the shared id constraint.
const isValidId = forConstraint(CONSTRAINTS.id, false);

/**
 * Reads the mail id from the request data and validates it.
 *
 * @param req - incoming request carrying the id.
 * @returns the normalized mail id.
 * @throws a bad-request error object when the id is malformed.
 */
async function withValidMailId(req: Request): Promise<MailId> {
    const rawId = Resources.getData(req).id;
    const mailId = normalizeString(rawId);
    if (isValidId(mailId)) {
        return mailId;
    }
    throw {data: 'Invalid mail id.', type: ErrorTypes.badRequest};
}
/**
 * Loads the single mail addressed by the request's id parameter.
 */
async function doGet(req: Request): Promise<Mail> {
    const mailId = await withValidMailId(req);
    return await MailService.getMail(mailId);
}

/**
 * GET handler: responds with one pending mail, or an error response
 * when the id is invalid or the lookup fails.
 */
export function get(req: Request, res: Response): void {
    const respond = (mail: Mail) => Resources.success(res, mail);
    const fail = (error: any) => Resources.error(res, error);
    doGet(req).then(respond).catch(fail)
}
/**
 * Fetches the pending mails honouring the validated REST list
 * parameters (paging / sorting / filtering) from the request.
 */
async function doGetAll(req: Request): Promise<{total: number, mails: Mail[]}> {
    const listParams = await Resources.getValidRestParams('list', null, req);
    const pending = await MailService.getPendingMails(listParams);
    return pending;
}

/**
 * GET handler: responds with all pending mails and exposes the total
 * count via the X-Total-Count header for client-side paging.
 */
export function getAll (req: Request, res: Response): void {
    doGetAll(req)
        .then(result => {
            res.set('X-Total-Count', result.total.toString(10));
            return Resources.success(res, result.mails);
        })
        .catch(error => Resources.error(res, error))
}
/**
 * Deletes the mail addressed by the request's id parameter.
 */
async function doRemove(req: Request): Promise<void> {
    const mailId = await withValidMailId(req);
    await MailService.deleteMail(mailId);
}

/**
 * DELETE handler: removes a pending mail and responds with an empty
 * success payload.
 */
export function remove (req: Request, res: Response): void {
    const done = () => Resources.success(res, {});
    const fail = (error: any) => Resources.error(res, error);
    doRemove(req).then(done).catch(fail);
}
/**
 * Resets the delivery failure counter of the addressed mail and
 * returns the updated mail.
 */
async function doResetFailures(req: Request): Promise<Mail> {
    const mailId = await withValidMailId(req);
    return await MailService.resetFailures(mailId);
}

/**
 * Handler: clears the failure count so delivery of the mail will be
 * retried, responding with the updated mail.
 */
export function resetFailures (req: Request, res: Response): void {
    const respond = (mail: Mail) => Resources.success(res, mail);
    const fail = (error: any) => Resources.error(res, error);
    doResetFailures(req).then(respond).catch(fail);
}

View file

@ -1,82 +0,0 @@
'use strict';
const _ = require('lodash')
const Constraints = require('../validation/constraints')
const ErrorTypes = require('../utils/errorTypes')
const Logger = require('../logger')
const MonitoringService = require('../services/monitoringService')
const Resources = require('../utils/resources')
const Strings = require('../utils/strings')
const Validator = require('../validation/validator')
const isValidToken = Validator.forConstraint(Constraints.token);
module.exports = {
getAll (req, res) {
Resources.getValidRestParams('list', null, req, function (err, restParams) {
if (err) {
return Resources.error(res, err);
}
return MonitoringService.getAll(
restParams,
function (err, monitoringStates, total) {
if (err) {
Logger.tag('monitoring', 'admin').error('Could not get monitoring states:', err);
return Resources.error(res, {data: 'Internal error.', type: ErrorTypes.internalError});
}
res.set('X-Total-Count', total);
return Resources.success(res, _.map(monitoringStates, function (state) {
state.mapId = _.toLower(state.mac).replace(/:/g, '');
return state;
}));
}
);
});
},
confirm (req, res) {
const data = Resources.getData(req);
const token = Strings.normalizeString(data.token);
if (!isValidToken(token)) {
return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
}
return MonitoringService.confirm(token, function (err, node) {
if (err) {
return Resources.error(res, err);
}
return Resources.success(res, {
hostname: node.hostname,
mac: node.mac,
email: node.email,
monitoring: node.monitoring,
monitoringConfirmed: node.monitoringConfirmed
});
});
},
disable (req, res) {
const data = Resources.getData(req);
const token = Strings.normalizeString(data.token);
if (!isValidToken(token)) {
return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
}
return MonitoringService.disable(token, function (err, node) {
if (err) {
return Resources.error(res, err);
}
return Resources.success(res, {
hostname: node.hostname,
mac: node.mac,
email: node.email,
monitoring: node.monitoring
});
});
}
}

View file

@ -0,0 +1,69 @@
import _ from "lodash";
import CONSTRAINTS from "../validation/constraints";
import ErrorTypes from "../utils/errorTypes";
import * as MonitoringService from "../services/monitoringService";
import * as Resources from "../utils/resources";
import {normalizeString} from "../utils/strings";
import {forConstraint} from "../validation/validator";
import {Request, Response} from "express";
// Validator for monitoring tokens, built from the shared token constraint.
const isValidToken = forConstraint(CONSTRAINTS.token, false);

/**
 * Fetches all monitoring states for the validated REST list parameters
 * and adds a `mapId` to each entry (lowercased MAC with the colons
 * stripped), as used by the map frontend.
 */
async function doGetAll(req: Request): Promise<{total: number, result: any}> {
    const listParams = await Resources.getValidRestParams('list', null, req);
    const {monitoringStates, total} = await MonitoringService.getAll(listParams);
    const decorated = _.map(monitoringStates, state => {
        // mapId is derived in place on the state object.
        state.mapId = _.toLower(state.mac).replace(/:/g, '');
        return state;
    });
    return {total, result: decorated};
}
/**
 * GET handler: responds with all monitoring states and exposes the
 * total count via the X-Total-Count header for paging.
 */
export function getAll(req: Request, res: Response): void {
    const fail = (error: any) => Resources.error(res, error);
    doGetAll(req)
        .then(outcome => {
            res.set('X-Total-Count', outcome.total.toString(10));
            Resources.success(res, outcome.result)
        })
        .catch(fail);
}
/**
 * Extracts the monitoring token from the request data and validates it.
 * On a malformed token an error response is sent immediately and null
 * is returned so callers can simply bail out.
 *
 * Shared by confirm() and disable(), which previously duplicated this
 * extraction / validation logic.
 */
function getValidatedToken(req: Request, res: Response): string | null {
    const data = Resources.getData(req);
    const token = normalizeString(data.token);
    if (!isValidToken(token)) {
        Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
        return null;
    }
    return token;
}

/**
 * Handler: confirms monitoring for the node belonging to the token and
 * responds with the node's monitoring-related fields.
 */
export function confirm(req: Request, res: Response): void {
    const token = getValidatedToken(req, res);
    if (token === null) {
        // Error response already sent.
        return;
    }
    MonitoringService.confirm(token)
        .then(node => Resources.success(res, {
            hostname: node.hostname,
            mac: node.mac,
            email: node.email,
            monitoring: node.monitoring,
            monitoringConfirmed: node.monitoringConfirmed
        }))
        .catch(err => Resources.error(res, err));
}

/**
 * Handler: disables monitoring for the node belonging to the token and
 * responds with the node's monitoring-related fields.
 */
export function disable(req: Request, res: Response): void {
    const token = getValidatedToken(req, res);
    if (token === null) {
        // Error response already sent.
        return;
    }
    MonitoringService.disable(token)
        .then(node => Resources.success(res, {
            hostname: node.hostname,
            mac: node.mac,
            email: node.email,
            monitoring: node.monitoring
        }))
        .catch(err => Resources.error(res, err));
}

View file

@ -1,181 +0,0 @@
'use strict';
const _ = require('lodash')
const deepExtend = require('deep-extend')
const Constraints = require('../validation/constraints')
const ErrorTypes = require('../utils/errorTypes')
const Logger = require('../logger')
const MonitoringService = require('../services/monitoringService')
const NodeService = require('../services/nodeService')
const Strings = require('../utils/strings')
const Validator = require('../validation/validator')
const Resources = require('../utils/resources')
const nodeFields = ['hostname', 'key', 'email', 'nickname', 'mac', 'coords', 'monitoring'];
function getNormalizedNodeData(reqData) {
const node = {};
_.each(nodeFields, function (field) {
let value = Strings.normalizeString(reqData[field]);
if (field === 'mac') {
value = Strings.normalizeMac(value);
}
node[field] = value;
});
return node;
}
const isValidNode = Validator.forConstraints(Constraints.node);
const isValidToken = Validator.forConstraint(Constraints.token);
module.exports = {
create: function (req, res) {
const data = Resources.getData(req);
const node = getNormalizedNodeData(data);
if (!isValidNode(node)) {
return Resources.error(res, {data: 'Invalid node data.', type: ErrorTypes.badRequest});
}
return NodeService.createNode(node, function (err, token, node) {
if (err) {
return Resources.error(res, err);
}
return Resources.success(res, {token: token, node: node});
});
},
update: function (req, res) {
const data = Resources.getData(req);
const token = Strings.normalizeString(data.token);
if (!isValidToken(token)) {
return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
}
const node = getNormalizedNodeData(data);
if (!isValidNode(node)) {
return Resources.error(res, {data: 'Invalid node data.', type: ErrorTypes.badRequest});
}
return NodeService.updateNode(token, node, function (err, token, node) {
if (err) {
return Resources.error(res, err);
}
return Resources.success(res, {token: token, node: node});
});
},
delete: function (req, res) {
const data = Resources.getData(req);
const token = Strings.normalizeString(data.token);
if (!isValidToken(token)) {
return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
}
return NodeService.deleteNode(token, function (err) {
if (err) {
return Resources.error(res, err);
}
return Resources.success(res, {});
});
},
get: function (req, res) {
const token = Strings.normalizeString(Resources.getData(req).token);
if (!isValidToken(token)) {
return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
}
return NodeService.getNodeDataByToken(token, function (err, node) {
if (err) {
return Resources.error(res, err);
}
return Resources.success(res, node);
});
},
getAll: function (req, res) {
Resources.getValidRestParams('list', 'node', req, function (err, restParams) {
if (err) {
return Resources.error(res, err);
}
return NodeService.getAllNodes(function (err, nodes) {
if (err) {
return Resources.error(res, err);
}
const realNodes = _.filter(nodes, function (node) {
// We ignore nodes without tokens as those are only manually added ones like gateways.
return node.token;
});
const macs = _.map(realNodes, function (node) {
return node.mac;
});
MonitoringService.getByMacs(macs, function (err, nodeStateByMac) {
if (err) {
Logger.tag('nodes', 'admin').error('Error getting nodes by MACs:', err);
return Resources.error(res, {data: 'Internal error.', type: ErrorTypes.internalError});
}
const enhancedNodes = _.map(realNodes, function (node) {
const nodeState = nodeStateByMac[node.mac];
if (nodeState) {
return deepExtend({}, node, {
site: nodeState.site,
domain: nodeState.domain,
onlineState: nodeState.state
});
}
return node;
});
const filteredNodes = Resources.filter(
enhancedNodes,
[
'hostname',
'nickname',
'email',
'token',
'mac',
'site',
'domain',
'key',
'onlineState'
],
restParams
);
const total = filteredNodes.length;
const sortedNodes = Resources.sort(
filteredNodes,
[
'hostname',
'nickname',
'email',
'token',
'mac',
'key',
'site',
'domain',
'coords',
'onlineState',
'monitoringState'
],
restParams
);
const pageNodes = Resources.getPageEntities(sortedNodes, restParams);
res.set('X-Total-Count', total);
return Resources.success(res, pageNodes);
});
});
});
}
}

View file

@ -0,0 +1,160 @@
import _ from "lodash";
import deepExtend from "deep-extend";
import Constraints from "../validation/constraints";
import ErrorTypes from "../utils/errorTypes";
import * as MonitoringService from "../services/monitoringService";
import * as NodeService from "../services/nodeService";
import {normalizeMac, normalizeString} from "../utils/strings";
import {forConstraint, forConstraints} from "../validation/validator";
import * as Resources from "../utils/resources";
import {Entity} from "../utils/resources";
import {Request, Response} from "express";
import {Node} from "../types";
const nodeFields = ['hostname', 'key', 'email', 'nickname', 'mac', 'coords', 'monitoring'];
function getNormalizedNodeData(reqData: any): Node {
const node: {[key: string]: any} = {};
_.each(nodeFields, function (field) {
let value = normalizeString(reqData[field]);
if (field === 'mac') {
value = normalizeMac(value);
}
node[field] = value;
});
return node as Node;
}
// Validators for whole-node payloads and for node tokens.
const isValidNode = forConstraints(Constraints.node, false);
const isValidToken = forConstraint(Constraints.token, false);

/**
 * Handler: registers a new node from the request payload and responds
 * with the creation result.
 */
export function create (req: Request, res: Response): void {
    const payload = Resources.getData(req);
    const node = getNormalizedNodeData(payload);
    if (!isValidNode(node)) {
        return Resources.error(res, {data: 'Invalid node data.', type: ErrorTypes.badRequest});
    }
    NodeService.createNode(node)
        .then(outcome => Resources.success(res, outcome))
        .catch(error => Resources.error(res, error));
}
/**
 * PUT handler: updates the node identified by the given token.
 * Validates the token and payload before delegating to the NodeService.
 */
export function update (req: Request, res: Response): void {
    const data = Resources.getData(req);
    const token = normalizeString(data.token);
    if (!isValidToken(token)) {
        Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
        return;
    }
    const node = getNormalizedNodeData(data);
    if (!isValidNode(node)) {
        Resources.error(res, {data: 'Invalid node data.', type: ErrorTypes.badRequest});
        return;
    }
    NodeService.updateNode(token, node)
        .then(result => Resources.success(res, result))
        .catch(err => Resources.error(res, err));
}
/**
 * DELETE handler: removes the node identified by the given token.
 */
export function remove(req: Request, res: Response): void {
    const token = normalizeString(Resources.getData(req).token);
    if (!isValidToken(token)) {
        Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
        return;
    }
    NodeService.deleteNode(token)
        .then(() => Resources.success(res, {}))
        .catch(err => Resources.error(res, err));
}
/**
 * GET handler: loads the node data for the given token.
 */
export function get(req: Request, res: Response): void {
    const data = Resources.getData(req);
    const token = normalizeString(data.token);
    if (!isValidToken(token)) {
        Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
        return;
    }
    NodeService.getNodeDataByToken(token)
        .then(node => Resources.success(res, node))
        .catch(err => Resources.error(res, err));
}
/**
 * Shared implementation for the node list endpoint.
 *
 * Loads all nodes, drops manually added ones (no token), enriches each node
 * with its latest monitoring state, then applies the filter / sort / paging
 * parameters from the request.
 *
 * Returns the requested page of nodes plus the total count after filtering
 * (used by the caller for the X-Total-Count header).
 */
async function doGetAll(req: Request): Promise<{ total: number; pageNodes: any }> {
    const restParams = await Resources.getValidRestParams('list', 'node', req);
    const nodes = await NodeService.getAllNodes();
    const realNodes = _.filter(nodes, node =>
        // We ignore nodes without tokens as those are only manually added ones like gateways.
        !!node.token
    );
    const macs = _.map(realNodes, (node: Node): string => node.mac);
    const nodeStateByMac = await MonitoringService.getByMacs(macs);
    // Merge monitoring data (site / domain / online state) into each node where known.
    const enhancedNodes: Entity[] = _.map(realNodes, (node: Node) => {
        const nodeState = nodeStateByMac[node.mac];
        if (nodeState) {
            return deepExtend({}, node, {
                site: nodeState.site,
                domain: nodeState.domain,
                onlineState: nodeState.state
            });
        }
        return node;
    });
    const filteredNodes = Resources.filter(
        enhancedNodes,
        [
            'hostname',
            'nickname',
            'email',
            'token',
            'mac',
            'site',
            'domain',
            'key',
            'onlineState'
        ],
        restParams
    );
    // The total reflects the filtered (not yet paged) result set.
    const total = filteredNodes.length;
    const sortedNodes = Resources.sort(
        filteredNodes,
        [
            'hostname',
            'nickname',
            'email',
            'token',
            'mac',
            'key',
            'site',
            'domain',
            'coords',
            'onlineState',
            'monitoringState'
        ],
        restParams
    );
    const pageNodes = Resources.getPageEntities(sortedNodes, restParams);
    return {total, pageNodes};
}
/**
 * GET handler: paged node list, with the filtered total in X-Total-Count.
 */
export function getAll(req: Request, res: Response): void {
    doGetAll(req)
        .then(({total, pageNodes}: {total: number, pageNodes: any[]}) => {
            res.set('X-Total-Count', total.toString(10));
            Resources.success(res, pageNodes);
        })
        .catch((err: any) => Resources.error(res, err));
}

View file

@ -1,24 +0,0 @@
'use strict';

// Legacy (pre-TypeScript) statistics endpoint, callback style.
const ErrorTypes = require('../utils/errorTypes')
const Logger = require('../logger')
const NodeService = require('../services/nodeService')
const Resources = require('../utils/resources')

module.exports = {
    // GET handler: responds with {nodes: <statistics>}; failures are logged
    // and mapped to a generic internal error.
    get (req, res) {
        NodeService.getNodeStatistics((err, nodeStatistics) => {
            if (err) {
                Logger.tag('statistics').error('Error getting statistics:', err);
                return Resources.error(res, {data: 'Internal error.', type: ErrorTypes.internalError});
            }
            return Resources.success(
                res,
                {
                    nodes: nodeStatistics
                }
            );
        });
    }
}

View file

@ -0,0 +1,20 @@
import ErrorTypes from "../utils/errorTypes";
import Logger from "../logger";
import {getNodeStatistics} from "../services/nodeService";
import * as Resources from "../utils/resources";
import {Request, Response} from "express";
/**
 * GET handler: responds with the aggregated node statistics as {nodes: ...}.
 * Failures are logged and mapped to a generic internal error.
 */
export function get (req: Request, res: Response): void {
    getNodeStatistics()
        .then(nodeStatistics => Resources.success(
            res,
            {
                nodes: nodeStatistics
            }
        ))
        .catch(err => {
            Logger.tag('statistics').error('Error getting statistics:', err);
            return Resources.error(res, {data: 'Internal error.', type: ErrorTypes.internalError});
        });
}

View file

@ -7,6 +7,7 @@ import {getTasks, Task} from "../jobs/scheduler";
import {normalizeString} from "../utils/strings"; import {normalizeString} from "../utils/strings";
import {forConstraint} from "../validation/validator"; import {forConstraint} from "../validation/validator";
import {Request, Response} from "express"; import {Request, Response} from "express";
import {Entity} from "../utils/resources";
const isValidId = forConstraint(CONSTRAINTS.id, false); const isValidId = forConstraint(CONSTRAINTS.id, false);
@ -71,39 +72,36 @@ function setTaskEnabled(req: Request, res: Response, enable: boolean) {
.catch(err => Resources.error(res, err)) .catch(err => Resources.error(res, err))
} }
/**
 * Shared implementation for the task list endpoint.
 *
 * Note: unlike the node resource, sorting happens before filtering here;
 * the outcome is the same since the total is taken from the filtered list.
 */
async function doGetAll(req: Request): Promise<{total: number, pageTasks: Entity[]}> {
    const restParams = await Resources.getValidRestParams('list', null, req);
    const tasks = Resources.sort(
        _.values(getTasks()),
        ['id', 'name', 'schedule', 'state', 'runningSince', 'lastRunStarted'],
        restParams
    );
    const filteredTasks = Resources.filter(
        tasks,
        ['id', 'name', 'schedule', 'state'],
        restParams
    );
    // Total reflects the filtered (not paged) result set, for X-Total-Count.
    const total = filteredTasks.length;
    const pageTasks = Resources.getPageEntities(filteredTasks, restParams);
    return {
        total,
        pageTasks,
    };
}
export function getAll (req: Request, res: Response): void { export function getAll (req: Request, res: Response): void {
Resources.getValidRestParams('list', null, req, function (err, restParams) { doGetAll(req)
if (err) { .then(({total, pageTasks}) => {
return Resources.error(res, err); res.set('X-Total-Count', total.toString(10));
} Resources.success(res, _.map(pageTasks, toExternalTask));
})
if (!restParams) { .catch(err => Resources.error(res, err));
return Resources.error(
res,
{
data: "Unexpected state: restParams is not set.",
type: ErrorTypes.internalError
}
);
}
const tasks = Resources.sort(
_.values(getTasks()),
['id', 'name', 'schedule', 'state', 'runningSince', 'lastRunStarted'],
restParams
);
const filteredTasks = Resources.filter(
tasks,
['id', 'name', 'schedule', 'state'],
restParams
);
const total = filteredTasks.length;
const pageTasks = Resources.getPageEntities(filteredTasks, restParams);
res.set('X-Total-Count', total.toString(10));
return Resources.success(res, _.map(pageTasks, toExternalTask));
});
} }
export function run (req: Request, res: Response): void { export function run (req: Request, res: Response): void {

View file

@ -4,12 +4,12 @@ import app from "./app"
import {config} from "./config" import {config} from "./config"
import * as VersionResource from "./resources/versionResource" import * as VersionResource from "./resources/versionResource"
import StatisticsResource from "./resources/statisticsResource" import * as StatisticsResource from "./resources/statisticsResource"
import * as FrontendResource from "./resources/frontendResource" import * as FrontendResource from "./resources/frontendResource"
import NodeResource from "./resources/nodeResource" import * as NodeResource from "./resources/nodeResource"
import MonitoringResource from "./resources/monitoringResource" import * as MonitoringResource from "./resources/monitoringResource"
import * as TaskResource from "./resources/taskResource" import * as TaskResource from "./resources/taskResource"
import MailResource from "./resources/mailResource" import * as MailResource from "./resources/mailResource"
export function init (): void { export function init (): void {
const router = express.Router(); const router = express.Router();
@ -20,7 +20,7 @@ export function init (): void {
router.post('/api/node', NodeResource.create); router.post('/api/node', NodeResource.create);
router.put('/api/node/:token', NodeResource.update); router.put('/api/node/:token', NodeResource.update);
router.delete('/api/node/:token', NodeResource.delete); router.delete('/api/node/:token', NodeResource.remove);
router.get('/api/node/:token', NodeResource.get); router.get('/api/node/:token', NodeResource.get);
router.put('/api/monitoring/confirm/:token', MonitoringResource.confirm); router.put('/api/monitoring/confirm/:token', MonitoringResource.confirm);
@ -37,11 +37,11 @@ export function init (): void {
router.get('/internal/api/mails', MailResource.getAll); router.get('/internal/api/mails', MailResource.getAll);
router.get('/internal/api/mails/:id', MailResource.get); router.get('/internal/api/mails/:id', MailResource.get);
router.delete('/internal/api/mails/:id', MailResource.delete); router.delete('/internal/api/mails/:id', MailResource.remove);
router.put('/internal/api/mails/reset/:id', MailResource.resetFailures); router.put('/internal/api/mails/reset/:id', MailResource.resetFailures);
router.put('/internal/api/nodes/:token', NodeResource.update); router.put('/internal/api/nodes/:token', NodeResource.update);
router.delete('/internal/api/nodes/:token', NodeResource.delete); router.delete('/internal/api/nodes/:token', NodeResource.remove);
router.get('/internal/api/nodes', NodeResource.getAll); router.get('/internal/api/nodes', NodeResource.getAll);
router.get('/internal/api/nodes/:token', NodeResource.get); router.get('/internal/api/nodes/:token', NodeResource.get);

View file

@ -1,235 +0,0 @@
'use strict';

// Legacy (pre-TypeScript) mail queue service, callback style.
const _ = require('lodash')
const async = require('async')
const deepExtend = require('deep-extend')
const moment = require('moment')

const config = require('../config').config
const Database = require('../db/database').db
const Logger = require('../logger')
const MailTemplateService = require('./mailTemplateService')
const Resources = require('../utils/resources')

// How many queued mails are loaded per DB batch and how many are sent in parallel.
const MAIL_QUEUE_DB_BATCH_SIZE = 50;
const MAIL_QUEUE_MAX_PARALLEL_SENDING = 3;

// Shared pooled SMTP transporter configured from the server settings.
const transporter = require('nodemailer').createTransport(deepExtend(
    {},
    config.server.email.smtp,
    {
        transport: 'smtp',
        pool: true
    }
));

MailTemplateService.configureTransporter(transporter);
// Renders the mail template for the given queue entry and sends it via the
// shared transporter. Calls back with an error on render or send failure.
function sendMail(options, callback) {
    Logger
        .tag('mail', 'queue')
        .info(
            'Sending pending mail[%d] of type %s. ' +
            'Had %d failures before.',
            options.id, options.email, options.failures
        );
    MailTemplateService.render(options, function (err, renderedTemplate) {
        if (err) {
            return callback(err);
        }
        const mailOptions = {
            from: options.sender,
            to: options.recipient,
            subject: renderedTemplate.subject,
            html: renderedTemplate.body
        };
        transporter.sendMail(mailOptions, function (err) {
            if (err) {
                return callback(err);
            }
            Logger.tag('mail', 'queue').info('Mail[%d] has been send.', options.id);
            callback(null);
        });
    }
    );
}
// Loads up to `limit` queue entries last modified before `beforeMoment` with
// fewer than 5 failures (oldest id first), parsing the JSON `data` column.
function findPendingMailsBefore(beforeMoment, limit, callback) {
    Database.all(
        'SELECT * FROM email_queue WHERE modified_at < ? AND failures < ? ORDER BY id ASC LIMIT ?',
        [beforeMoment.unix(), 5, limit],
        function (err, rows) {
            if (err) {
                return callback(err);
            }
            let pendingMails;
            try {
                pendingMails = _.map(rows, function (row) {
                    return deepExtend(
                        {},
                        row,
                        {
                            data: JSON.parse(row.data)
                        }
                    );
                });
            }
            catch (error) {
                // JSON.parse of a corrupt data column fails the whole batch.
                return callback(error);
            }
            callback(null, pendingMails);
        }
    );
}

// Deletes a single mail from the queue by id.
function removePendingMailFromQueue(id, callback) {
    Database.run('DELETE FROM email_queue WHERE id = ?', [id], callback);
}

// Bumps the failure counter and modified_at timestamp for a queued mail.
function incrementFailureCounterForPendingEmail(id, callback) {
    const now = moment();
    Database.run(
        'UPDATE email_queue SET failures = failures + 1, modified_at = ? WHERE id = ?',
        [now.unix(), id],
        callback
    );
}

// Sends one queued mail: on success the entry is removed from the queue,
// on failure the error is logged and the failure counter incremented so
// that processing of the remaining queue can continue.
function sendPendingMail(pendingMail, callback) {
    sendMail(pendingMail, function (err) {
        if (err) {
            // we only log the error and increment the failure counter as we want to continue with pending mails
            Logger.tag('mail', 'queue').error('Error sending pending mail[' + pendingMail.id + ']:', err);
            return incrementFailureCounterForPendingEmail(pendingMail.id, function (err) {
                if (err) {
                    return callback(err);
                }
                return callback(null);
            });
        }
        removePendingMailFromQueue(pendingMail.id, callback);
    });
}

// Fetches a single mail row by id.
function doGetMail(id, callback) {
    Database.get('SELECT * FROM email_queue WHERE id = ?', [id], callback);
}
module.exports = {
    // Inserts a new mail into the queue with zero failures; `data` must be a plain object.
    enqueue (sender, recipient, email, data, callback) {
        if (!_.isPlainObject(data)) {
            return callback(new Error('Unexpected data: ' + data));
        }
        Database.run(
            'INSERT INTO email_queue ' +
            '(failures, sender, recipient, email, data) ' +
            'VALUES (?, ?, ?, ?, ?)',
            [0, sender, recipient, email, JSON.stringify(data)],
            function (err, res) {
                callback(err, res);
            }
        );
    },

    // Fetches a single mail by id.
    getMail (id, callback) {
        doGetMail(id, callback);
    },

    // Lists queued mails matching the REST filter parameters.
    // NOTE(review): `total` is the unfiltered count of all queued mails,
    // while the returned rows are filtered — confirm this is intended.
    getPendingMails (restParams, callback) {
        Database.get(
            'SELECT count(*) AS total FROM email_queue',
            [],
            function (err, row) {
                if (err) {
                    return callback(err);
                }
                const total = row.total;
                const filter = Resources.filterClause(
                    restParams,
                    'id',
                    ['id', 'failures', 'sender', 'recipient', 'email', 'created_at', 'modified_at'],
                    ['id', 'failures', 'sender', 'recipient', 'email']
                );
                Database.all(
                    'SELECT * FROM email_queue WHERE ' + filter.query,
                    _.concat([], filter.params),
                    function (err, rows) {
                        if (err) {
                            return callback(err);
                        }
                        callback(null, rows, total);
                    }
                );
            }
        );
    },

    // Removes a mail from the queue by id.
    deleteMail (id, callback) {
        removePendingMailFromQueue(id, callback);
    },

    // Resets the failure counter for a mail and returns the updated row.
    resetFailures (id, callback) {
        Database.run(
            'UPDATE email_queue SET failures = 0, modified_at = ? WHERE id = ?',
            [moment().unix(), id],
            function (err) {
                if (err) {
                    return callback(err);
                }
                // sqlite exposes the affected row count as this.changes.
                if (!this.changes) {
                    return callback('Error: could not reset failure count for mail: ' + id);
                }
                doGetMail(id, callback);
            }
        );
    },

    // Drains the queue in batches; mails failing to send get their failure
    // counter bumped (which updates modified_at), so the loop terminates.
    sendPendingMails (callback) {
        Logger.tag('mail', 'queue').debug('Start sending pending mails...');
        const startTime = moment();
        const sendNextBatch = function (err) {
            if (err) {
                return callback(err);
            }
            Logger.tag('mail', 'queue').debug('Sending next batch...');
            findPendingMailsBefore(startTime, MAIL_QUEUE_DB_BATCH_SIZE, function (err, pendingMails) {
                if (err) {
                    return callback(err);
                }
                if (_.isEmpty(pendingMails)) {
                    Logger.tag('mail', 'queue').debug('Done sending pending mails.');
                    return callback(null);
                }
                async.eachLimit(
                    pendingMails,
                    MAIL_QUEUE_MAX_PARALLEL_SENDING,
                    sendPendingMail,
                    sendNextBatch
                );
            });
        };
        sendNextBatch(null);
    }
}

View file

@ -0,0 +1,174 @@
import _ from "lodash";
import deepExtend from "deep-extend";
import moment, {Moment} from "moment";
import {createTransport} from "nodemailer";
import {config} from "../config";
import {db, Statement} from "../db/database";
import Logger from "../logger";
import * as MailTemplateService from "./mailTemplateService";
import * as Resources from "../utils/resources";
import {RestParams} from "../utils/resources";
import {Mail, MailData, MailId, MailType} from "../types";
// How many queued mails are loaded (and sent) per batch.
const MAIL_QUEUE_DB_BATCH_SIZE = 50;

// Shared pooled SMTP transporter configured from the server settings.
const transporter = createTransport(deepExtend(
    {},
    config.server.email.smtp,
    {
        transport: 'smtp',
        pool: true
    }
));

MailTemplateService.configureTransporter(transporter);
/**
 * Renders the template for one queued mail and sends it via the shared
 * SMTP transporter. Rejects on render or send failure.
 */
async function sendMail(options: Mail): Promise<void> {
    Logger
        .tag('mail', 'queue')
        .info(
            'Sending pending mail[%d] of type %s. ' +
            'Had %d failures before.',
            options.id, options.email, options.failures
        );
    const {subject, body} = await MailTemplateService.render(options);
    await transporter.sendMail({
        from: options.sender,
        to: options.recipient,
        subject,
        html: body
    });
    Logger.tag('mail', 'queue').info('Mail[%d] has been send.', options.id);
}
/**
 * Loads up to `limit` mails last modified before `beforeMoment` with fewer
 * than 5 failures, oldest id first. The JSON payload stored in the `data`
 * column is parsed into an object on each returned row.
 */
async function findPendingMailsBefore(beforeMoment: Moment, limit: number): Promise<Mail[]> {
    const sql = 'SELECT * FROM email_queue WHERE modified_at < ? AND failures < ? ORDER BY id ASC LIMIT ?';
    const rows = await db.all(sql, [beforeMoment.unix(), 5, limit]);
    return _.map(rows, row => {
        const data = JSON.parse(row.data);
        return deepExtend({}, row, {data});
    });
}
// Deletes a single mail from the queue by id.
async function removePendingMailFromQueue(id: MailId): Promise<void> {
    await db.run('DELETE FROM email_queue WHERE id = ?', [id]);
}

// Bumps the failure counter and modified_at timestamp for a queued mail.
async function incrementFailureCounterForPendingEmail(id: MailId): Promise<void> {
    const now = moment();
    await db.run(
        'UPDATE email_queue SET failures = failures + 1, modified_at = ? WHERE id = ?',
        [now.unix(), id],
    );
}
/**
 * Tries to send one pending mail. On success the entry leaves the queue;
 * on failure the error is logged and the failure counter is incremented,
 * so processing of the remaining queue can continue.
 */
async function sendPendingMail(pendingMail: Mail): Promise<void> {
    let sent = false;
    try {
        await sendMail(pendingMail);
        sent = true;
    }
    catch (error) {
        // we only log the error and increment the failure counter as we want to continue with pending mails
        Logger.tag('mail', 'queue').error('Error sending pending mail[' + pendingMail.id + ']:', error);
        await incrementFailureCounterForPendingEmail(pendingMail.id);
    }
    if (sent) {
        await removePendingMailFromQueue(pendingMail.id);
    }
}
// Fetches a single mail row by id.
async function doGetMail(id: MailId): Promise<Mail> {
    return await db.get('SELECT * FROM email_queue WHERE id = ?', [id]);
}
/**
 * Inserts a new mail into the email queue with zero failures.
 * Rejects when `data` is not a plain object.
 */
export async function enqueue (sender: string, recipient: string, email: MailType, data: MailData): Promise<void> {
    if (!_.isPlainObject(data)) {
        throw new Error('Unexpected data: ' + data);
    }
    const sql =
        'INSERT INTO email_queue ' +
        '(failures, sender, recipient, email, data) ' +
        'VALUES (?, ?, ?, ?, ?)';
    await db.run(sql, [0, sender, recipient, email, JSON.stringify(data)]);
}
// Public accessor: fetches a single mail by id.
export async function getMail (id: MailId): Promise<Mail> {
    return await doGetMail(id);
}
/**
 * Lists mails in the queue matching the given REST filter parameters.
 *
 * Note: `total` is the overall number of queued mails (unfiltered count),
 * while `mails` contains only the rows matching the filter.
 */
export async function getPendingMails (restParams: RestParams): Promise<{mails: Mail[], total: number}> {
    const countRow = await db.get('SELECT count(*) AS total FROM email_queue', []);
    const filter = Resources.filterClause(
        restParams,
        'id',
        ['id', 'failures', 'sender', 'recipient', 'email', 'created_at', 'modified_at'],
        ['id', 'failures', 'sender', 'recipient', 'email']
    );
    const mails = await db.all(
        'SELECT * FROM email_queue WHERE ' + filter.query,
        _.concat([], filter.params),
    );
    return {mails, total: countRow.total};
}
// Removes a mail from the queue by id.
export async function deleteMail (id: MailId): Promise<void> {
    await removePendingMailFromQueue(id);
}

// Resets the failure counter for a mail and returns the updated row.
// Throws when no row was affected (unknown id).
export async function resetFailures (id: MailId): Promise<Mail> {
    const statement = await db.run(
        'UPDATE email_queue SET failures = 0, modified_at = ? WHERE id = ?',
        [moment().unix(), id],
    );
    // statement.changes is the number of rows affected by the UPDATE.
    if (!statement.changes) {
        throw new Error('Error: could not reset failure count for mail: ' + id);
    }
    return await doGetMail(id);
}
/**
 * Drains the mail queue in batches of MAIL_QUEUE_DB_BATCH_SIZE.
 *
 * Only mails last modified before the job's start time are considered.
 * Each attempt either removes the row (success) or bumps modified_at
 * (failure), so every mail is handled at most once per run and the loop
 * terminates.
 */
export async function sendPendingMails (): Promise<void> {
    Logger.tag('mail', 'queue').debug('Start sending pending mails...');
    const startTime = moment();
    while (true) {
        Logger.tag('mail', 'queue').debug('Sending next batch...');
        const pendingMails = await findPendingMailsBefore(startTime, MAIL_QUEUE_DB_BATCH_SIZE);
        if (_.isEmpty(pendingMails)) {
            Logger.tag('mail', 'queue').debug('Done sending pending mails.');
            return;
        }
        // Mails are sent sequentially (the legacy code sent up to 3 in parallel).
        for (const pendingMail of pendingMails) {
            await sendPendingMail(pendingMail);
        }
    }
}

View file

@ -1,122 +0,0 @@
'use strict';

// Legacy (pre-TypeScript) mail template rendering, callback style.
const _ = require('lodash')
const async = require('async')
const deepExtend = require('deep-extend')
const fs = require('graceful-fs')
const moment = require('moment')

const config = require('../config').config
const Logger = require('../logger')
const UrlBuilder = require('../utils/urlBuilder')

// Templates live next to the compiled sources; snippets are shared partials.
const templateBasePath = __dirname + '/../mailTemplates';
const snippetsBasePath = templateBasePath + '/snippets';

// Helper functions exposed inside every template's rendering context.
const templateFunctions = {};
// Renders the named snippet, merging the caller's `this` (parent template
// data), the explicit data and the shared helper functions.
function renderSnippet(name, data) {
    const snippetFile = snippetsBasePath + '/' + name + '.html';
    return _.template(fs.readFileSync(snippetFile).toString())(deepExtend(
        {},
        // jshint -W040
        this, // parent data
        // jshint +W040
        data,
        templateFunctions
    ));
}

// Returns a helper bound to a specific snippet name, forwarding `this`.
function snippet(name) {
    return function (data) {
        return renderSnippet.bind(this)(name, data);
    };
}

// Renders an HTML link in the mail accent color; text falls back to href.
function renderLink(href, text) {
    return _.template(
        '<a href="<%- href %>#" style="color: #E5287A;"><%- text %></a>'
    )({
        href: href,
        text: text || href
    });
}

// Renders a thin horizontal rule matching the mail design.
function renderHR() {
    return '<hr style="border-top: 1px solid #333333; border-left: 0; border-right: 0; border-bottom: 0;" />';
}

// Formats a unix timestamp as a German-locale local date/time string.
function formatDateTime(unix) {
    return moment.unix(unix).locale('de').local().format('DD.MM.YYYY HH:mm');
}

// Formats a unix timestamp as a German-locale relative time ("vor 3 Tagen").
function formatFromNow(unix) {
    return moment.unix(unix).locale('de').fromNow();
}

// Register the helpers available inside templates.
templateFunctions.header = snippet('header');
templateFunctions.footer = snippet('footer');
templateFunctions.monitoringFooter = snippet('monitoring-footer');
templateFunctions.snippet = renderSnippet;
templateFunctions.link = renderLink;
templateFunctions.hr = renderHR;
templateFunctions.formatDateTime = formatDateTime;
templateFunctions.formatFromNow = formatFromNow;
module.exports = {
    // Installs the html-to-text compile plugin on the nodemailer transporter
    // so plain-text alternatives are generated from the HTML bodies.
    configureTransporter (transporter) {
        const htmlToText = require('nodemailer-html-to-text').htmlToText;
        transporter.use('compile', htmlToText({
            tables: ['.table']
        }));
    },

    // Renders subject and body templates for a queued mail. The template data
    // is the mail payload merged with community config, the edit-node URL and
    // the shared helper functions. Errors are logged with the mail id.
    render (mailOptions, callback) {
        const templatePathPrefix = templateBasePath + '/' + mailOptions.email;
        async.parallel({
            subject: _.partial(fs.readFile, templatePathPrefix + '.subject.txt'),
            body: _.partial(fs.readFile, templatePathPrefix + '.body.html')
        },
        function (err, templates) {
            if (err) {
                return callback(err);
            }
            const data = deepExtend(
                {},
                mailOptions.data,
                {
                    community: config.client.community,
                    editNodeUrl: UrlBuilder.editNodeUrl()
                },
                templateFunctions
            );
            function render (field) {
                return _.template(templates[field].toString())(data);
            }
            let renderedTemplate;
            try {
                renderedTemplate = {
                    subject: _.trim(render('subject')),
                    body: render('body')
                };
            } catch (error) {
                Logger
                    .tag('mail', 'template')
                    .error('Error rendering template for mail[' + mailOptions.id + ']:', error);
                return callback(error);
            }
            callback(null, renderedTemplate);
        }
        );
    }
}

View file

@ -0,0 +1,103 @@
import _ from "lodash";
import deepExtend from "deep-extend";
import {readFileSync, promises as fs} from "graceful-fs";
import moment from "moment";
import {htmlToText} from "nodemailer-html-to-text";
import {config} from "../config";
import Logger from "../logger";
import {editNodeUrl} from "../utils/urlBuilder";
import {Transporter} from "nodemailer";
import {MailData, Mail} from "../types";
// Templates live next to the compiled sources; snippets are shared partials.
const templateBasePath = __dirname + '/../mailTemplates';
const snippetsBasePath = templateBasePath + '/snippets';
// Helper functions exposed inside every template's rendering context
// (populated below after the helpers are defined).
const templateFunctions: {[key: string]: (...data: MailData) => string} = {};
// Renders the named snippet file, merging the caller's `this` (parent
// template data), the explicit data and the shared helper functions.
// Reads the snippet synchronously on every call.
function renderSnippet(this: any, name: string, data: MailData): string {
    const snippetFile = snippetsBasePath + '/' + name + '.html';
    return _.template(readFileSync(snippetFile).toString())(deepExtend(
        {},
        this, // parent data
        data,
        templateFunctions
    ));
}
/**
 * Returns a template helper bound to a specific snippet name, forwarding
 * the caller's `this` as parent data to renderSnippet.
 */
function snippet(name: string): ((this: any, data: MailData) => string) {
    return function (this: any, data: MailData): string {
        const renderBound = renderSnippet.bind(this);
        return renderBound(name, data);
    };
}
/**
 * Renders an HTML link in the mail's accent color.
 *
 * @param href - link target
 * @param text - link text; falls back to the href itself when empty or omitted
 *               (the body already handled falsy text via `text || href`, so the
 *               parameter is now typed optional to match under strict mode)
 */
function renderLink(href: string, text?: string): string {
    // noinspection HtmlUnknownTarget
    return _.template(
        '<a href="<%- href %>#" style="color: #E5287A;"><%- text %></a>'
    )({
        href: href,
        text: text || href
    });
}
/**
 * Renders a thin horizontal rule matching the mail design.
 */
function renderHR(): string {
    const style = 'border-top: 1px solid #333333; border-left: 0; border-right: 0; border-bottom: 0;';
    return '<hr style="' + style + '" />';
}
// Formats a unix timestamp as a German-locale local date/time string.
function formatDateTime(unix: number): string {
    return moment.unix(unix).locale('de').local().format('DD.MM.YYYY HH:mm');
}
// Formats a unix timestamp as a German-locale relative time ("vor 3 Tagen").
function formatFromNow(unix: number): string {
    return moment.unix(unix).locale('de').fromNow();
}
// Register the helpers available inside all templates.
templateFunctions.header = snippet('header');
templateFunctions.footer = snippet('footer');
templateFunctions.monitoringFooter = snippet('monitoring-footer');
templateFunctions.snippet = renderSnippet;
templateFunctions.link = renderLink;
templateFunctions.hr = renderHR;
templateFunctions.formatDateTime = formatDateTime;
templateFunctions.formatFromNow = formatFromNow;

/**
 * Installs the html-to-text compile plugin on the nodemailer transporter so
 * plain-text alternatives are generated from the HTML mail bodies.
 */
export function configureTransporter (transporter: Transporter): void {
    transporter.use('compile', htmlToText({
        tables: ['.table']
    }));
}
/**
 * Renders the subject and body templates for the given mail.
 *
 * The template data is the mail payload merged with the community config,
 * the edit-node URL and the shared template helper functions. Rendering
 * errors are logged with the mail id and rethrown.
 */
export async function render(mailOptions: Mail): Promise<{subject: string, body: string}> {
    const templatePathPrefix = templateBasePath + '/' + mailOptions.email;
    // Read both template files in parallel — they are independent (the legacy
    // implementation used async.parallel for the same reason).
    const [subject, body] = await Promise.all([
        fs.readFile(templatePathPrefix + '.subject.txt'),
        fs.readFile(templatePathPrefix + '.body.html')
    ]);
    const data = deepExtend(
        {},
        mailOptions.data,
        {
            community: config.client.community,
            editNodeUrl: editNodeUrl()
        },
        templateFunctions
    );
    try {
        return {
            subject: _.trim(_.template(subject.toString())(data)),
            body: _.template(body.toString())(data)
        };
    } catch (error) {
        Logger
            .tag('mail', 'template')
            .error('Error rendering template for mail[' + mailOptions.id + ']:', error);
        throw error;
    }
}

View file

@ -1,826 +0,0 @@
'use strict';

// Legacy (pre-TypeScript) monitoring service, callback style.
const _ = require('lodash')
const async = require('async')
const moment = require('moment')
const request = require('request')

const config = require('../config').config
const Constraints = require('../validation/constraints')
const Database = require('../db/database').db
const DatabaseUtil = require('../utils/databaseUtil')
const ErrorTypes = require('../utils/errorTypes')
const Logger = require('../logger')
const MailService = require('../services/mailService')
const NodeService = require('../services/nodeService')
const Resources = require('../utils/resources')
const Strings = require('../utils/strings')
const UrlBuilder = require('../utils/urlBuilder')
const Validator = require('../validation/validator')

// Batch sizes used by this service (MAC chunking / mail-sending DB batches).
const MONITORING_STATE_MACS_CHUNK_SIZE = 100;
const MONITORING_MAILS_DB_BATCH_SIZE = 50;
/**
 * Defines the intervals emails are sent if a node is offline
 */
const MONITORING_OFFLINE_MAILS_SCHEDULE = {
    1: { amount: 3, unit: 'hours' },
    2: { amount: 1, unit: 'days' },
    3: { amount: 7, unit: 'days' }
};
// Offline nodes older than this are purged — presumably by a cleanup job
// outside this view; confirm against the rest of the file.
const DELETE_OFFLINE_NODES_AFTER_DURATION = {
    amount: 100,
    unit: 'days'
};

// Timestamp of the last processed nodes.json import, if any.
let previousImportTimestamp = null;
// Inserts a fresh node_state row for a node not yet known to monitoring.
function insertNodeInformation(nodeData, node, callback) {
    Logger
        .tag('monitoring', 'information-retrieval')
        .debug('Node is new in monitoring, creating data: %s', nodeData.mac);
    return Database.run(
        'INSERT INTO node_state ' +
        '(hostname, mac, site, domain, monitoring_state, state, last_seen, import_timestamp, last_status_mail_sent, last_status_mail_type) ' +
        'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
        [
            node.hostname,
            node.mac,
            nodeData.site,
            nodeData.domain,
            node.monitoringState,
            nodeData.state,
            nodeData.lastSeen.unix(),
            nodeData.importTimestamp.unix(),
            null, // new node so we haven't send a mail yet
            null // new node so we haven't send a mail yet
        ],
        callback
    );
}

// Updates an existing node_state row, but only when the import is newer
// than the row's stored import_timestamp (stale imports are skipped).
function updateNodeInformation(nodeData, node, row, callback) {
    Logger
        .tag('monitoring', 'information-retrieval')
        .debug('Node is known in monitoring: %s', nodeData.mac);
    // jshint -W106
    if (!moment(row.import_timestamp).isBefore(nodeData.importTimestamp)) {
    // jshint +W106
        Logger
            .tag('monitoring', 'information-retrieval')
            .debug('No new data for node, skipping: %s', nodeData.mac);
        return callback();
    }
    Logger
        .tag('monitoring', 'information-retrieval')
        .debug('New data for node, updating: %s', nodeData.mac);
    return Database.run(
        'UPDATE node_state ' +
        'SET ' +
        'hostname = ?, ' +
        'site = ?, ' +
        'domain = ?, ' +
        'monitoring_state = ?, ' +
        'state = ?, ' +
        'last_seen = ?, ' +
        'import_timestamp = ?, ' +
        'modified_at = ? ' +
        'WHERE id = ? AND mac = ?',
        [
            node.hostname,
            nodeData.site || row.site,
            nodeData.domain || row.domain,
            node.monitoringState,
            nodeData.state,
            nodeData.lastSeen.unix(),
            nodeData.importTimestamp.unix(),
            moment().unix(),
            row.id,
            node.mac
        ],
        callback
    );
}

// Upserts the monitoring state for a node. The literal string 'missing'
// marks nodes absent from the import; those are stored as OFFLINE, keeping
// the previously known site/domain/last_seen where available.
function storeNodeInformation(nodeData, node, callback) {
    Logger.tag('monitoring', 'information-retrieval').debug('Storing status for node: %s', nodeData.mac);
    return Database.get('SELECT * FROM node_state WHERE mac = ?', [node.mac], function (err, row) {
        if (err) {
            return callback(err);
        }
        let nodeDataForStoring;
        if (nodeData === 'missing') {
            nodeDataForStoring = {
                mac: node.mac,
                site: _.isUndefined(row) ? null : row.site,
                domain: _.isUndefined(row) ? null : row.domain,
                state: 'OFFLINE',
                // jshint -W106
                lastSeen: _.isUndefined(row) ? moment() : moment.unix(row.last_seen),
                // jshint +W106
                importTimestamp: moment()
            };
        } else {
            nodeDataForStoring = nodeData;
        }
        if (_.isUndefined(row)) {
            return insertNodeInformation(nodeDataForStoring, node, callback);
        } else {
            return updateNodeInformation(nodeDataForStoring, node, row, callback);
        }
    });
}
// MAC validator built from the shared node constraints.
const isValidMac = Validator.forConstraint(Constraints.node.mac);

// Parses a timestamp string as UTC; non-strings yield an invalid moment.
function parseTimestamp (timestamp) {
    if (!_.isString(timestamp)) {
        return moment.invalid();
    }
    return moment.utc(timestamp);
}

// Validates and normalizes a single nodes.json entry into the internal
// node-state shape. Throws (with the node id in the message) on any
// structural problem — the caller logs and drops such nodes.
function parseNode (importTimestamp, nodeData, nodeId) {
    if (!_.isPlainObject(nodeData)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected node type: ' + (typeof nodeData)
        );
    }
    if (!_.isPlainObject(nodeData.nodeinfo)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected nodeinfo type: ' + (typeof nodeData.nodeinfo)
        );
    }
    if (!_.isPlainObject(nodeData.nodeinfo.network)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected nodeinfo.network type: ' + (typeof nodeData.nodeinfo.network)
        );
    }
    if (!isValidMac(nodeData.nodeinfo.network.mac)) {
        throw new Error(
            'Node ' + nodeId + ': Invalid MAC: ' + nodeData.nodeinfo.network.mac
        );
    }
    const mac = Strings.normalizeMac(nodeData.nodeinfo.network.mac);
    if (!_.isPlainObject(nodeData.flags)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected flags type: ' + (typeof nodeData.flags)
        );
    }
    if (!_.isBoolean(nodeData.flags.online)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected flags.online type: ' + (typeof nodeData.flags.online)
        );
    }
    const isOnline = nodeData.flags.online;
    const lastSeen = parseTimestamp(nodeData.lastseen);
    if (!lastSeen.isValid()) {
        throw new Error(
            'Node ' + nodeId + ': Invalid lastseen timestamp: ' + nodeData.lastseen
        );
    }
    // site_code / domain_code are optional — default to null when absent.
    let site = null;
    // jshint -W106
    if (_.isPlainObject(nodeData.nodeinfo.system) && _.isString(nodeData.nodeinfo.system.site_code)) {
        site = nodeData.nodeinfo.system.site_code;
    }
    // jshint +W106
    let domain = null;
    // jshint -W106
    if (_.isPlainObject(nodeData.nodeinfo.system) && _.isString(nodeData.nodeinfo.system.domain_code)) {
        domain = nodeData.nodeinfo.system.domain_code;
    }
    // jshint +W106
    return {
        mac: mac,
        importTimestamp: importTimestamp,
        state: isOnline ? 'ONLINE' : 'OFFLINE',
        lastSeen: lastSeen,
        site: site,
        domain: domain
    };
}
// Parses a raw nodes.json body (version 1 only). Individual nodes failing
// validation are logged and dropped; structural errors fail the whole parse.
function parseNodesJson (body, callback) {
    Logger.tag('monitoring', 'information-retrieval').debug('Parsing nodes.json...');
    const data = {};
    try {
        const json = JSON.parse(body);
        if (json.version !== 1) {
            return callback(new Error('Unexpected nodes.json version: ' + json.version));
        }
        data.importTimestamp = parseTimestamp(json.timestamp);
        if (!data.importTimestamp.isValid()) {
            return callback(new Error('Invalid timestamp: ' + json.timestamp));
        }
        if (!_.isPlainObject(json.nodes)) {
            return callback(new Error('Invalid nodes object type: ' + (typeof json.nodes)));
        }
        data.nodes = _.filter(
            _.values(
                _.map(
                    json.nodes,
                    function (nodeData, nodeId) {
                        try {
                            return parseNode(data.importTimestamp, nodeData, nodeId);
                        }
                        catch (error) {
                            // Invalid single node: log and drop, keep the rest.
                            Logger.tag('monitoring', 'information-retrieval').error(error);
                            return null;
                        }
                    }
                )
            ),
            function (node) {
                return node !== null;
            }
        );
    }
    catch (error) {
        return callback(error);
    }
    callback(null, data);
}

// Marks a node_state row as touched without sending a mail (used when a
// node is skipped during mail sending).
function updateSkippedNode(id, node, callback) {
    Database.run(
        'UPDATE node_state ' +
        'SET hostname = ?, monitoring_state = ?, modified_at = ?' +
        'WHERE id = ?',
        [
            node ? node.hostname : '', node ? node.monitoringState : '', moment().unix(),
            id
        ],
        callback
    );
}
// Generic batched mail-sending loop for monitoring notifications.
//
// Repeatedly asks findBatchFun for the next batch of node_state rows and,
// per row, loads the node and either enqueues a mail of the given type
// (updating last_status_mail_sent/type) or marks the row as skipped.
// Terminates when a batch comes back empty; any error aborts the loop.
// NOTE(review): termination relies on findBatchFun excluding already
// processed rows (e.g. via modified_at) — confirm for each caller.
function sendMonitoringMailsBatched(name, mailType, findBatchFun, callback) {
    Logger.tag('monitoring', 'mail-sending').debug('Sending "%s" mails...', name);
    const sendNextBatch = function (err) {
        if (err) {
            return callback(err);
        }
        Logger.tag('monitoring', 'mail-sending').debug('Sending next batch...');
        findBatchFun(function (err, nodeStates) {
            if (err) {
                return callback(err);
            }
            if (_.isEmpty(nodeStates)) {
                Logger.tag('monitoring', 'mail-sending').debug('Done sending "%s" mails.', name);
                return callback(null);
            }
            async.each(
                nodeStates,
                function (nodeState, mailCallback) {
                    const mac = nodeState.mac;
                    Logger.tag('monitoring', 'mail-sending').debug('Loading node data for: %s', mac);
                    NodeService.getNodeDataByMac(mac, function (err, node, nodeSecrets) {
                        if (err) {
                            Logger
                                .tag('monitoring', 'mail-sending')
                                .error('Error sending "' + name + '" mail for node: ' + mac, err);
                            return mailCallback(err);
                        }
                        if (!node) {
                            // Node vanished from the registry: touch the row, send nothing.
                            Logger
                                .tag('monitoring', 'mail-sending')
                                .debug(
                                    'Node not found. Skipping sending of "' + name + '" mail: ' + mac
                                );
                            return updateSkippedNode(nodeState.id, {}, mailCallback);
                        }
                        if (node.monitoring && node.monitoringConfirmed) {
                            Logger
                                .tag('monitoring', 'mail-sending')
                                .info('Sending "%s" mail for: %s', name, mac);
                            MailService.enqueue(
                                config.server.email.from,
                                node.nickname + ' <' + node.email + '>',
                                mailType,
                                {
                                    node: node,
                                    // jshint -W106
                                    lastSeen: nodeState.last_seen,
                                    // jshint +W106
                                    disableUrl: UrlBuilder.monitoringDisableUrl(nodeSecrets)
                                },
                                function (err) {
                                    if (err) {
                                        Logger
                                            .tag('monitoring', 'mail-sending')
                                            .error('Error sending "' + name + '" mail for node: ' + mac, err);
                                        return mailCallback(err);
                                    }
                                    Logger
                                        .tag('monitoring', 'mail-sending')
                                        .debug('Updating node state: ', mac);
                                    // Record the mail type and timestamp so the next
                                    // schedule step can pick the row up later.
                                    const now = moment().unix();
                                    Database.run(
                                        'UPDATE node_state ' +
                                        'SET hostname = ?, monitoring_state = ?, modified_at = ?, last_status_mail_sent = ?, last_status_mail_type = ?' +
                                        'WHERE id = ?',
                                        [
                                            node.hostname, node.monitoringState, now, now, mailType,
                                            nodeState.id
                                        ],
                                        mailCallback
                                    );
                                }
                            );
                        } else {
                            // Monitoring disabled or unconfirmed: touch the row, send nothing.
                            Logger
                                .tag('monitoring', 'mail-sending')
                                .debug('Monitoring disabled, skipping "%s" mail for: %s', name, mac);
                            return updateSkippedNode(nodeState.id, {}, mailCallback);
                        }
                    });
                },
                sendNextBatch
            );
        });
    };
    sendNextBatch(null);
}
/**
 * Sends "online again" mails for nodes that came back online after one of
 * the offline notification mails had been sent.
 *
 * @param {moment} startTime the moment the job started
 * @param {Function} callback node-style callback (err)
 */
function sendOnlineAgainMails(startTime, callback) {
    sendMonitoringMailsBatched(
        'online again',
        'monitoring-online-again',
        function (findBatchCallback) {
            Database.all(
                'SELECT * FROM node_state ' +
                'WHERE modified_at < ? AND state = ? AND last_status_mail_type IN (' +
                '\'monitoring-offline-1\', \'monitoring-offline-2\', \'monitoring-offline-3\'' +
                ')' +
                'ORDER BY id ASC LIMIT ?',
                [
                    startTime.unix(),
                    'ONLINE',
                    MONITORING_MAILS_DB_BATCH_SIZE
                ],
                findBatchCallback
            );
        },
        callback
    );
}
/**
 * Sends one of the three escalating mails notifying that a node is offline.
 *
 * @param {moment} startTime the moment the job started
 * @param {Number} mailNumber which of the three mails (1-3), see MONITORING_OFFLINE_MAILS_SCHEDULE
 * @param {Function} callback node-style callback (err)
 */
function sendOfflineMails(startTime, mailNumber, callback) {
    sendMonitoringMailsBatched(
        'offline ' + mailNumber,
        'monitoring-offline-' + mailNumber,
        function (findBatchCallback) {
            // The mail type that must have been sent before, stored in the
            // database as last_status_mail_type.
            const previousType =
                mailNumber === 1 ? 'monitoring-online-again' : ('monitoring-offline-' + (mailNumber - 1));

            // The first time the first offline mail is sent there was no mail before.
            const allowNull = mailNumber === 1 ? ' OR last_status_mail_type IS NULL' : '';

            const schedule = MONITORING_OFFLINE_MAILS_SCHEDULE[mailNumber];
            const scheduledTimeBefore = moment().subtract(schedule.amount, schedule.unit);

            Database.all(
                'SELECT * FROM node_state ' +
                'WHERE modified_at < ? AND state = ? AND (last_status_mail_type = ?' + allowNull + ') AND ' +
                'last_seen <= ? AND (last_status_mail_sent <= ? OR last_status_mail_sent IS NULL) ' +
                'ORDER BY id ASC LIMIT ?',
                [
                    startTime.unix(),
                    'OFFLINE',
                    previousType,
                    scheduledTimeBefore.unix(),
                    scheduledTimeBefore.unix(),
                    MONITORING_MAILS_DB_BATCH_SIZE
                ],
                findBatchCallback
            );
        },
        callback
    );
}
/**
 * Downloads and parses nodes.json from each of the given URLs in parallel.
 *
 * @param {Array} urls nodes.json URLs
 * @param {Function} callback node-style callback (err, parsingResults)
 */
function withUrlsData(urls, callback) {
    async.map(urls, function (url, urlCallback) {
        Logger.tag('monitoring', 'information-retrieval').debug('Retrieving nodes.json: %s', url);

        request(url, function (err, response, body) {
            if (err) {
                return urlCallback(err);
            }

            if (response.statusCode !== 200) {
                return urlCallback(new Error(
                    'Could not download nodes.json from ' + url + ': ' +
                    response.statusCode + ' - ' + response.statusMessage
                ));
            }

            parseNodesJson(body, urlCallback);
        });
    }, callback);
}
/**
 * Imports node information from all given nodes.json URLs, updating the
 * node_state table. The whole import is skipped when no source provides
 * data newer than the previous run. Nodes not contained in this import
 * run are marked OFFLINE afterwards.
 *
 * @param {Array} urls nodes.json URLs
 * @param {Function} callback node-style callback (err)
 */
function retrieveNodeInformationForUrls(urls, callback) {
    withUrlsData(urls, function (err, datas) {
        if (err) {
            return callback(err);
        }

        // Determine newest / oldest import timestamp over all data sources.
        // Assumes datas is non-empty — callers ensure at least one URL.
        let maxTimestamp = datas[0].importTimestamp;
        let minTimestamp = maxTimestamp;
        _.each(datas, function (data) {
            if (data.importTimestamp.isAfter(maxTimestamp)) {
                maxTimestamp = data.importTimestamp;
            }
            if (data.importTimestamp.isBefore(minTimestamp)) {
                minTimestamp = data.importTimestamp;
            }
        });

        if (previousImportTimestamp !== null && !maxTimestamp.isAfter(previousImportTimestamp)) {
            Logger
                .tag('monitoring', 'information-retrieval')
                .debug(
                    'No new data, skipping. Current timestamp: %s, previous timestamp: %s',
                    maxTimestamp.format(),
                    previousImportTimestamp.format()
                );
            return callback();
        }
        previousImportTimestamp = maxTimestamp;

        // We do not parallelize here as the sqlite will start slowing down and blocking with too many
        // parallel queries. This has resulted in blocking other requests too and thus in a major slowdown.
        const allNodes = _.flatMap(datas, function (data) {
            return data.nodes;
        });

        // Get rid of duplicates from different nodes.json files. Always use the one with the newest
        // lastSeen timestamp (sorted descending, uniqBy keeps the first match per MAC).
        const sortedNodes = _.orderBy(allNodes, [function (node) {
            return node.lastSeen.unix();
        }], ['desc']);
        const uniqueNodes = _.uniqBy(sortedNodes, function (node) {
            return node.mac;
        });

        async.eachSeries(
            uniqueNodes,
            function (nodeData, nodeCallback) {
                Logger.tag('monitoring', 'information-retrieval').debug('Importing: %s', nodeData.mac);

                NodeService.getNodeDataByMac(nodeData.mac, function (err, node) {
                    if (err) {
                        Logger
                            .tag('monitoring', 'information-retrieval')
                            .error('Error importing: ' + nodeData.mac, err);
                        return nodeCallback(err);
                    }

                    if (!node) {
                        // Only nodes registered in this community are tracked.
                        Logger
                            .tag('monitoring', 'information-retrieval')
                            .debug('Unknown node, skipping: %s', nodeData.mac);
                        return nodeCallback(null);
                    }

                    storeNodeInformation(nodeData, node, function (err) {
                        if (err) {
                            Logger
                                .tag('monitoring', 'information-retrieval')
                                .debug('Could not update / deleting node data: %s', nodeData.mac, err);
                            return nodeCallback(err);
                        }

                        Logger
                            .tag('monitoring', 'information-retrieval')
                            .debug('Updating / deleting node data done: %s', nodeData.mac);
                        nodeCallback();
                    });
                });
            },
            function (err) {
                if (err) {
                    return callback(err);
                }

                Logger
                    .tag('monitoring', 'information-retrieval')
                    .debug('Marking missing nodes as offline.');

                // Mark nodes as offline that haven't been imported in this run.
                Database.run(
                    'UPDATE node_state ' +
                    'SET state = ?, modified_at = ?' +
                    'WHERE import_timestamp < ?',
                    [
                        'OFFLINE', moment().unix(),
                        minTimestamp.unix()
                    ],
                    callback
                );
            }
        );
    });
}
/**
 * Public monitoring service API (callback style).
 */
module.exports = {
    /**
     * Returns a page of node_state rows matching the REST filter parameters
     * plus the total count of matching rows (callback: err, rows, total).
     */
    getAll: function (restParams, callback) {
        // Whitelists protecting the dynamically built SQL against injection.
        const sortFields = [
            'id',
            'hostname',
            'mac',
            'site',
            'domain',
            'monitoring_state',
            'state',
            'last_seen',
            'import_timestamp',
            'last_status_mail_type',
            'last_status_mail_sent',
            'created_at',
            'modified_at'
        ];
        const filterFields = [
            'hostname',
            'mac',
            'monitoring_state',
            'state',
            'last_status_mail_type'
        ];

        const where = Resources.whereCondition(restParams, filterFields);

        Database.get(
            'SELECT count(*) AS total FROM node_state WHERE ' + where.query,
            _.concat([], where.params),
            function (err, row) {
                if (err) {
                    return callback(err);
                }
                const total = row.total;

                const filter = Resources.filterClause(
                    restParams,
                    'id',
                    sortFields,
                    filterFields
                );

                Database.all(
                    'SELECT * FROM node_state WHERE ' + filter.query,
                    _.concat([], filter.params),
                    function (err, rows) {
                        if (err) {
                            return callback(err);
                        }
                        callback(null, rows, total);
                    }
                );
            }
        );
    },

    /**
     * Looks up node_state rows for the given MACs, chunked to keep the
     * IN(...) clauses small (callback: err, macToRowMap).
     */
    getByMacs: function (macs, callback) {
        if (_.isEmpty(macs)) {
            return callback(null, {});
        }

        async.map(
            _.chunk(macs, MONITORING_STATE_MACS_CHUNK_SIZE),
            function (subMacs, subCallback) {
                const inCondition = DatabaseUtil.inCondition('mac', subMacs);

                Database.all(
                    'SELECT * FROM node_state WHERE ' + inCondition.query,
                    _.concat([], inCondition.params),
                    subCallback
                );
            },
            function (err, rowsArrays) {
                if (err) {
                    return callback(err);
                }

                const nodeStateByMac = {};
                _.each(_.flatten(rowsArrays), function (row) {
                    nodeStateByMac[row.mac] = row;
                });

                return callback(null, nodeStateByMac);
            }
        );
    },

    /**
     * Confirms monitoring for the node identified by the monitoring token
     * (callback: err, node).
     */
    confirm: function (token, callback) {
        NodeService.getNodeDataByMonitoringToken(token, function (err, node, nodeSecrets) {
            if (err) {
                return callback(err);
            }
            if (!node.monitoring || !nodeSecrets.monitoringToken || nodeSecrets.monitoringToken !== token) {
                return callback({data: 'Invalid token.', type: ErrorTypes.badRequest});
            }

            // Already confirmed — nothing to persist.
            if (node.monitoringConfirmed) {
                return callback(null, node);
            }

            node.monitoringConfirmed = true;
            NodeService.internalUpdateNode(node.token, node, nodeSecrets, function (err, token, node) {
                if (err) {
                    return callback(err);
                }
                callback(null, node);
            });
        });
    },

    /**
     * Disables monitoring for the node identified by the monitoring token
     * and invalidates the token (callback: err, node).
     */
    disable: function (token, callback) {
        NodeService.getNodeDataByMonitoringToken(token, function (err, node, nodeSecrets) {
            if (err) {
                return callback(err);
            }
            if (!node.monitoring || !nodeSecrets.monitoringToken || nodeSecrets.monitoringToken !== token) {
                return callback({data: 'Invalid token.', type: ErrorTypes.badRequest});
            }

            node.monitoring = false;
            node.monitoringConfirmed = false;
            nodeSecrets.monitoringToken = '';

            NodeService.internalUpdateNode(node.token, node, nodeSecrets, function (err, token, node) {
                if (err) {
                    return callback(err);
                }
                callback(null, node);
            });
        });
    },

    /**
     * Imports node information from the configured nodes.json URL(s).
     * Accepts a single URL string or an array of URLs (callback: err).
     */
    retrieveNodeInformation: function (callback) {
        let urls = config.server.map.nodesJsonUrl;
        if (_.isEmpty(urls)) {
            return callback(
                new Error('No nodes.json-URLs set. Please adjust config.json: server.map.nodesJsonUrl')
            );
        }
        if (_.isString(urls)) {
            urls = [urls];
        }
        retrieveNodeInformationForUrls(urls, callback);
    },

    /**
     * Sends all pending monitoring mails: first "online again", then the
     * three escalating offline mails. Failures of one mail type are logged
     * and do not stop the remaining types (callback: err).
     */
    sendMonitoringMails: function (callback) {
        Logger.tag('monitoring', 'mail-sending').debug('Sending monitoring mails...');

        const startTime = moment();

        sendOnlineAgainMails(startTime, function (err) {
            if (err) {
                // only logging and continuing with next type
                Logger
                    .tag('monitoring', 'mail-sending')
                    .error('Error sending "online again" mails.', err);
            }

            sendOfflineMails(startTime, 1, function (err) {
                if (err) {
                    // only logging and continuing with next type
                    Logger
                        .tag('monitoring', 'mail-sending')
                        .error('Error sending "offline 1" mails.', err);
                }

                sendOfflineMails(startTime, 2, function (err) {
                    if (err) {
                        // only logging and continuing with next type
                        Logger
                            .tag('monitoring', 'mail-sending')
                            .error('Error sending "offline 2" mails.', err);
                    }

                    sendOfflineMails(startTime, 3, function (err) {
                        if (err) {
                            // only logging and continuing with next type
                            Logger
                                .tag('monitoring', 'mail-sending')
                                .error('Error sending "offline 3" mails.', err);
                        }

                        callback(null);
                    });
                });
            });
        });
    },

    /**
     * Deletes nodes that have been offline longer than
     * DELETE_OFFLINE_NODES_AFTER_DURATION, removing both the node file and
     * the node_state row (callback: err).
     */
    deleteOfflineNodes: function (callback) {
        Logger
            .tag('nodes', 'delete-offline')
            .info(
                'Deleting offline nodes older than ' +
                DELETE_OFFLINE_NODES_AFTER_DURATION.amount + ' ' +
                DELETE_OFFLINE_NODES_AFTER_DURATION.unit
            );

        Database.all(
            'SELECT * FROM node_state WHERE state = ? AND last_seen < ?',
            [
                'OFFLINE',
                moment().subtract(
                    DELETE_OFFLINE_NODES_AFTER_DURATION.amount,
                    DELETE_OFFLINE_NODES_AFTER_DURATION.unit
                ).unix()
            ],
            // NOTE(review): the err argument of this Database.all callback is
            // never checked — a failed SELECT would make async.eachSeries
            // iterate over undefined. Confirm and handle.
            function (err, rows) {
                async.eachSeries(
                    rows,
                    function (row, nodeCallback) {
                        const mac = row.mac;
                        Logger.tag('nodes', 'delete-offline').info('Deleting node ' + mac);

                        NodeService.getNodeDataByMac(mac, function (err, node) {
                            if (err) {
                                Logger.tag('nodes', 'delete-offline').error('Error getting node ' + mac, err);
                                return nodeCallback(err);
                            }

                            async.seq(
                                function (callback) {
                                    if (node && node.token) {
                                        // If the node has no token it is a special node (e.g. a gateway)
                                        // we need to skip.
                                        return NodeService.deleteNode(node.token, callback);
                                    }
                                    return callback(null);
                                },
                                function (callback) {
                                    Database.run(
                                        'DELETE FROM node_state WHERE mac = ? AND state = ?',
                                        [mac, 'OFFLINE'],
                                        callback
                                    );
                                }
                            )(function (err) {
                                if (err) {
                                    Logger.tag('nodes', 'delete-offline').error('Error deleting node ' + mac, err);
                                    return nodeCallback(err);
                                }
                                nodeCallback(null);
                            });
                        });
                    },
                    callback
                );
            }
        );
    }
}

View file

@ -0,0 +1,687 @@
import _ from "lodash";
import moment, {Moment, unitOfTime} from "moment";
import request from "request";
import {config} from "../config";
import {db, Statement} from "../db/database";
import * as DatabaseUtil from "../utils/databaseUtil";
import ErrorTypes from "../utils/errorTypes";
import Logger from "../logger";
import * as MailService from "../services/mailService";
import * as NodeService from "../services/nodeService";
import * as Resources from "../utils/resources";
import {RestParams} from "../utils/resources";
import {normalizeMac} from "../utils/strings";
import {monitoringDisableUrl} from "../utils/urlBuilder";
import CONSTRAINTS from "../validation/constraints";
import {forConstraint} from "../validation/validator";
import {MailType, Node, NodeId, NodeState, NodeStateData} from "../types";
// Number of MACs per IN(...) chunk when querying monitoring state.
const MONITORING_STATE_MACS_CHUNK_SIZE = 100;
// Batch size for loading node_state rows while sending status mails.
const MONITORING_MAILS_DB_BATCH_SIZE = 50;
/**
 * Defines the intervals emails are sent if a node is offline
 */
const MONITORING_OFFLINE_MAILS_SCHEDULE: {[key: number]: {amount: number, unit: unitOfTime.DurationConstructor}} = {
    1: { amount: 3, unit: 'hours' },
    2: { amount: 1, unit: 'days' },
    3: { amount: 7, unit: 'days' }
};
// Offline nodes are purged after being unseen for this duration.
const DELETE_OFFLINE_NODES_AFTER_DURATION: {amount: number, unit: unitOfTime.DurationConstructor} = {
    amount: 100,
    unit: 'days'
};
// A single node as parsed from a nodes.json "nodes" entry.
type ParsedNode = {
    mac: string,
    importTimestamp: Moment, // timestamp of the nodes.json the entry came from
    state: NodeState,
    lastSeen: Moment,
    site: string,
    domain: string,
};
// Result of parsing one whole nodes.json document.
type NodesParsingResult = {
    importTimestamp: Moment,
    nodes: ParsedNode[],
}
// Timestamp of the last import; used to skip runs that bring no new data.
let previousImportTimestamp: Moment | null = null;
/**
 * Creates a fresh node_state row for a node that monitoring has not seen before.
 *
 * @param nodeData - parsed node info from nodes.json
 * @param node - the registered node the data belongs to
 */
async function insertNodeInformation(nodeData: ParsedNode, node: Node): Promise<void> {
    Logger
        .tag('monitoring', 'information-retrieval')
        .debug('Node is new in monitoring, creating data: %s', nodeData.mac);

    const values = [
        node.hostname,
        node.mac,
        nodeData.site,
        nodeData.domain,
        node.monitoringState,
        nodeData.state,
        nodeData.lastSeen.unix(),
        nodeData.importTimestamp.unix(),
        null, // new node so we haven't send a mail yet
        null  // new node so we haven't send a mail yet
    ];

    await db.run(
        'INSERT INTO node_state ' +
        '(hostname, mac, site, domain, monitoring_state, state, last_seen, import_timestamp, last_status_mail_sent, last_status_mail_type) ' +
        'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
        values
    );
}
/**
 * Updates the existing node_state row of a node already known to monitoring.
 *
 * The update is skipped when the imported data is not newer than what is
 * already stored (multiple nodes.json sources may deliver stale snapshots).
 *
 * @param nodeData - parsed node info from nodes.json
 * @param node - the registered node the data belongs to
 * @param row - the existing node_state database row
 */
async function updateNodeInformation(nodeData: ParsedNode, node: Node, row: any): Promise<void> {
    Logger
        // Fixed garbled log tag 'informacallbacktion-retrieval' left over
        // from the callback-removal refactoring.
        .tag('monitoring', 'information-retrieval')
        .debug('Node is known in monitoring: %s', nodeData.mac);

    // NOTE(review): row.import_timestamp is stored via unix() (seconds), but
    // moment(Number) interprets its argument as milliseconds — verify whether
    // this comparison should use moment.unix(row.import_timestamp) instead.
    if (!moment(row.import_timestamp).isBefore(nodeData.importTimestamp)) {
        Logger
            .tag('monitoring', 'information-retrieval')
            .debug('No new data for node, skipping: %s', nodeData.mac);
        return;
    }

    Logger
        .tag('monitoring', 'information-retrieval')
        .debug('New data for node, updating: %s', nodeData.mac);

    await db.run(
        'UPDATE node_state ' +
        'SET ' +
        'hostname = ?, ' +
        'site = ?, ' +
        'domain = ?, ' +
        'monitoring_state = ?, ' +
        'state = ?, ' +
        'last_seen = ?, ' +
        'import_timestamp = ?, ' +
        'modified_at = ? ' +
        'WHERE id = ? AND mac = ?',
        [
            node.hostname,
            // keep the previously known site / domain when the import has none
            nodeData.site || row.site,
            nodeData.domain || row.domain,
            node.monitoringState,
            nodeData.state,
            nodeData.lastSeen.unix(),
            nodeData.importTimestamp.unix(),
            moment().unix(),
            row.id,
            node.mac
        ]
    );
}
/**
 * Persists the imported status of a node: inserts a new node_state row on
 * first sight, otherwise delegates to the update path.
 */
async function storeNodeInformation(nodeData: ParsedNode, node: Node): Promise<void> {
    Logger.tag('monitoring', 'information-retrieval').debug('Storing status for node: %s', nodeData.mac);

    const existingRow = await db.get('SELECT * FROM node_state WHERE mac = ?', [node.mac]);
    if (_.isUndefined(existingRow)) {
        await insertNodeInformation(nodeData, node);
    } else {
        await updateNodeInformation(nodeData, node, existingRow);
    }
}
// Validator for MAC addresses as found in nodes.json.
const isValidMac = forConstraint(CONSTRAINTS.node.mac, false);

/**
 * Parses a nodes.json timestamp string as UTC.
 * Non-string input yields an invalid Moment instead of throwing.
 */
function parseTimestamp(timestamp: any): Moment {
    return _.isString(timestamp) ? moment.utc(timestamp) : moment.invalid();
}
/**
 * Validates and converts one raw nodes.json "nodes" entry into a ParsedNode.
 *
 * @param importTimestamp - timestamp of the nodes.json document the entry came from
 * @param nodeData - raw, untrusted node entry
 * @param nodeId - key of the entry, used in error messages only
 * @throws Error when any expected field is missing or malformed
 */
function parseNode(importTimestamp: Moment, nodeData: any, nodeId: NodeId): ParsedNode {
    if (!_.isPlainObject(nodeData)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected node type: ' + (typeof nodeData)
        );
    }

    if (!_.isPlainObject(nodeData.nodeinfo)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected nodeinfo type: ' + (typeof nodeData.nodeinfo)
        );
    }
    if (!_.isPlainObject(nodeData.nodeinfo.network)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected nodeinfo.network type: ' + (typeof nodeData.nodeinfo.network)
        );
    }

    if (!isValidMac(nodeData.nodeinfo.network.mac)) {
        throw new Error(
            'Node ' + nodeId + ': Invalid MAC: ' + nodeData.nodeinfo.network.mac
        );
    }
    const mac = normalizeMac(nodeData.nodeinfo.network.mac);

    if (!_.isPlainObject(nodeData.flags)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected flags type: ' + (typeof nodeData.flags)
        );
    }
    if (!_.isBoolean(nodeData.flags.online)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected flags.online type: ' + (typeof nodeData.flags.online)
        );
    }
    const isOnline = nodeData.flags.online;

    const lastSeen = parseTimestamp(nodeData.lastseen);
    if (!lastSeen.isValid()) {
        throw new Error(
            'Node ' + nodeId + ': Invalid lastseen timestamp: ' + nodeData.lastseen
        );
    }

    // NOTE(review): site / domain may remain null here although ParsedNode
    // declares them as string — confirm whether '' or string | null is intended.
    let site = null;
    if (_.isPlainObject(nodeData.nodeinfo.system) && _.isString(nodeData.nodeinfo.system.site_code)) {
        site = nodeData.nodeinfo.system.site_code;
    }

    let domain = null;
    if (_.isPlainObject(nodeData.nodeinfo.system) && _.isString(nodeData.nodeinfo.system.domain_code)) {
        domain = nodeData.nodeinfo.system.domain_code;
    }

    return {
        mac: mac,
        importTimestamp: importTimestamp,
        state: isOnline ? NodeState.ONLINE : NodeState.OFFLINE,
        lastSeen: lastSeen,
        site: site,
        domain: domain
    };
}
function parseNodesJson (body: string): NodesParsingResult {
Logger.tag('monitoring', 'information-retrieval').debug('Parsing nodes.json...');
const data: {[key: string]: any} = {};
const json = JSON.parse(body);
if (json.version !== 1) {
throw new Error('Unexpected nodes.json version: ' + json.version);
}
data.importTimestamp = parseTimestamp(json.timestamp);
if (!data.importTimestamp.isValid()) {
throw new Error('Invalid timestamp: ' + json.timestamp);
}
if (!_.isPlainObject(json.nodes)) {
throw new Error('Invalid nodes object type: ' + (typeof json.nodes));
}
data.nodes = _.filter(
_.values(
_.map(
json.nodes,
function (nodeData, nodeId) {
try {
return parseNode(data.importTimestamp, nodeData, nodeId);
}
catch (error) {
Logger.tag('monitoring', 'information-retrieval').error(error);
return null;
}
}
)
),
function (node) {
return node !== null;
}
);
return data as NodesParsingResult;
}
/**
 * Touches the node_state row of a node whose status mail was skipped, so the
 * batch query does not pick the row up again in the same run.
 *
 * @param id - node_state row id
 * @param node - optional node data; when missing, hostname / monitoring state are blanked
 */
async function updateSkippedNode(id: NodeId, node?: Node): Promise<Statement> {
    const hostname = node ? node.hostname : '';
    const monitoringState = node ? node.monitoringState : '';

    return await db.run(
        'UPDATE node_state ' +
        'SET hostname = ?, monitoring_state = ?, modified_at = ?' +
        'WHERE id = ?',
        [hostname, monitoringState, moment().unix(), id]
    );
}
/**
 * Generic batched mail sending loop: repeatedly fetches a batch of
 * node_state rows via findBatchFun and sends the given mail type for each
 * row until the batch query comes back empty.
 *
 * Rows that cannot be mailed (unknown node, monitoring disabled or
 * unconfirmed, missing monitoring token) are touched via updateSkippedNode
 * so they drop out of the next batch query instead of looping forever.
 *
 * @param name - human readable mail type name, used for logging only
 * @param mailType - mail template type to enqueue
 * @param findBatchFun - loads the next batch of node_state rows
 */
async function sendMonitoringMailsBatched(
    name: string,
    mailType: MailType,
    findBatchFun: () => Promise<any[]>,
): Promise<void> {
    Logger.tag('monitoring', 'mail-sending').debug('Sending "%s" mails...', name);

    while (true) {
        Logger.tag('monitoring', 'mail-sending').debug('Sending next batch...');

        const nodeStates = await findBatchFun();
        // An empty batch means all pending rows have been processed.
        if (_.isEmpty(nodeStates)) {
            Logger.tag('monitoring', 'mail-sending').debug('Done sending "%s" mails.', name);
            return;
        }

        for (const nodeState of nodeStates) {
            const mac = nodeState.mac;
            Logger.tag('monitoring', 'mail-sending').debug('Loading node data for: %s', mac);

            const result = await NodeService.getNodeDataByMac(mac);
            if (!result) {
                // Node file is gone; mark the row as processed so it is not retried.
                Logger
                    .tag('monitoring', 'mail-sending')
                    .debug(
                        'Node not found. Skipping sending of "' + name + '" mail: ' + mac
                    );
                await updateSkippedNode(nodeState.id);
                continue;
            }

            const {node, nodeSecrets} = result;
            if (!(node.monitoring && node.monitoringConfirmed)) {
                Logger
                    .tag('monitoring', 'mail-sending')
                    .debug('Monitoring disabled, skipping "%s" mail for: %s', name, mac);
                await updateSkippedNode(nodeState.id);
                continue;
            }

            const monitoringToken = nodeSecrets.monitoringToken;
            if (!monitoringToken) {
                Logger
                    .tag('monitoring', 'mail-sending')
                    .error('Node has no monitoring token. Cannot send mail "%s" for: %s', name, mac);
                await updateSkippedNode(nodeState.id);
                continue;
            }

            Logger
                .tag('monitoring', 'mail-sending')
                .info('Sending "%s" mail for: %s', name, mac);
            await MailService.enqueue(
                config.server.email.from,
                node.nickname + ' <' + node.email + '>',
                mailType,
                {
                    node: node,
                    lastSeen: nodeState.last_seen,
                    disableUrl: monitoringDisableUrl(monitoringToken)
                }
            );

            Logger
                .tag('monitoring', 'mail-sending')
                .debug('Updating node state: ', mac);

            // Remember when and which status mail was sent last.
            const now = moment().unix();
            await db.run(
                'UPDATE node_state ' +
                'SET hostname = ?, monitoring_state = ?, modified_at = ?, last_status_mail_sent = ?, last_status_mail_type = ?' +
                'WHERE id = ?',
                [
                    node.hostname, node.monitoringState, now, now, mailType,
                    nodeState.id
                ]
            );
        }
    }
}
/**
 * Sends "online again" mails for nodes that came back online after one of
 * the offline notification mails had been sent.
 *
 * @param startTime - the moment the job started
 */
async function sendOnlineAgainMails(startTime: Moment): Promise<void> {
    const findBatch = async (): Promise<any[]> => {
        return await db.all(
            'SELECT * FROM node_state ' +
            'WHERE modified_at < ? AND state = ? AND last_status_mail_type IN (' +
            '\'monitoring-offline-1\', \'monitoring-offline-2\', \'monitoring-offline-3\'' +
            ')' +
            'ORDER BY id ASC LIMIT ?',
            [
                startTime.unix(),
                'ONLINE',
                MONITORING_MAILS_DB_BATCH_SIZE
            ],
        );
    };

    await sendMonitoringMailsBatched('online again', 'monitoring-online-again', findBatch);
}
/**
 * Sends one of the three escalating mails notifying that a node is offline.
 *
 * @param startTime - the moment the job started
 * @param mailNumber - which of the three mails (1-3), see MONITORING_OFFLINE_MAILS_SCHEDULE
 */
async function sendOfflineMails(startTime: Moment, mailNumber: number): Promise<void> {
    await sendMonitoringMailsBatched(
        'offline ' + mailNumber,
        'monitoring-offline-' + mailNumber,
        async (): Promise<any[]> => {
            // The mail type that must have been sent before, stored in the
            // database as last_status_mail_type.
            const previousType =
                mailNumber === 1 ? 'monitoring-online-again' : ('monitoring-offline-' + (mailNumber - 1));

            // the first time the first offline mail is sent, there was no mail before
            const allowNull = mailNumber === 1 ? ' OR last_status_mail_type IS NULL' : '';

            const schedule = MONITORING_OFFLINE_MAILS_SCHEDULE[mailNumber];
            const scheduledTimeBefore = moment().subtract(schedule.amount, schedule.unit);

            return await db.all(
                'SELECT * FROM node_state ' +
                'WHERE modified_at < ? AND state = ? AND (last_status_mail_type = ?' + allowNull + ') AND ' +
                'last_seen <= ? AND (last_status_mail_sent <= ? OR last_status_mail_sent IS NULL) ' +
                'ORDER BY id ASC LIMIT ?',
                [
                    startTime.unix(),
                    'OFFLINE',
                    previousType,
                    scheduledTimeBefore.unix(),
                    scheduledTimeBefore.unix(),
                    MONITORING_MAILS_DB_BATCH_SIZE
                ],
            );
        },
    );
}
/**
 * Promise wrapper around the callback based request API.
 *
 * @param url - the URL to fetch via HTTP GET
 * @returns the response object together with the raw body
 */
function doRequest(url: string): Promise<{response: request.Response, body: string}> {
    return new Promise((resolve, reject) => {
        request(url, (err, response, body) => {
            if (err) {
                reject(err);
            } else {
                resolve({response, body});
            }
        });
    });
}
/**
 * Downloads and parses nodes.json from each of the given URLs sequentially.
 *
 * @param urls - nodes.json URLs
 * @returns one parsing result per URL, in input order
 * @throws Error when a download does not answer with HTTP 200
 */
async function withUrlsData(urls: string[]): Promise<NodesParsingResult[]> {
    const results: NodesParsingResult[] = [];

    for (const url of urls) {
        Logger.tag('monitoring', 'information-retrieval').debug('Retrieving nodes.json: %s', url);

        const {response, body} = await doRequest(url);
        if (response.statusCode !== 200) {
            throw new Error(
                'Could not download nodes.json from ' + url + ': ' +
                response.statusCode + ' - ' + response.statusMessage
            );
        }

        // parseNodesJson is synchronous — the previous `await` on its result
        // was a no-op and only suggested asynchrony where there is none.
        results.push(parseNodesJson(body));
    }

    return results;
}
/**
 * Imports node information from the given nodes.json URLs into node_state.
 * The whole import is skipped when no source provides data newer than the
 * previous run. Nodes not contained in this import run are marked OFFLINE.
 *
 * @param urls - nodes.json URLs; assumed non-empty (callers check this)
 */
async function retrieveNodeInformationForUrls(urls: string[]): Promise<void> {
    const datas = await withUrlsData(urls);

    // Determine newest / oldest import timestamp over all data sources.
    let maxTimestamp = datas[0].importTimestamp;
    let minTimestamp = maxTimestamp;
    for (const data of datas) {
        if (data.importTimestamp.isAfter(maxTimestamp)) {
            maxTimestamp = data.importTimestamp;
        }
        if (data.importTimestamp.isBefore(minTimestamp)) {
            minTimestamp = data.importTimestamp;
        }
    }

    if (previousImportTimestamp !== null && !maxTimestamp.isAfter(previousImportTimestamp)) {
        Logger
            .tag('monitoring', 'information-retrieval')
            .debug(
                'No new data, skipping. Current timestamp: %s, previous timestamp: %s',
                maxTimestamp.format(),
                previousImportTimestamp.format()
            );
        return;
    }
    previousImportTimestamp = maxTimestamp;

    // We do not parallelize here as the sqlite will start slowing down and blocking with too many
    // parallel queries. This has resulted in blocking other requests too and thus in a major slowdown.
    const allNodes = _.flatMap(datas, data => data.nodes);

    // Get rid of duplicates from different nodes.json files. Always use the one with the newest
    // lastSeen timestamp (sorted descending, uniqBy keeps the first match per MAC).
    const sortedNodes = _.orderBy(allNodes, [node => node.lastSeen.unix()], ['desc']);
    const uniqueNodes = _.uniqBy(sortedNodes, function (node) {
        return node.mac;
    });

    for (const nodeData of uniqueNodes) {
        Logger.tag('monitoring', 'information-retrieval').debug('Importing: %s', nodeData.mac);

        const result = await NodeService.getNodeDataByMac(nodeData.mac);
        if (!result) {
            // Only nodes registered in this community are tracked.
            Logger
                .tag('monitoring', 'information-retrieval')
                .debug('Unknown node, skipping: %s', nodeData.mac);
            continue;
        }

        await storeNodeInformation(nodeData, result.node);

        Logger
            .tag('monitoring', 'information-retrieval')
            .debug('Updating / deleting node data done: %s', nodeData.mac);
    }

    Logger
        .tag('monitoring', 'information-retrieval')
        .debug('Marking missing nodes as offline.');

    // Mark nodes as offline that haven't been imported in this run.
    await db.run(
        'UPDATE node_state ' +
        'SET state = ?, modified_at = ?' +
        'WHERE import_timestamp < ?',
        [
            NodeState.OFFLINE, moment().unix(),
            minTimestamp.unix()
        ]
    );
}
/**
 * Returns one page of node_state rows matching the REST filter parameters
 * plus the total count of all matching rows.
 *
 * @param restParams - pagination / sorting / filtering parameters
 */
export async function getAll(restParams: RestParams): Promise<{total: number, monitoringStates: any[]}> {
    // Whitelists protecting the dynamically built SQL against injection.
    const sortFields = [
        'id',
        'hostname',
        'mac',
        'site',
        'domain',
        'monitoring_state',
        'state',
        'last_seen',
        'import_timestamp',
        'last_status_mail_type',
        'last_status_mail_sent',
        'created_at',
        'modified_at'
    ];
    const filterFields = [
        'hostname',
        'mac',
        'monitoring_state',
        'state',
        'last_status_mail_type'
    ];

    const where = Resources.whereCondition(restParams, filterFields);
    const countRow = await db.get(
        'SELECT count(*) AS total FROM node_state WHERE ' + where.query,
        _.concat([], where.params),
    );

    const filter = Resources.filterClause(
        restParams,
        'id',
        sortFields,
        filterFields
    );
    const monitoringStates = await db.all(
        'SELECT * FROM node_state WHERE ' + filter.query,
        _.concat([], filter.params),
    );

    return {monitoringStates, total: countRow.total};
}
/**
 * Looks up monitoring state rows for the given MAC addresses, querying in
 * chunks to keep the IN(...) clauses reasonably small.
 *
 * @param macs - MAC addresses to look up
 * @returns a map from MAC address to its node_state row
 */
export async function getByMacs(macs: string[]): Promise<{[key: string]: NodeStateData}> {
    const nodeStateByMac: {[key: string]: NodeStateData} = {};
    if (_.isEmpty(macs)) {
        return nodeStateByMac;
    }

    for (let offset = 0; offset < macs.length; offset += MONITORING_STATE_MACS_CHUNK_SIZE) {
        const macChunk = macs.slice(offset, offset + MONITORING_STATE_MACS_CHUNK_SIZE);
        const inCondition = DatabaseUtil.inCondition('mac', macChunk);

        const rows = await db.all(
            'SELECT * FROM node_state WHERE ' + inCondition.query,
            _.concat([], inCondition.params),
        );

        for (const row of rows) {
            nodeStateByMac[row.mac] = row;
        }
    }

    return nodeStateByMac;
}
/**
 * Confirms monitoring for the node identified by the monitoring token.
 *
 * @param token - monitoring token from the confirmation link
 * @returns the (possibly updated) node
 * @throws badRequest error when the token does not match the node's secrets
 */
export async function confirm(token: string): Promise<Node> {
    const {node, nodeSecrets} = await NodeService.getNodeDataByMonitoringToken(token);

    const tokenIsValid =
        node.monitoring && nodeSecrets.monitoringToken && nodeSecrets.monitoringToken === token;
    if (!tokenIsValid) {
        throw {data: 'Invalid token.', type: ErrorTypes.badRequest};
    }

    // Already confirmed — nothing to persist.
    if (node.monitoringConfirmed) {
        return node;
    }

    node.monitoringConfirmed = true;
    const updated = await NodeService.internalUpdateNode(node.token, node, nodeSecrets);
    return updated.node;
}
/**
 * Disables monitoring for the node identified by the monitoring token and
 * invalidates the token.
 *
 * @param token - monitoring token from the disable link
 * @returns the updated node
 * @throws badRequest error when the token does not match the node's secrets
 */
export async function disable(token: string): Promise<Node> {
    const {node, nodeSecrets} = await NodeService.getNodeDataByMonitoringToken(token);

    const tokenIsValid =
        node.monitoring && nodeSecrets.monitoringToken && nodeSecrets.monitoringToken === token;
    if (!tokenIsValid) {
        throw {data: 'Invalid token.', type: ErrorTypes.badRequest};
    }

    node.monitoring = false;
    node.monitoringConfirmed = false;
    nodeSecrets.monitoringToken = '';

    const updated = await NodeService.internalUpdateNode(node.token, node, nodeSecrets);
    return updated.node;
}
/**
 * Imports node information from the configured nodes.json URL(s).
 *
 * @throws Error when no URL is configured
 */
export async function retrieveNodeInformation(): Promise<void> {
    const urls = config.server.map.nodesJsonUrl;
    if (_.isEmpty(urls)) {
        throw new Error('No nodes.json-URLs set. Please adjust config.json: server.map.nodesJsonUrl');
    }

    // The pre-migration implementation also accepted a single URL string;
    // keep supporting that so existing config.json files do not break.
    const urlList: string[] = _.isString(urls) ? [urls] : urls;

    return await retrieveNodeInformationForUrls(urlList);
}
/**
 * Sends all pending monitoring mails: first the "online again"
 * notifications, then the three escalating offline notifications.
 *
 * A failure of one mail type is logged and does not stop the remaining types.
 */
export async function sendMonitoringMails(): Promise<void> {
    Logger.tag('monitoring', 'mail-sending').debug('Sending monitoring mails...');

    const startTime = moment();

    try {
        await sendOnlineAgainMails(startTime);
    }
    catch (error) {
        // only logging and continuing with next type
        Logger
            .tag('monitoring', 'mail-sending')
            .error('Error sending "online again" mails.', error);
    }

    for (let mailNumber = 1; mailNumber <= 3; mailNumber++) {
        try {
            await sendOfflineMails(startTime, mailNumber);
        }
        catch (error) {
            // only logging and continuing with next type
            Logger
                .tag('monitoring', 'mail-sending')
                .error('Error sending "offline ' + mailNumber + '" mails.', error);
        }
    }
}
/**
 * Deletes nodes that have been offline longer than
 * DELETE_OFFLINE_NODES_AFTER_DURATION, removing both the node file and the
 * node_state row. Errors for single nodes are logged and do not abort the
 * cleanup of the remaining nodes.
 */
export async function deleteOfflineNodes(): Promise<void> {
    Logger
        .tag('nodes', 'delete-offline')
        .info(
            'Deleting offline nodes older than ' +
            DELETE_OFFLINE_NODES_AFTER_DURATION.amount + ' ' +
            DELETE_OFFLINE_NODES_AFTER_DURATION.unit
        );

    const rows = await db.all(
        'SELECT * FROM node_state WHERE state = ? AND last_seen < ?',
        [
            'OFFLINE',
            moment().subtract(
                DELETE_OFFLINE_NODES_AFTER_DURATION.amount,
                DELETE_OFFLINE_NODES_AFTER_DURATION.unit
            ).unix()
        ],
    );

    for (const row of rows) {
        const mac = row.mac;
        Logger.tag('nodes', 'delete-offline').info('Deleting node ' + mac);

        let node;
        try {
            const result = await NodeService.getNodeDataByMac(mac);
            node = result && result.node;
        }
        catch (error) {
            // Only log error. We try to delete the nodes state anyways.
            Logger.tag('nodes', 'delete-offline').error('Could not find node to delete: ' + mac, error);
        }

        // A node without a token is a special node (e.g. a gateway) whose
        // node file must not be deleted.
        if (node && node.token) {
            await NodeService.deleteNode(node.token);
        }

        try {
            await db.run(
                'DELETE FROM node_state WHERE mac = ? AND state = ?',
                [mac, 'OFFLINE'],
            );
        }
        catch (error) {
            // Only log error and continue with next node.
            Logger.tag('nodes', 'delete-offline').error('Could not delete node state: ' + mac, error);
        }
    }
}

View file

@ -1,532 +0,0 @@
'use strict';
const _ = require('lodash')
const async = require('async')
const crypto = require('crypto')
const fs = require('graceful-fs')
const glob = require('glob')
const config = require('../config').config
const ErrorTypes = require('../utils/errorTypes')
const Logger = require('../logger')
const MailService = require('../services/mailService')
const Strings = require('../utils/strings')
const UrlBuilder = require('../utils/urlBuilder')
// Maximum number of node files parsed concurrently.
const MAX_PARALLEL_NODES_PARSING = 10;
// Line prefixes used in the per-node peer files; each key maps a node /
// nodeSecrets property to its comment line in the file.
const linePrefixes = {
    hostname: '# Knotenname: ',
    nickname: '# Ansprechpartner: ',
    email: '# Kontakt: ',
    coords: '# Koordinaten: ',
    mac: '# MAC: ',
    token: '# Token: ',
    monitoring: '# Monitoring: ',
    monitoringToken: '# Monitoring-Token: '
};
// Order of the "@"-separated parts that make up a node file name.
const filenameParts = ['hostname', 'mac', 'key', 'token', 'monitoringToken'];
// Generates a random 16 character hex token (8 random bytes).
function generateToken() {
    return crypto.randomBytes(8).toString('hex');
}
/**
 * Builds the glob pattern matching node files for the given filter. Each
 * filename part not present in the filter is matched with a wildcard.
 *
 * @param {Object} filter maps filename parts (hostname, mac, ...) to exact values
 * @returns {String} lowercase glob pattern below the peers path
 */
function toNodeFilesPattern(filter) {
    const parts = filenameParts.map(function (field) {
        return filter.hasOwnProperty(field) ? filter[field] : '*';
    });
    const pattern = parts.join('@');

    return config.server.peersPath + '/' + pattern.toLowerCase();
}
/**
 * Asynchronously finds all node files matching the given filter.
 *
 * @param {Object} filter maps filename parts (hostname, mac, ...) to exact values
 * @param {Function} callback node-style callback (err, files)
 */
function findNodeFiles(filter, callback) {
    glob(toNodeFilesPattern(filter), callback);
}
/**
 * Synchronous variant of findNodeFiles.
 *
 * @param {Object} filter maps filename parts (hostname, mac, ...) to exact values
 * @returns {Array} matching node file paths
 */
function findNodeFilesSync(filter) {
    return glob.sync(toNodeFilesPattern(filter));
}
/**
 * Lists all regular files directly inside the peers directory.
 *
 * @param {Function} callback node-style callback (err, files)
 */
function findFilesInPeersPath(callback) {
    glob(config.server.peersPath + '/*', function (err, files) {
        if (err) {
            return callback(err);
        }

        async.filter(files, function (file, fileCallback) {
            // NOTE(review): "file" is the full path here, so file[0] is almost
            // certainly never "." — if this was meant to skip hidden files it
            // should probably inspect the basename. Confirm intent.
            if (file[0] === '.') {
                return fileCallback(null, false);
            }

            fs.lstat(file, function (err, stats) {
                if (err) {
                    return fileCallback(err);
                }
                fileCallback(null, stats.isFile());
            });
        }, callback);
    });
}
/**
 * Splits a node file name of the form
 * "hostname@mac@key@token@monitoringToken" into its named parts.
 *
 * @param {String} filename the node file name (without directory)
 * @returns {Object} map of filename part name to value (undefined when missing)
 */
function parseNodeFilename(filename) {
    const values = _.split(filename, '@', filenameParts.length);
    const parsed = {};

    filenameParts.forEach(function (key, index) {
        parsed[key] = values[index];
    });

    return parsed;
}
/**
 * Checks whether another node file matching the filter already exists.
 *
 * @param {Object} filter filename-part filter, e.g. { hostname: ... }
 * @param {?String} token the token of the node being saved, or null on create
 * @returns {Boolean} true when a different node already occupies the filter values
 */
function isDuplicate(filter, token) {
    const files = findNodeFilesSync(filter);
    if (files.length === 0) {
        return false;
    }

    if (files.length > 1 || !token /* node is being created*/) {
        return true;
    }

    // Exactly one match: it is only a duplicate if it belongs to another node.
    return parseNodeFilename(files[0]).token !== token;
}
/**
 * Validates that hostname, key, MAC and monitoring token of the node are not
 * already taken by another node.
 *
 * @param {?String} token the token of the node being saved, or null on create
 * @param {Object} node node data
 * @param {Object} nodeSecrets secret values (e.g. monitoringToken)
 * @returns {?Object} a conflict error object, or null when no duplicate exists
 */
function checkNoDuplicates(token, node, nodeSecrets) {
    // Builds the conflict error for the given duplicate field.
    function conflict(field) {
        return {data: {msg: 'Already exists.', field: field}, type: ErrorTypes.conflict};
    }

    if (isDuplicate({ hostname: node.hostname }, token)) {
        return conflict('hostname');
    }

    if (node.key && isDuplicate({ key: node.key }, token)) {
        return conflict('key');
    }

    if (isDuplicate({ mac: node.mac }, token)) {
        return conflict('mac');
    }

    if (nodeSecrets.monitoringToken && isDuplicate({ monitoringToken: nodeSecrets.monitoringToken }, token)) {
        return conflict('monitoringToken');
    }

    return null;
}
/**
 * Builds the full path of a node's peer file. The filename encodes
 * hostname, MAC, key, token and monitoring token separated by "@".
 *
 * @param {?String} token the node's token
 * @param {Object} node node data
 * @param {Object} nodeSecrets secret values (e.g. monitoringToken)
 * @returns {String} lowercase file path below the peers path
 */
function toNodeFilename(token, node, nodeSecrets) {
    const parts = [
        node.hostname || '',
        node.mac || '',
        node.key || '',
        token || '',
        nodeSecrets.monitoringToken || ''
    ];

    return config.server.peersPath + '/' + parts.join('@').toLowerCase();
}
/**
 * Serializes node data (and secrets) into its peer file. On update the old
 * file is removed first, because the filename itself encodes node properties.
 *
 * @param {Boolean} isUpdate whether an existing node is being updated
 * @param {?String} token the node's token (null on create)
 * @param {Object} node node data
 * @param {Object} nodeSecrets secret values (e.g. monitoringToken)
 * @param {Function} callback node-style callback (err, token, node)
 */
function writeNodeFile(isUpdate, token, node, nodeSecrets, callback) {
    const filename = toNodeFilename(token, node, nodeSecrets);

    // Render one "# Prefix: value" comment line per known field.
    let data = '';
    _.each(linePrefixes, function (prefix, key) {
        let value;
        switch (key) {
            case 'monitoring':
                if (node.monitoring && node.monitoringConfirmed) {
                    value = 'aktiv';
                } else if (node.monitoring && !node.monitoringConfirmed) {
                    value = 'pending';
                } else {
                    value = '';
                }
                break;

            case 'monitoringToken':
                value = nodeSecrets.monitoringToken || '';
                break;

            default:
                // Fall back to the secrets when the node itself has no value.
                value = key === 'token' ? token : node[key];
                if (_.isUndefined(value)) {
                    value = _.isUndefined(nodeSecrets[key]) ? '' : nodeSecrets[key];
                }
                break;
        }

        data += prefix + value + '\n';
    });

    if (node.key) {
        data += 'key "' + node.key + '";\n';
    }

    // since node.js is single threaded we don't need a lock
    let error;

    if (isUpdate) {
        const files = findNodeFilesSync({ token: token });
        if (files.length !== 1) {
            return callback({data: 'Node not found.', type: ErrorTypes.notFound});
        }

        error = checkNoDuplicates(token, node, nodeSecrets);
        if (error) {
            return callback(error);
        }

        // The filename encodes node data, so the old file has to be removed.
        const file = files[0];
        try {
            fs.unlinkSync(file);
        }
        catch (error) {
            Logger.tag('node', 'save').error('Could not delete old node file: ' + file, error);
            return callback({data: 'Could not remove old node data.', type: ErrorTypes.internalError});
        }
    } else {
        error = checkNoDuplicates(null, node, nodeSecrets);
        if (error) {
            return callback(error);
        }
    }

    try {
        fs.writeFileSync(filename, data, 'utf8');
    }
    catch (error) {
        Logger.tag('node', 'save').error('Could not write node file: ' + filename, error);
        return callback({data: 'Could not write node data.', type: ErrorTypes.internalError});
    }

    return callback(null, token, node);
}
// Deletes the single node file identified by the given token. Calls back
// with a not-found error unless exactly one file matches, and with an
// internal error when lookup or deletion fails.
function deleteNodeFile(token, callback) {
    findNodeFiles({ token: token }, function (err, files) {
        if (err) {
            // NOTE(review): `files` is undefined on this path, so the log
            // message prints "undefined" — should log the token instead.
            Logger.tag('node', 'delete').error('Could not find node file: ' + files, err);
            return callback({data: 'Could not delete node.', type: ErrorTypes.internalError});
        }
        if (files.length !== 1) {
            return callback({data: 'Node not found.', type: ErrorTypes.notFound});
        }
        try {
            fs.unlinkSync(files[0]);
        }
        catch (error) {
            Logger.tag('node', 'delete').error('Could not delete node file: ' + files, error);
            return callback({data: 'Could not delete node.', type: ErrorTypes.internalError});
        }
        return callback(null);
    });
}
// Reads a peers file and reconstructs the node and its secrets from the
// "# <label>: <value>" header lines and the optional fastd `key "..."`
// statement. Calls callback(err) or callback(null, node, nodeSecrets).
function parseNodeFile(file, callback) {
    fs.readFile(file, function (err, contents) {
        if (err) {
            return callback(err);
        }
        const lines = contents.toString();
        const node = {};
        const nodeSecrets = {};
        _.each(lines.split('\n'), function (line) {
            // each line matches at most one known prefix
            const entries = {};
            for (const key in linePrefixes) {
                if (linePrefixes.hasOwnProperty(key)) {
                    const prefix = linePrefixes[key];
                    if (line.substring(0, prefix.length) === prefix) {
                        entries[key] = Strings.normalizeString(line.substr(prefix.length));
                        break;
                    }
                }
            }
            // fastd key statement: key "<hex>";
            if (_.isEmpty(entries) && line.substring(0, 5) === 'key "') {
                entries.key = Strings.normalizeString(line.split('"')[1]);
            }
            _.each(entries, function (value, key) {
                if (key === 'mac') {
                    node.mac = value;
                    // mapId is the lowercased MAC without colons
                    node.mapId = _.toLower(value).replace(/:/g, '');
                } else if (key === 'monitoring') {
                    // stored as German keywords: 'aktiv' / 'pending' / ''
                    const active = value === 'aktiv';
                    const pending = value === 'pending';
                    node.monitoring = active || pending;
                    node.monitoringConfirmed = active;
                    node.monitoringState = active ? 'active' : (pending ? 'pending' : 'disabled');
                } else if (key === 'monitoringToken') {
                    nodeSecrets.monitoringToken = value;
                } else {
                    node[key] = value;
                }
            });
        });
        callback(null, node, nodeSecrets);
    });
}
// Looks up the node matching `filter` and yields its parsed data via
// callback(err, node, nodeSecrets). Yields callback(null) without node data
// unless exactly one file matches.
function findNodeDataByFilePattern(filter, callback) {
    findNodeFiles(filter, function (err, files) {
        if (err) {
            return callback(err);
        }
        return files.length === 1
            ? parseNodeFile(files[0], callback)
            : callback(null);
    });
}
// Like findNodeDataByFilePattern, but treats "no unique match" as a
// not-found error instead of yielding nothing.
function getNodeDataByFilePattern(filter, callback) {
    findNodeDataByFilePattern(filter, function (err, node, nodeSecrets) {
        if (err) {
            return callback(err);
        }
        if (node) {
            return callback(null, node, nodeSecrets);
        }
        callback({data: 'Node not found.', type: ErrorTypes.notFound});
    });
}
// Enqueues the mail asking the node's owner to confirm (or disable)
// monitoring. The confirm/disable links carry the monitoring token from
// nodeSecrets. Failures are logged and surfaced as a generic internal error.
function sendMonitoringConfirmationMail(node, nodeSecrets, callback) {
    const confirmUrl = UrlBuilder.monitoringConfirmUrl(nodeSecrets);
    const disableUrl = UrlBuilder.monitoringDisableUrl(nodeSecrets);
    MailService.enqueue(
        config.server.email.from,
        node.nickname + ' <' + node.email + '>',
        'monitoring-confirmation',
        {
            node: node,
            confirmUrl: confirmUrl,
            disableUrl: disableUrl
        },
        function (err) {
            if (err) {
                Logger.tag('monitoring', 'confirmation').error('Could not enqueue confirmation mail.', err);
                return callback({data: 'Internal error.', type: ErrorTypes.internalError});
            }
            callback(null);
        }
    );
}
// Public node-storage API. All functions follow the node-style
// callback(err, ...) convention.
module.exports = {
    // Registers a new node: assigns a fresh token, writes the node file and,
    // when monitoring was requested, enqueues the confirmation mail.
    createNode: function (node, callback) {
        const token = generateToken();
        const nodeSecrets = {};
        // monitoring always starts out unconfirmed
        node.monitoringConfirmed = false;
        if (node.monitoring) {
            nodeSecrets.monitoringToken = generateToken();
        }
        writeNodeFile(false, token, node, nodeSecrets, function (err, token, node) {
            if (err) {
                return callback(err);
            }
            if (node.monitoring && !node.monitoringConfirmed) {
                return sendMonitoringConfirmationMail(node, nodeSecrets, function (err) {
                    if (err) {
                        return callback(err);
                    }
                    return callback(null, token, node);
                });
            }
            return callback(null, token, node);
        });
    },
    // Updates an existing node (identified by token). The monitoring token
    // and confirmation state are carried over, reset or regenerated depending
    // on whether monitoring and/or the contact email changed; a
    // (re-)confirmation mail is sent when monitoring is enabled but
    // unconfirmed.
    updateNode: function (token, node, callback) {
        this.getNodeDataByToken(token, function (err, currentNode, nodeSecrets) {
            if (err) {
                return callback(err);
            }
            let monitoringConfirmed = false;
            let monitoringToken = '';
            if (node.monitoring) {
                if (!currentNode.monitoring) {
                    // monitoring just has been enabled
                    monitoringConfirmed = false;
                    monitoringToken = generateToken();
                } else {
                    // monitoring is still enabled
                    if (currentNode.email !== node.email) {
                        // new email so we need a new token and a reconfirmation
                        monitoringConfirmed = false;
                        monitoringToken = generateToken();
                    } else {
                        // email unchanged, keep token (fix if not set) and confirmation state
                        monitoringConfirmed = currentNode.monitoringConfirmed;
                        monitoringToken = nodeSecrets.monitoringToken || generateToken();
                    }
                }
            }
            node.monitoringConfirmed = monitoringConfirmed;
            nodeSecrets.monitoringToken = monitoringToken;
            writeNodeFile(true, token, node, nodeSecrets, function (err, token, node) {
                if (err) {
                    return callback(err);
                }
                if (node.monitoring && !node.monitoringConfirmed) {
                    return sendMonitoringConfirmationMail(node, nodeSecrets, function (err) {
                        if (err) {
                            return callback(err);
                        }
                        return callback(null, token, node);
                    });
                }
                return callback(null, token, node);
            });
        });
    },
    // Update that bypasses the monitoring-confirmation workflow; used by
    // internal jobs that already hold the node's secrets.
    internalUpdateNode: function (token, node, nodeSecrets, callback) {
        writeNodeFile(true, token, node, nodeSecrets, callback);
    },
    // Removes the node identified by the given token.
    deleteNode: function (token, callback) {
        deleteNodeFile(token, callback);
    },
    // Loads and parses every node file with bounded parallelism; failures
    // are logged and surfaced as a generic internal error.
    getAllNodes: function (callback) {
        findNodeFiles({}, function (err, files) {
            if (err) {
                Logger.tag('nodes').error('Error getting all nodes:', err);
                return callback({data: 'Internal error.', type: ErrorTypes.internalError});
            }
            async.mapLimit(
                files,
                MAX_PARALLEL_NODES_PARSING,
                parseNodeFile,
                function (err, nodes) {
                    if (err) {
                        Logger.tag('nodes').error('Error getting all nodes:', err);
                        return callback({data: 'Internal error.', type: ErrorTypes.internalError});
                    }
                    return callback(null, nodes);
                }
            );
        });
    },
    // Node lookup by MAC; yields nothing when there is no unique match.
    getNodeDataByMac: function (mac, callback) {
        return findNodeDataByFilePattern({ mac: mac }, callback);
    },
    // Node lookup by token; yields a not-found error on no unique match.
    getNodeDataByToken: function (token, callback) {
        return getNodeDataByFilePattern({ token: token }, callback);
    },
    // Node lookup by monitoring token; yields a not-found error on no unique match.
    getNodeDataByMonitoringToken: function (monitoringToken, callback) {
        return getNodeDataByFilePattern({ monitoringToken: monitoringToken }, callback);
    },
    // Renames node files whose name no longer matches the canonical
    // hostname@mac@key@token@monitoringToken form (e.g. after manual edits).
    fixNodeFilenames: function (callback) {
        findFilesInPeersPath(function (err, files) {
            if (err) {
                return callback(err);
            }
            async.mapLimit(
                files,
                MAX_PARALLEL_NODES_PARSING,
                function (file, fileCallback) {
                    parseNodeFile(file, function (err, node, nodeSecrets) {
                        if (err) {
                            return fileCallback(err);
                        }
                        const expectedFilename = toNodeFilename(node.token, node, nodeSecrets);
                        if (file !== expectedFilename) {
                            return fs.rename(file, expectedFilename, function (err) {
                                if (err) {
                                    return fileCallback(new Error(
                                        'Cannot rename file ' + file + ' to ' + expectedFilename + ' => ' + err
                                    ));
                                }
                                fileCallback(null);
                            });
                        }
                        fileCallback(null);
                    });
                },
                callback
            );
        });
    },
    // Aggregates counts over all nodes: total, VPN-key holders, nodes with
    // coordinates, and monitoring states.
    getNodeStatistics: function (callback) {
        this.getAllNodes(function (err, nodes) {
            if (err) {
                return callback(err);
            }
            const nodeStatistics = {
                registered: _.size(nodes),
                withVPN: 0,
                withCoords: 0,
                monitoring: {
                    active: 0,
                    pending: 0
                }
            };
            _.each(nodes, function (node) {
                if (node.key) {
                    nodeStatistics.withVPN += 1;
                }
                if (node.coords) {
                    nodeStatistics.withCoords += 1;
                }
                switch (node.monitoringState) {
                    case 'active':
                        nodeStatistics.monitoring.active += 1;
                        break;
                    case 'pending':
                        nodeStatistics.monitoring.pending += 1;
                        break;
                }
            });
            callback(null, nodeStatistics);
        });
    }
}

View file

@ -0,0 +1,505 @@
import _ from "lodash";
import async from "async";
import crypto from "crypto";
import oldFs, {promises as fs} from "graceful-fs";
import glob from "glob";
import path from "path";
import util from "util";

import {config} from "../config";
import ErrorTypes from "../utils/errorTypes";
import Logger from "../logger";
import * as MailService from "../services/mailService";
import {normalizeString} from "../utils/strings";
import {monitoringConfirmUrl, monitoringDisableUrl} from "../utils/urlBuilder";
import {MonitoringState, MonitoringToken, Node, NodeSecrets, NodeStatistics, Token} from "../types";
// glob only offers a callback API; promisify it once for the whole module.
const pglob = util.promisify(glob);
// Filter for locating node files; every set field must match exactly,
// unset fields match anything.
type NodeFilter = {
    hostname?: string,
    mac?: string,
    key?: string,
    token?: Token,
    monitoringToken?: string,
}
// Result of splitting a node filename back into its components; fields are
// absent when the filename has fewer parts than expected.
type NodeFilenameParsed = {
    hostname?: string,
    mac?: string,
    key?: string,
    token?: Token,
    monitoringToken?: string,
}
// Header-line prefixes used in the peers file format, keyed by field name.
// The labels are German because gateways consume these files as-is.
const linePrefixes = {
    hostname: '# Knotenname: ',
    nickname: '# Ansprechpartner: ',
    email: '# Kontakt: ',
    coords: '# Koordinaten: ',
    mac: '# MAC: ',
    token: '# Token: ',
    monitoring: '# Monitoring: ',
    monitoringToken: '# Monitoring-Token: '
};
// Order of the '@'-separated components in a node's filename.
const filenameParts = ['hostname', 'mac', 'key', 'token', 'monitoringToken'];
// Creates a fresh random token: 8 random bytes rendered as 16 hex characters.
function generateToken(): Token {
    const randomBytes = crypto.randomBytes(8);
    return randomBytes.toString('hex');
}
// Translates a NodeFilter into a glob pattern over the peers directory:
// set fields match their exact (lowercased) value, unset fields match '*'.
function toNodeFilesPattern(filter: NodeFilter): string {
    const lookup = filter as {[key: string]: string | undefined};
    const fields = filenameParts.map(field => field in filter ? lookup[field] : '*');
    return config.server.peersPath + '/' + fields.join('@').toLowerCase();
}
// Asynchronously resolves the node files matching the given filter.
async function findNodeFiles(filter: NodeFilter): Promise<string[]> {
    return await pglob(toNodeFilesPattern(filter));
}
/**
 * Synchronous variant of findNodeFiles, for call sites that cannot await
 * (e.g. the duplicate checks performed while a node file is being written).
 *
 * Adds the explicit return type missing from the original, matching the
 * file's convention of annotating all function return types.
 */
function findNodeFilesSync(filter: NodeFilter): string[] {
    return glob.sync(toNodeFilesPattern(filter));
}
/**
 * Lists all regular files in the configured peers directory, skipping
 * dotfiles, directories and symlinks.
 *
 * Fixes two issues of the previous version:
 * - It mixed async.filter callbacks with promises; this is plain promises.
 * - The hidden-file check tested `file[0] === '.'`, i.e. the first character
 *   of the WHOLE path (which is the peers directory), not of the file's
 *   basename — so dotfiles were never actually skipped.
 */
async function findFilesInPeersPath(): Promise<string[]> {
    const files = await pglob(config.server.peersPath + '/*');
    const keep = await Promise.all(files.map(async file => {
        if (path.basename(file).startsWith('.')) {
            return false;
        }
        // lstat (not stat): symlinks themselves do not count as files
        return (await fs.lstat(file)).isFile();
    }));
    return files.filter((file, i) => keep[i]);
}
// Splits a node filename of the form hostname@mac@key@token@monitoringToken
// into its named components. Missing trailing parts yield undefined values.
function parseNodeFilename(filename: string): NodeFilenameParsed {
    const parts = filename.split('@', filenameParts.length);
    const parsed: {[key: string]: string | undefined} = {};
    filenameParts.forEach((field, i) => {
        parsed[field] = parts[i];
    });
    return parsed;
}
// True iff some *other* node's file matches the filter. While creating a
// node (token === null) any match is a duplicate; while updating, a single
// match may be the node's own file, which is identified by its token.
function isDuplicate(filter: NodeFilter, token: Token | null): boolean {
    const matches = findNodeFilesSync(filter);
    switch (matches.length) {
        case 0:
            return false;
        case 1:
            return !token /* node is being created */ || parseNodeFilename(matches[0]).token !== token;
        default:
            return true;
    }
}
function checkNoDuplicates(token: Token | null, node: Node, nodeSecrets: NodeSecrets): void {
if (isDuplicate({ hostname: node.hostname }, token)) {
throw {data: {msg: 'Already exists.', field: 'hostname'}, type: ErrorTypes.conflict};
}
if (node.key) {
if (isDuplicate({ key: node.key }, token)) {
throw {data: {msg: 'Already exists.', field: 'key'}, type: ErrorTypes.conflict};
}
}
if (isDuplicate({ mac: node.mac }, token)) {
throw {data: {msg: 'Already exists.', field: 'mac'}, type: ErrorTypes.conflict};
}
if (nodeSecrets.monitoringToken && isDuplicate({ monitoringToken: nodeSecrets.monitoringToken }, token)) {
throw {data: {msg: 'Already exists.', field: 'monitoringToken'}, type: ErrorTypes.conflict};
}
}
// Builds the canonical (lowercased) peers file path for a node:
// <peersPath>/<hostname>@<mac>@<key>@<token>@<monitoringToken>
function toNodeFilename(token: Token, node: Node, nodeSecrets: NodeSecrets): string {
    const filenameFields = [
        node.hostname || '',
        node.mac || '',
        node.key || '',
        token || '',
        nodeSecrets.monitoringToken || ''
    ];
    return config.server.peersPath + '/' + filenameFields.join('@').toLowerCase();
}
/**
 * Serializes a node (plus its secrets) into the peers file format and writes
 * it to disk. On update the old file is removed first, because the filename
 * encodes node data and may have changed. Duplicate checks run before any
 * file operation so a conflict never leaves the node half-written.
 *
 * Throws not-found / conflict / internal-error objects; returns the written
 * token and node on success.
 */
async function writeNodeFile(
    isUpdate: boolean,
    token: Token,
    node: Node,
    nodeSecrets: NodeSecrets,
): Promise<{token: Token, node: Node}> {
    const filename = toNodeFilename(token, node, nodeSecrets);
    let data = '';
    // Emit one "# <label>: <value>" header line per known prefix.
    _.each(linePrefixes, function (prefix, key) {
        let value;
        switch (key) {
            case 'monitoring':
                // monitoring state is stored as a German keyword in the file
                if (node.monitoring && node.monitoringConfirmed) {
                    value = 'aktiv';
                } else if (node.monitoring && !node.monitoringConfirmed) {
                    value = 'pending';
                } else {
                    value = '';
                }
                break;
            case 'monitoringToken':
                value = nodeSecrets.monitoringToken || '';
                break;
            default:
                // plain fields come from the node, falling back to secrets
                value = key === 'token' ? token : (node as {[key: string]: any})[key];
                if (_.isUndefined(value)) {
                    const nodeSecret = (nodeSecrets as {[key: string]: string})[key];
                    value = _.isUndefined(nodeSecret) ? '' : nodeSecret;
                }
                break;
        }
        data += prefix + value + '\n';
    });
    // fastd key statement, only for VPN nodes
    if (node.key) {
        data += 'key "' + node.key + '";\n';
    }
    // since node.js is single threaded we don't need a lock
    if (isUpdate) {
        const files = findNodeFilesSync({ token: token });
        if (files.length !== 1) {
            throw {data: 'Node not found.', type: ErrorTypes.notFound};
        }
        checkNoDuplicates(token, node, nodeSecrets);
        // remove the old file before writing, as the name may have changed
        const file = files[0];
        try {
            oldFs.unlinkSync(file);
        }
        catch (error) {
            Logger.tag('node', 'save').error('Could not delete old node file: ' + file, error);
            throw {data: 'Could not remove old node data.', type: ErrorTypes.internalError};
        }
    } else {
        checkNoDuplicates(null, node, nodeSecrets);
    }
    try {
        oldFs.writeFileSync(filename, data, 'utf8');
        return {token, node};
    }
    catch (error) {
        Logger.tag('node', 'save').error('Could not write node file: ' + filename, error);
        throw {data: 'Could not write node data.', type: ErrorTypes.internalError};
    }
}
/**
 * Deletes the single node file identified by the given token.
 *
 * Throws a not-found error when the token does not match exactly one file
 * and an internal error when lookup or deletion fails.
 */
async function deleteNodeFile(token: Token): Promise<void> {
    let files;
    try {
        files = await findNodeFiles({ token: token });
    }
    catch (error) {
        // Bug fix: `files` was still unassigned on this path, so the old
        // message logged "undefined" — log the token we searched for instead.
        Logger.tag('node', 'delete').error('Could not find node file for token: ' + token, error);
        throw {data: 'Could not delete node.', type: ErrorTypes.internalError};
    }
    if (files.length !== 1) {
        throw {data: 'Node not found.', type: ErrorTypes.notFound};
    }
    try {
        oldFs.unlinkSync(files[0]);
    }
    catch (error) {
        Logger.tag('node', 'delete').error('Could not delete node file: ' + files, error);
        throw {data: 'Could not delete node.', type: ErrorTypes.internalError};
    }
}
/**
 * Reads a peers file and reconstructs the node and its secrets from the
 * "# <label>: <value>" header lines and the optional fastd `key "..."`
 * statement.
 */
async function parseNodeFile(file: string): Promise<{node: Node, nodeSecrets: NodeSecrets}> {
    const contents = await fs.readFile(file);
    const lines = contents.toString();
    const node: {[key: string]: any} = {};
    const nodeSecrets: {[key: string]: any} = {};
    _.each(lines.split('\n'), function (line) {
        // each line matches at most one known prefix
        const entries: {[key: string]: string} = {};
        for (const key of Object.keys(linePrefixes)) {
            const prefix = (linePrefixes as {[key: string]: string})[key];
            if (line.substring(0, prefix.length) === prefix) {
                entries[key] = normalizeString(line.substr(prefix.length));
                break;
            }
        }
        // fastd key statement: key "<hex>";
        if (_.isEmpty(entries) && line.substring(0, 5) === 'key "') {
            entries.key = normalizeString(line.split('"')[1]);
        }
        _.each(entries, function (value, key) {
            switch (key) {
                case 'mac':
                    node.mac = value;
                    // mapId is the lowercased MAC without colons
                    node.mapId = _.toLower(value).replace(/:/g, '');
                    break;
                case 'monitoring':
                    // stored as German keywords: 'aktiv' / 'pending' / ''
                    const active = value === 'aktiv';
                    const pending = value === 'pending';
                    node.monitoring = active || pending;
                    node.monitoringConfirmed = active;
                    node.monitoringState =
                        active ? MonitoringState.ACTIVE : (pending ? MonitoringState.PENDING : MonitoringState.DISABLED);
                    break;
                case 'monitoringToken':
                    nodeSecrets.monitoringToken = value;
                    break;
                default:
                    node[key] = value;
                    break;
            }
        });
    });
    return {
        node: node as Node,
        nodeSecrets: nodeSecrets as NodeSecrets,
    };
}
// Resolves the node data matching the filter, or null unless exactly one
// node file matches.
async function findNodeDataByFilePattern(filter: NodeFilter): Promise<{node: Node, nodeSecrets: NodeSecrets} | null> {
    const files = await findNodeFiles(filter);
    return files.length === 1 ? await parseNodeFile(files[0]) : null;
}
// Like findNodeDataByFilePattern, but raises a not-found error instead of
// returning null.
async function getNodeDataByFilePattern(filter: NodeFilter): Promise<{node: Node, nodeSecrets: NodeSecrets}> {
    const result = await findNodeDataByFilePattern(filter);
    if (result) {
        return result;
    }
    throw {data: 'Node not found.', type: ErrorTypes.notFound};
}
// Enqueues the mail asking the node's owner to confirm (or disable)
// monitoring. Requires a monitoring token in nodeSecrets; otherwise an
// internal error is thrown.
async function sendMonitoringConfirmationMail(node: Node, nodeSecrets: NodeSecrets): Promise<void> {
    const {monitoringToken} = nodeSecrets;
    if (!monitoringToken) {
        Logger
            .tag('monitoring', 'confirmation')
            .error('Could not enqueue confirmation mail. No monitoring token found.');
        throw {data: 'Internal error.', type: ErrorTypes.internalError};
    }
    await MailService.enqueue(
        config.server.email.from,
        node.nickname + ' <' + node.email + '>',
        'monitoring-confirmation',
        {
            node: node,
            confirmUrl: monitoringConfirmUrl(monitoringToken),
            disableUrl: monitoringDisableUrl(monitoringToken)
        },
    );
}
/**
 * Registers a new node: assigns a fresh token, writes the node file and,
 * when monitoring was requested, enqueues the confirmation mail.
 */
export async function createNode (node: Node): Promise<{token: Token, node: Node}> {
    const token: Token = generateToken();
    const nodeSecrets: NodeSecrets = {};
    // monitoring always starts out unconfirmed
    node.monitoringConfirmed = false;
    if (node.monitoring) {
        nodeSecrets.monitoringToken = generateToken();
    }
    const result = await writeNodeFile(false, token, node, nodeSecrets);
    if (result.node.monitoring && !result.node.monitoringConfirmed) {
        await sendMonitoringConfirmationMail(result.node, nodeSecrets);
    }
    return result;
}
/**
 * Updates an existing node (identified by token). The monitoring token and
 * confirmation state are reset, regenerated or carried over depending on
 * whether monitoring and/or the contact email changed; a (re-)confirmation
 * mail is sent whenever monitoring is enabled but not yet confirmed.
 */
export async function updateNode (token: Token, node: Node): Promise<{token: Token, node: Node}> {
    const {node: currentNode, nodeSecrets} = await getNodeDataByToken(token);
    let monitoringConfirmed = false;
    let monitoringToken = '';
    if (node.monitoring && !currentNode.monitoring) {
        // monitoring just has been enabled: new token, needs confirmation
        monitoringToken = generateToken();
    } else if (node.monitoring && currentNode.email !== node.email) {
        // new email, so we need a new token and a reconfirmation
        monitoringToken = generateToken();
    } else if (node.monitoring) {
        // email unchanged, keep token (fix if not set) and confirmation state
        monitoringConfirmed = currentNode.monitoringConfirmed;
        monitoringToken = nodeSecrets.monitoringToken || generateToken();
    }
    node.monitoringConfirmed = monitoringConfirmed;
    nodeSecrets.monitoringToken = monitoringToken;
    const result = await writeNodeFile(true, token, node, nodeSecrets);
    if (result.node.monitoring && !result.node.monitoringConfirmed) {
        await sendMonitoringConfirmationMail(result.node, nodeSecrets);
    }
    return result;
}
// Update that bypasses the monitoring-confirmation workflow; used by
// internal jobs that already hold the node's secrets.
export async function internalUpdateNode(
    token: Token,
    node: Node, nodeSecrets: NodeSecrets
): Promise<{token: Token, node: Node}> {
    return writeNodeFile(true, token, node, nodeSecrets);
}
// Removes the node identified by the given token.
export async function deleteNode (token: Token): Promise<void> {
    return deleteNodeFile(token);
}
// Loads and parses every node file. Lookup and parse failures are logged
// and surfaced as a generic internal error.
export async function getAllNodes(): Promise<Node[]> {
    try {
        const files = await findNodeFiles({});
        const nodes: Node[] = [];
        for (const file of files) {
            nodes.push((await parseNodeFile(file)).node);
        }
        return nodes;
    } catch (error) {
        Logger.tag('nodes').error('Error getting all nodes:', error);
        throw {data: 'Internal error.', type: ErrorTypes.internalError};
    }
}
// Node lookup by MAC address; resolves to null when there is no unique match.
export async function getNodeDataByMac (mac: string): Promise<{node: Node, nodeSecrets: NodeSecrets} | null> {
    return findNodeDataByFilePattern({ mac });
}
// Node lookup by token; rejects with a not-found error on no unique match.
export async function getNodeDataByToken (token: Token): Promise<{node: Node, nodeSecrets: NodeSecrets}> {
    return getNodeDataByFilePattern({ token });
}
// Node lookup by monitoring token; rejects with a not-found error on no
// unique match.
export async function getNodeDataByMonitoringToken (
    monitoringToken: MonitoringToken
): Promise<{node: Node, nodeSecrets: NodeSecrets}> {
    return getNodeDataByFilePattern({ monitoringToken });
}
// Renames node files whose name no longer matches the canonical
// hostname@mac@key@token@monitoringToken form (e.g. after manual edits).
export async function fixNodeFilenames(): Promise<void> {
    for (const file of await findFilesInPeersPath()) {
        const {node, nodeSecrets} = await parseNodeFile(file);
        const expectedFilename = toNodeFilename(node.token, node, nodeSecrets);
        if (file === expectedFilename) {
            continue;
        }
        try {
            await fs.rename(file, expectedFilename);
        }
        catch (error) {
            throw new Error(
                'Cannot rename file ' + file + ' to ' + expectedFilename + ' => ' + error
            );
        }
    }
}
// Aggregates counts over all nodes: total, VPN-key holders, nodes with
// coordinates, and monitoring states (disabled is not counted separately).
export async function getNodeStatistics(): Promise<NodeStatistics> {
    const nodes = await getAllNodes();
    const nodeStatistics: NodeStatistics = {
        registered: nodes.length,
        withVPN: 0,
        withCoords: 0,
        monitoring: {
            active: 0,
            pending: 0
        }
    };
    // compile-time guard: fails to type-check if a MonitoringState is missed
    function ensureExhaustive(monitoringState: never): void {
        throw new Error('Add missing case for monitoring stat below: ' + monitoringState);
    }
    for (const node of nodes) {
        if (node.key) {
            nodeStatistics.withVPN += 1;
        }
        if (node.coords) {
            nodeStatistics.withCoords += 1;
        }
        const monitoringState = node.monitoringState;
        switch (monitoringState) {
            case MonitoringState.ACTIVE:
                nodeStatistics.monitoring.active += 1;
                break;
            case MonitoringState.PENDING:
                nodeStatistics.monitoring.pending += 1;
                break;
            case MonitoringState.DISABLED:
                // Not counted seperately.
                break;
            default:
                ensureExhaustive(monitoringState);
        }
    }
    return nodeStatistics;
}

View file

@ -1,4 +1,65 @@
// TODO: Complete interface / class declaration. // TODO: Token type.
export interface NodeSecrets { export type Token = string;
monitoringToken: string; // TODO: Token type. export type FastdKey = string;
export type MonitoringToken = string;
export enum MonitoringState {
ACTIVE = "active",
PENDING = "pending",
DISABLED = "disabled",
} }
// Identifier of a node as used by the map/monitoring backends.
export type NodeId = string;
// Online state as reported by monitoring.
export enum NodeState {
    ONLINE = "ONLINE",
    OFFLINE = "OFFLINE",
}
// Per-node state record kept by the monitoring service.
export type NodeStateData = {
    site: string,
    domain: string,
    state: NodeState,
}
// A registered node as stored in the peers files.
export type Node = {
    token: Token;
    nickname: string;
    email: string;
    hostname: string;
    coords?: string; // TODO: Use object with longitude and latitude.
    key?: FastdKey;
    mac: string;
    monitoring: boolean;
    monitoringConfirmed: boolean;
    monitoringState: MonitoringState;
};
// TODO: Complete interface / class declaration.
// Secrets belonging to a node that must never be rendered to other users.
export type NodeSecrets = {
    monitoringToken?: MonitoringToken,
};
// Aggregated counters over all registered nodes.
export type NodeStatistics = {
    registered: number,
    withVPN: number,
    withCoords: number,
    monitoring: {
        active: number,
        pending: number
    }
};
export type MailId = string;
// Template variables passed to the mail renderer.
export type MailData = any;
// Name of the mail template to render.
export type MailType = string;
// A queued outgoing mail.
export interface Mail {
    id: MailId,
    email: MailType,
    sender: string,
    recipient: string,
    data: MailData,
    failures: number,
}

View file

@ -113,18 +113,16 @@ export function getData (req: Request): any {
return _.extend({}, req.body, req.params, req.query); return _.extend({}, req.body, req.params, req.query);
} }
// TODO: Promisify. export async function getValidRestParams(
export function getValidRestParams(
type: string, type: string,
subtype: string | null, subtype: string | null,
req: Request, req: Request,
callback: (err: {data: any, type: {code: number}} | null, restParams?: RestParams) => void ): Promise<RestParams> {
) {
const restConstraints = CONSTRAINTS.rest as {[key: string]: any}; const restConstraints = CONSTRAINTS.rest as {[key: string]: any};
let constraints: Constraints; let constraints: Constraints;
if (!(type in restConstraints) || !isConstraints(restConstraints[type])) { if (!(type in restConstraints) || !isConstraints(restConstraints[type])) {
Logger.tag('validation', 'rest').error('Unknown REST resource type: {}', type); Logger.tag('validation', 'rest').error('Unknown REST resource type: {}', type);
return callback({data: 'Internal error.', type: ErrorTypes.internalError}); throw {data: 'Internal error.', type: ErrorTypes.internalError};
} }
constraints = restConstraints[type]; constraints = restConstraints[type];
@ -134,7 +132,7 @@ export function getValidRestParams(
const constraintsObj = CONSTRAINTS as {[key: string]: any}; const constraintsObj = CONSTRAINTS as {[key: string]: any};
if (!(subtypeFilters in constraintsObj) || !isConstraints(constraintsObj[subtypeFilters])) { if (!(subtypeFilters in constraintsObj) || !isConstraints(constraintsObj[subtypeFilters])) {
Logger.tag('validation', 'rest').error('Unknown REST resource subtype: {}', subtype); Logger.tag('validation', 'rest').error('Unknown REST resource subtype: {}', subtype);
return callback({data: 'Internal error.', type: ErrorTypes.internalError}); throw {data: 'Internal error.', type: ErrorTypes.internalError};
} }
filterConstraints = constraintsObj[subtypeFilters]; filterConstraints = constraintsObj[subtypeFilters];
} }
@ -147,12 +145,11 @@ export function getValidRestParams(
const areValidParams = forConstraints(constraints, false); const areValidParams = forConstraints(constraints, false);
const areValidFilters = forConstraints(filterConstraints, false); const areValidFilters = forConstraints(filterConstraints, false);
if (!areValidParams(restParams) || !areValidFilters(filterParams)) { if (!areValidParams(restParams) || !areValidFilters(filterParams)) {
return callback({data: 'Invalid REST parameters.', type: ErrorTypes.badRequest}); throw {data: 'Invalid REST parameters.', type: ErrorTypes.badRequest};
} }
restParams.filters = filterParams; restParams.filters = filterParams;
return restParams as RestParams;
callback(null, restParams as RestParams);
} }
export function filter (entities: ArrayLike<Entity>, allowedFilterFields: string[], restParams: RestParams) { export function filter (entities: ArrayLike<Entity>, allowedFilterFields: string[], restParams: RestParams) {

View file

@ -1,6 +1,6 @@
import _ from "lodash" import _ from "lodash"
import {config} from "../config" import {config} from "../config"
import {NodeSecrets} from "../types" import {MonitoringToken} from "../types"
// TODO: Typed URLs // TODO: Typed URLs
@ -31,10 +31,10 @@ export function editNodeUrl (): string {
return formUrl('update'); return formUrl('update');
} }
export function monitoringConfirmUrl (nodeSecrets: NodeSecrets): string { export function monitoringConfirmUrl (monitoringToken: MonitoringToken): string {
return formUrl('monitoring/confirm', { token: nodeSecrets.monitoringToken }); return formUrl('monitoring/confirm', { token: monitoringToken });
} }
export function monitoringDisableUrl (nodeSecrets: NodeSecrets): string { export function monitoringDisableUrl (monitoringToken: MonitoringToken): string {
return formUrl('monitoring/disable', { token: nodeSecrets.monitoringToken }); return formUrl('monitoring/disable', { token: monitoringToken });
} }