ESLint: Auto reformat and fixing some warnings / errors.
This commit is contained in:
parent
5237db38e0
commit
91690509d3
50 changed files with 2141 additions and 1493 deletions
6
server/@types/http-auth-connect/index.d.ts
vendored
6
server/@types/http-auth-connect/index.d.ts
vendored
|
@ -1,6 +1,6 @@
|
|||
declare module "http-auth-connect" {
|
||||
import {Auth} from "http-auth";
|
||||
import {RequestHandler} from "express"
|
||||
import { Auth } from "http-auth";
|
||||
import { RequestHandler } from "express";
|
||||
|
||||
export default function (auth: Auth): RequestHandler
|
||||
export default function (auth: Auth): RequestHandler;
|
||||
}
|
||||
|
|
17
server/@types/http-auth/index.d.ts
vendored
17
server/@types/http-auth/index.d.ts
vendored
|
@ -4,9 +4,18 @@ declare module "http-auth" {
|
|||
class BasicAuth extends Auth {}
|
||||
class BasicAuthOptions {}
|
||||
|
||||
type BasicAuthChecker =
|
||||
(username: string, password: string, callback: BasicAuthCheckerCallback) => void
|
||||
type BasicAuthCheckerCallback = (result: boolean | Error, customUser?: string) => void
|
||||
type BasicAuthChecker = (
|
||||
username: string,
|
||||
password: string,
|
||||
callback: BasicAuthCheckerCallback
|
||||
) => void;
|
||||
type BasicAuthCheckerCallback = (
|
||||
result: boolean | Error,
|
||||
customUser?: string
|
||||
) => void;
|
||||
|
||||
function basic(options: BasicAuthOptions, checker: BasicAuthChecker): BasicAuth
|
||||
function basic(
|
||||
options: BasicAuthOptions,
|
||||
checker: BasicAuthChecker
|
||||
): BasicAuth;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
declare module "nodemailer-html-to-text" {
|
||||
import {PluginFunction} from "nodemailer/lib/mailer";
|
||||
import {HtmlToTextOptions} from "html-to-text";
|
||||
import { PluginFunction } from "nodemailer/lib/mailer";
|
||||
import { HtmlToTextOptions } from "html-to-text";
|
||||
|
||||
export function htmlToText(options: HtmlToTextOptions): PluginFunction;
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import {MockLogger} from "./logger";
|
||||
import { MockLogger } from "./logger";
|
||||
|
||||
test("should reset single message", () => {
|
||||
// given
|
||||
|
@ -104,7 +104,7 @@ test("should get messages for no tag", () => {
|
|||
|
||||
// when
|
||||
logger.tag().debug("message");
|
||||
|
||||
|
||||
// then
|
||||
expect(logger.getMessages("debug")).toEqual([["message"]]);
|
||||
});
|
||||
|
@ -152,7 +152,10 @@ test("should get multiple messages", () => {
|
|||
logger.tag("foo", "bar").debug("message 2");
|
||||
|
||||
// then
|
||||
expect(logger.getMessages("debug", "foo", "bar")).toEqual([["message 1"], ["message 2"]]);
|
||||
expect(logger.getMessages("debug", "foo", "bar")).toEqual([
|
||||
["message 1"],
|
||||
["message 2"],
|
||||
]);
|
||||
});
|
||||
|
||||
test("should get complex message", () => {
|
||||
|
@ -163,5 +166,7 @@ test("should get complex message", () => {
|
|||
logger.tag("foo", "bar").debug("message", 1, false, {});
|
||||
|
||||
// then
|
||||
expect(logger.getMessages("debug", "foo", "bar")).toEqual([["message", 1, false, {}]]);
|
||||
expect(logger.getMessages("debug", "foo", "bar")).toEqual([
|
||||
["message", 1, false, {}],
|
||||
]);
|
||||
});
|
||||
|
|
|
@ -1,21 +1,23 @@
|
|||
import {Logger, TaggedLogger, LogLevel} from '../types';
|
||||
import {ActivatableLogger} from '../logger';
|
||||
import { LogLevel, TaggedLogger } from "../types";
|
||||
import { ActivatableLogger } from "../logger";
|
||||
|
||||
export type MockLogMessages = any[][];
|
||||
export type MockLogMessages = unknown[][];
|
||||
type TaggedLogMessages = {
|
||||
tags: {[key: string]: TaggedLogMessages},
|
||||
logs: {[key: string]: MockLogMessages}
|
||||
}
|
||||
tags: { [key: string]: TaggedLogMessages };
|
||||
logs: { [key: string]: MockLogMessages };
|
||||
};
|
||||
|
||||
export class MockLogger implements ActivatableLogger {
|
||||
private taggedLogMessages: TaggedLogMessages = MockLogger.emptyTaggedLogMessages();
|
||||
private taggedLogMessages: TaggedLogMessages =
|
||||
MockLogger.emptyTaggedLogMessages();
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
constructor() {}
|
||||
|
||||
private static emptyTaggedLogMessages(): TaggedLogMessages {
|
||||
return {
|
||||
tags: {},
|
||||
logs: {}
|
||||
logs: {},
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -36,46 +38,54 @@ export class MockLogger implements ActivatableLogger {
|
|||
return taggedLogMessages.logs[level] || [];
|
||||
}
|
||||
|
||||
init(...args: any[]): void {}
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function,@typescript-eslint/no-unused-vars
|
||||
init(...args: unknown[]): void {}
|
||||
|
||||
private doLog(taggedLogMessages: TaggedLogMessages, level: LogLevel, tags: string[], args: any[]): void {
|
||||
private doLog(
|
||||
taggedLogMessages: TaggedLogMessages,
|
||||
level: LogLevel,
|
||||
tags: string[],
|
||||
args: unknown[]
|
||||
): void {
|
||||
if (tags.length > 0) {
|
||||
const tag = tags[0];
|
||||
const remainingTags = tags.slice(1);
|
||||
const subTaggedLogsMessages: TaggedLogMessages =
|
||||
taggedLogMessages.tags[tag] || MockLogger.emptyTaggedLogMessages();
|
||||
taggedLogMessages.tags[tag] ||
|
||||
MockLogger.emptyTaggedLogMessages();
|
||||
this.doLog(subTaggedLogsMessages, level, remainingTags, args);
|
||||
taggedLogMessages.tags[tag] = subTaggedLogsMessages;
|
||||
|
||||
} else {
|
||||
const logMessages: MockLogMessages = taggedLogMessages.logs[level] || [];
|
||||
const logMessages: MockLogMessages =
|
||||
taggedLogMessages.logs[level] || [];
|
||||
logMessages.push(args);
|
||||
taggedLogMessages.logs[level] = logMessages;
|
||||
}
|
||||
}
|
||||
|
||||
tag(...tags: string[]): TaggedLogger {
|
||||
const logger: MockLogger = this;
|
||||
const doLog = this.doLog.bind(this);
|
||||
const taggedLogMessages = this.taggedLogMessages;
|
||||
return {
|
||||
log(level: LogLevel, ...args: any[]): void {
|
||||
logger.doLog(logger.taggedLogMessages, level, tags, args);
|
||||
log(level: LogLevel, ...args: unknown[]): void {
|
||||
doLog(taggedLogMessages, level, tags, args);
|
||||
},
|
||||
debug(...args: any[]): void {
|
||||
this.log('debug', ...args);
|
||||
debug(...args: unknown[]): void {
|
||||
this.log("debug", ...args);
|
||||
},
|
||||
info(...args: any[]): void {
|
||||
this.log('info', ...args);
|
||||
info(...args: unknown[]): void {
|
||||
this.log("info", ...args);
|
||||
},
|
||||
warn(...args: any[]): void {
|
||||
this.log('warn', ...args);
|
||||
warn(...args: unknown[]): void {
|
||||
this.log("warn", ...args);
|
||||
},
|
||||
error(...args: any[]): void {
|
||||
this.log('error', ...args);
|
||||
error(...args: unknown[]): void {
|
||||
this.log("error", ...args);
|
||||
},
|
||||
profile(...args: any[]): void {
|
||||
this.log('profile', ...args);
|
||||
profile(...args: unknown[]): void {
|
||||
this.log("profile", ...args);
|
||||
},
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,15 +1,15 @@
|
|||
import _ from "lodash";
|
||||
import auth, {BasicAuthCheckerCallback} from "http-auth";
|
||||
import auth, { BasicAuthCheckerCallback } from "http-auth";
|
||||
import authConnect from "http-auth-connect";
|
||||
import bodyParser from "body-parser";
|
||||
import bcrypt from "bcrypt";
|
||||
import compress from "compression";
|
||||
import express, {Express, NextFunction, Request, Response} from "express";
|
||||
import {promises as fs} from "graceful-fs";
|
||||
import express, { Express, NextFunction, Request, Response } from "express";
|
||||
import { promises as fs } from "graceful-fs";
|
||||
|
||||
import {config} from "./config";
|
||||
import type {CleartextPassword, PasswordHash, Username} from "./types";
|
||||
import {isString} from "./types";
|
||||
import { config } from "./config";
|
||||
import type { CleartextPassword, PasswordHash, Username } from "./types";
|
||||
import { isString } from "./types";
|
||||
import Logger from "./logger";
|
||||
|
||||
export const app: Express = express();
|
||||
|
@ -17,7 +17,8 @@ export const app: Express = express();
|
|||
/**
|
||||
* Used to have some password comparison in case the user does not exist to avoid timing attacks.
|
||||
*/
|
||||
const INVALID_PASSWORD_HASH: PasswordHash = "$2b$05$JebmV1q/ySuxa89GoJYlc.6SEnj1OZYBOfTf.TYAehcC5HLeJiWPi" as PasswordHash;
|
||||
const INVALID_PASSWORD_HASH: PasswordHash =
|
||||
"$2b$05$JebmV1q/ySuxa89GoJYlc.6SEnj1OZYBOfTf.TYAehcC5HLeJiWPi" as PasswordHash;
|
||||
|
||||
/**
|
||||
* Trying to implement a timing safe string compare.
|
||||
|
@ -41,7 +42,10 @@ function timingSafeEqual<T extends string>(a: T, b: T): boolean {
|
|||
return different === 0;
|
||||
}
|
||||
|
||||
async function isValidLogin(username: Username, password: CleartextPassword): Promise<boolean> {
|
||||
async function isValidLogin(
|
||||
username: Username,
|
||||
password: CleartextPassword
|
||||
): Promise<boolean> {
|
||||
if (!config.server.internal.active) {
|
||||
return false;
|
||||
}
|
||||
|
@ -71,52 +75,63 @@ export function init(): void {
|
|||
// urls beneath /internal are protected
|
||||
const internalAuth = auth.basic(
|
||||
{
|
||||
realm: 'Knotenformular - Intern'
|
||||
realm: "Knotenformular - Intern",
|
||||
},
|
||||
function (username: string, password: string, callback: BasicAuthCheckerCallback): void {
|
||||
function (
|
||||
username: string,
|
||||
password: string,
|
||||
callback: BasicAuthCheckerCallback
|
||||
): void {
|
||||
isValidLogin(username as Username, password as CleartextPassword)
|
||||
.then(result => callback(result))
|
||||
.catch(err => {
|
||||
Logger.tag('login').error(err);
|
||||
.then((result) => callback(result))
|
||||
.catch((err) => {
|
||||
Logger.tag("login").error(err);
|
||||
});
|
||||
}
|
||||
);
|
||||
router.use('/internal', authConnect(internalAuth));
|
||||
router.use("/internal", authConnect(internalAuth));
|
||||
|
||||
router.use(bodyParser.json());
|
||||
router.use(bodyParser.urlencoded({extended: true}));
|
||||
router.use(bodyParser.urlencoded({ extended: true }));
|
||||
|
||||
const adminDir = __dirname + '/../admin';
|
||||
const clientDir = __dirname + '/../client';
|
||||
const templateDir = __dirname + '/templates';
|
||||
const adminDir = __dirname + "/../admin";
|
||||
const clientDir = __dirname + "/../client";
|
||||
const templateDir = __dirname + "/templates";
|
||||
|
||||
const jsTemplateFiles = [
|
||||
'/config.js'
|
||||
];
|
||||
const jsTemplateFiles = ["/config.js"];
|
||||
|
||||
function usePromise(f: (req: Request, res: Response) => Promise<void>): void {
|
||||
function usePromise(
|
||||
f: (req: Request, res: Response) => Promise<void>
|
||||
): void {
|
||||
router.use((req: Request, res: Response, next: NextFunction): void => {
|
||||
f(req, res).then(next).catch(next)
|
||||
f(req, res).then(next).catch(next);
|
||||
});
|
||||
}
|
||||
|
||||
router.use(compress());
|
||||
|
||||
async function serveTemplate(mimeType: string, req: Request, res: Response): Promise<void> {
|
||||
const body = await fs.readFile(templateDir + '/' + req.path + '.template', 'utf8');
|
||||
async function serveTemplate(
|
||||
mimeType: string,
|
||||
req: Request,
|
||||
res: Response
|
||||
): Promise<void> {
|
||||
const body = await fs.readFile(
|
||||
templateDir + "/" + req.path + ".template",
|
||||
"utf8"
|
||||
);
|
||||
|
||||
res.writeHead(200, {'Content-Type': mimeType});
|
||||
res.end(_.template(body)({config: config.client}));
|
||||
res.writeHead(200, { "Content-Type": mimeType });
|
||||
res.end(_.template(body)({ config: config.client }));
|
||||
}
|
||||
|
||||
usePromise(async (req: Request, res: Response): Promise<void> => {
|
||||
if (jsTemplateFiles.indexOf(req.path) >= 0) {
|
||||
await serveTemplate('application/javascript', req, res);
|
||||
await serveTemplate("application/javascript", req, res);
|
||||
}
|
||||
});
|
||||
|
||||
router.use('/internal/admin', express.static(adminDir + '/'));
|
||||
router.use('/', express.static(clientDir + '/'));
|
||||
router.use("/internal/admin", express.static(adminDir + "/"));
|
||||
router.use("/", express.static(clientDir + "/"));
|
||||
|
||||
app.use(config.server.rootPath, router);
|
||||
}
|
||||
|
|
|
@ -1,46 +1,65 @@
|
|||
import commandLineArgs from "command-line-args"
|
||||
import commandLineUsage from "command-line-usage"
|
||||
import fs from "graceful-fs"
|
||||
import url from "url"
|
||||
import {parse} from "sparkson"
|
||||
import {Config, Url, Version} from "./types"
|
||||
import commandLineArgs from "command-line-args";
|
||||
import commandLineUsage from "command-line-usage";
|
||||
import fs from "graceful-fs";
|
||||
import url from "url";
|
||||
import { parse } from "sparkson";
|
||||
import { Config, hasOwnProperty, Url, Version } from "./types";
|
||||
|
||||
// @ts-ignore
|
||||
export let config: Config = {};
|
||||
export let config: Config = {} as Config;
|
||||
export let version: Version = "unknown" as Version;
|
||||
|
||||
export function parseCommandLine(): void {
|
||||
const commandLineDefs = [
|
||||
{name: 'help', alias: 'h', type: Boolean, description: 'Show this help'},
|
||||
{name: 'config', alias: 'c', type: String, description: 'Location of config.json'},
|
||||
{name: 'version', alias: 'v', type: Boolean, description: 'Show ffffng version'}
|
||||
{
|
||||
name: "help",
|
||||
alias: "h",
|
||||
type: Boolean,
|
||||
description: "Show this help",
|
||||
},
|
||||
{
|
||||
name: "config",
|
||||
alias: "c",
|
||||
type: String,
|
||||
description: "Location of config.json",
|
||||
},
|
||||
{
|
||||
name: "version",
|
||||
alias: "v",
|
||||
type: Boolean,
|
||||
description: "Show ffffng version",
|
||||
},
|
||||
];
|
||||
|
||||
let commandLineOptions;
|
||||
try {
|
||||
commandLineOptions = commandLineArgs(commandLineDefs);
|
||||
} catch (e: any) {
|
||||
if (e.message) {
|
||||
console.error(e.message);
|
||||
} catch (error) {
|
||||
if (hasOwnProperty(error, "message")) {
|
||||
console.error(error.message);
|
||||
} else {
|
||||
console.error(e);
|
||||
console.error(error);
|
||||
}
|
||||
console.error('Try \'--help\' for more information.');
|
||||
console.error("Try '--help' for more information.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const packageJsonFile = __dirname + '/../package.json';
|
||||
const packageJsonFile = __dirname + "/../package.json";
|
||||
if (fs.existsSync(packageJsonFile)) {
|
||||
version = JSON.parse(fs.readFileSync(packageJsonFile, 'utf8')).version;
|
||||
version = JSON.parse(fs.readFileSync(packageJsonFile, "utf8")).version;
|
||||
}
|
||||
|
||||
function usage() {
|
||||
console.log(commandLineUsage([
|
||||
{
|
||||
header: 'ffffng - ' + version + ' - Freifunk node management form',
|
||||
optionList: commandLineDefs
|
||||
}
|
||||
]));
|
||||
console.log(
|
||||
commandLineUsage([
|
||||
{
|
||||
header:
|
||||
"ffffng - " +
|
||||
version +
|
||||
" - Freifunk node management form",
|
||||
optionList: commandLineDefs,
|
||||
},
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
if (commandLineOptions.help) {
|
||||
|
@ -49,7 +68,7 @@ export function parseCommandLine(): void {
|
|||
}
|
||||
|
||||
if (commandLineOptions.version) {
|
||||
console.log('ffffng - ' + version);
|
||||
console.log("ffffng - " + version);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
|
@ -62,9 +81,9 @@ export function parseCommandLine(): void {
|
|||
let configJSON = {};
|
||||
|
||||
if (fs.existsSync(configJSONFile)) {
|
||||
configJSON = JSON.parse(fs.readFileSync(configJSONFile, 'utf8'));
|
||||
configJSON = JSON.parse(fs.readFileSync(configJSONFile, "utf8"));
|
||||
} else {
|
||||
console.error('config.json not found: ' + configJSONFile);
|
||||
console.error("config.json not found: " + configJSONFile);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
|
@ -72,7 +91,7 @@ export function parseCommandLine(): void {
|
|||
|
||||
function stripTrailingSlash(url: Url): Url {
|
||||
return url.endsWith("/")
|
||||
? url.substring(0, url.length - 1) as Url
|
||||
? (url.substring(0, url.length - 1) as Url)
|
||||
: url;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,48 +1,83 @@
|
|||
import {RunResult, SqlType, Statement, TypedDatabase} from "../../types";
|
||||
import { RunResult, SqlType, Statement, TypedDatabase } from "../../types";
|
||||
import * as sqlite3 from "sqlite3";
|
||||
|
||||
export async function init(): Promise<void> {
|
||||
return;
|
||||
}
|
||||
|
||||
export class MockDatabase implements TypedDatabase {
|
||||
constructor() {
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
constructor() {}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
async on(event: string, listener: unknown): Promise<void> {
|
||||
return;
|
||||
}
|
||||
|
||||
async on(event: string, listener: any): Promise<void> {
|
||||
}
|
||||
|
||||
async run(sql: SqlType, ...params: any[]): Promise<RunResult> {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
async run(sql: SqlType, ...params: unknown[]): Promise<RunResult> {
|
||||
return {
|
||||
stmt: new Statement(new sqlite3.Statement()),
|
||||
};
|
||||
}
|
||||
|
||||
async get<T = any>(sql: SqlType, ...params: any[]): Promise<T | undefined> {
|
||||
async get<T = unknown>(
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
sql: SqlType,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
...params: unknown[]
|
||||
): Promise<T | undefined> {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async each<T = any>(sql: SqlType, callback: (err: any, row: T) => void): Promise<number>;
|
||||
async each<T = any>(sql: SqlType, param1: any, callback: (err: any, row: T) => void): Promise<number>;
|
||||
async each<T = any>(sql: SqlType, param1: any, param2: any, callback: (err: any, row: T) => void): Promise<number>;
|
||||
async each<T = any>(sql: SqlType, param1: any, param2: any, param3: any, callback: (err: any, row: T) => void): Promise<number>;
|
||||
async each<T = any>(sql: SqlType, ...params: any[]): Promise<number>;
|
||||
async each(sql: SqlType, ...callback: (any)[]): Promise<number> {
|
||||
async each<T = unknown>(
|
||||
sql: SqlType,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
async each<T = unknown>(
|
||||
sql: SqlType,
|
||||
param1: unknown,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
async each<T = unknown>(
|
||||
sql: SqlType,
|
||||
param1: unknown,
|
||||
param2: unknown,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
async each<T = unknown>(
|
||||
sql: SqlType,
|
||||
param1: unknown,
|
||||
param2: unknown,
|
||||
param3: unknown,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
async each<T = unknown>(
|
||||
sql: SqlType,
|
||||
...params: unknown[]
|
||||
): Promise<number>;
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
async each(sql: SqlType, ...callback: unknown[]): Promise<number> {
|
||||
return 0;
|
||||
}
|
||||
|
||||
async all<T>(sql: SqlType, ...params: any[]): Promise<T[]> {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
async all<T>(sql: SqlType, ...params: unknown[]): Promise<T[]> {
|
||||
return [];
|
||||
}
|
||||
|
||||
async exec(sql: SqlType, ...params: any[]): Promise<void> {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
async exec(sql: SqlType, ...params: unknown[]): Promise<void> {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
async prepare(sql: SqlType, ...params: any[]): Promise<Statement> {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
async prepare(sql: SqlType, ...params: unknown[]): Promise<Statement> {
|
||||
return new Statement(new sqlite3.Statement());
|
||||
}
|
||||
}
|
||||
|
||||
export const db: MockDatabase = new MockDatabase();
|
||||
|
||||
export {TypedDatabase, Statement}
|
||||
export { TypedDatabase, Statement };
|
||||
|
|
|
@ -2,11 +2,11 @@ import util from "util";
|
|||
import fs from "graceful-fs";
|
||||
import glob from "glob";
|
||||
import path from "path";
|
||||
import {config} from "../config";
|
||||
import { config } from "../config";
|
||||
import Logger from "../logger";
|
||||
import {Database, open, Statement} from "sqlite";
|
||||
import { Database, open, Statement } from "sqlite";
|
||||
import * as sqlite3 from "sqlite3";
|
||||
import {RunResult, SqlType, TypedDatabase} from "../types";
|
||||
import { RunResult, SqlType, TypedDatabase } from "../types";
|
||||
|
||||
const pglob = util.promisify(glob);
|
||||
const pReadFile = util.promisify(fs.readFile);
|
||||
|
@ -28,102 +28,145 @@ class DatabasePromiseWrapper implements TypedDatabase {
|
|||
.then(resolve)
|
||||
.catch(reject);
|
||||
});
|
||||
this.db.catch(err => {
|
||||
Logger.tag('database', 'init').error('Error initializing database: ', err);
|
||||
this.db.catch((err) => {
|
||||
Logger.tag("database", "init").error(
|
||||
"Error initializing database: ",
|
||||
err
|
||||
);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
async on(event: string, listener: any): Promise<void> {
|
||||
async on(event: string, listener: unknown): Promise<void> {
|
||||
const db = await this.db;
|
||||
db.on(event, listener);
|
||||
}
|
||||
|
||||
async run(sql: SqlType, ...params: any[]): Promise<RunResult> {
|
||||
async run(sql: SqlType, ...params: unknown[]): Promise<RunResult> {
|
||||
const db = await this.db;
|
||||
return db.run(sql, ...params);
|
||||
}
|
||||
|
||||
async get<T>(sql: SqlType, ...params: any[]): Promise<T | undefined> {
|
||||
async get<T>(sql: SqlType, ...params: unknown[]): Promise<T | undefined> {
|
||||
const db = await this.db;
|
||||
return await db.get<T>(sql, ...params);
|
||||
}
|
||||
|
||||
async each<T>(sql: SqlType, callback: (err: any, row: T) => void): Promise<number>;
|
||||
async each<T>(sql: SqlType, param1: any, callback: (err: any, row: T) => void): Promise<number>;
|
||||
async each<T>(sql: SqlType, param1: any, param2: any, callback: (err: any, row: T) => void): Promise<number>;
|
||||
async each<T>(sql: SqlType, param1: any, param2: any, param3: any, callback: (err: any, row: T) => void): Promise<number>;
|
||||
async each<T>(sql: SqlType, ...params: any[]): Promise<number> {
|
||||
async each<T>(
|
||||
sql: SqlType,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
async each<T>(
|
||||
sql: SqlType,
|
||||
param1: unknown,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
async each<T>(
|
||||
sql: SqlType,
|
||||
param1: unknown,
|
||||
param2: unknown,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
async each<T>(
|
||||
sql: SqlType,
|
||||
param1: unknown,
|
||||
param2: unknown,
|
||||
param3: unknown,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
async each<T>(sql: SqlType, ...params: unknown[]): Promise<number> {
|
||||
const db = await this.db;
|
||||
// @ts-ignore
|
||||
return await db.each.apply(db, arguments);
|
||||
return await db.each(sql, ...params);
|
||||
}
|
||||
|
||||
async all<T>(sql: SqlType, ...params: any[]): Promise<T[]> {
|
||||
async all<T>(sql: SqlType, ...params: unknown[]): Promise<T[]> {
|
||||
const db = await this.db;
|
||||
return (await db.all<T[]>(sql, ...params));
|
||||
return await db.all<T[]>(sql, ...params);
|
||||
}
|
||||
|
||||
async exec(sql: SqlType, ...params: any[]): Promise<void> {
|
||||
async exec(sql: SqlType, ...params: unknown[]): Promise<void> {
|
||||
const db = await this.db;
|
||||
return await db.exec(sql, ...params);
|
||||
}
|
||||
|
||||
async prepare(sql: SqlType, ...params: any[]): Promise<Statement> {
|
||||
async prepare(sql: SqlType, ...params: unknown[]): Promise<Statement> {
|
||||
const db = await this.db;
|
||||
return await db.prepare(sql, ...params);
|
||||
}
|
||||
}
|
||||
|
||||
async function applyPatch(db: TypedDatabase, file: string): Promise<void> {
|
||||
Logger.tag('database', 'migration').info('Checking if patch need to be applied: %s', file);
|
||||
Logger.tag("database", "migration").info(
|
||||
"Checking if patch need to be applied: %s",
|
||||
file
|
||||
);
|
||||
|
||||
const contents = await pReadFile(file);
|
||||
const version = path.basename(file, '.sql');
|
||||
const version = path.basename(file, ".sql");
|
||||
|
||||
const row = await db.get('SELECT * FROM schema_version WHERE version = ?', version);
|
||||
const row = await db.get(
|
||||
"SELECT * FROM schema_version WHERE version = ?",
|
||||
version
|
||||
);
|
||||
if (row) {
|
||||
// patch is already applied. skip!
|
||||
Logger.tag('database', 'migration').info('Patch already applied, skipping: %s', file);
|
||||
return
|
||||
Logger.tag("database", "migration").info(
|
||||
"Patch already applied, skipping: %s",
|
||||
file
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const sql = 'BEGIN TRANSACTION;\n' +
|
||||
contents.toString() + '\n' +
|
||||
'INSERT INTO schema_version (version) VALUES (\'' + version + '\');\n' +
|
||||
'END TRANSACTION;';
|
||||
const sql =
|
||||
"BEGIN TRANSACTION;\n" +
|
||||
contents.toString() +
|
||||
"\n" +
|
||||
"INSERT INTO schema_version (version) VALUES ('" +
|
||||
version +
|
||||
"');\n" +
|
||||
"END TRANSACTION;";
|
||||
|
||||
await db.exec(sql);
|
||||
|
||||
Logger.tag('database', 'migration').info('Patch successfully applied: %s', file);
|
||||
Logger.tag("database", "migration").info(
|
||||
"Patch successfully applied: %s",
|
||||
file
|
||||
);
|
||||
}
|
||||
|
||||
async function applyMigrations(db: TypedDatabase): Promise<void> {
|
||||
Logger.tag('database', 'migration').info('Migrating database...');
|
||||
Logger.tag("database", "migration").info("Migrating database...");
|
||||
|
||||
const sql = 'BEGIN TRANSACTION; CREATE TABLE IF NOT EXISTS schema_version (\n' +
|
||||
' version VARCHAR(255) PRIMARY KEY ASC,\n' +
|
||||
' applied_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL\n' +
|
||||
'); END TRANSACTION;';
|
||||
const sql =
|
||||
"BEGIN TRANSACTION; CREATE TABLE IF NOT EXISTS schema_version (\n" +
|
||||
" version VARCHAR(255) PRIMARY KEY ASC,\n" +
|
||||
" applied_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL\n" +
|
||||
"); END TRANSACTION;";
|
||||
|
||||
await db.exec(sql);
|
||||
|
||||
const files = await pglob(__dirname + '/patches/*.sql');
|
||||
const files = await pglob(__dirname + "/patches/*.sql");
|
||||
for (const file of files) {
|
||||
await applyPatch(db, file)
|
||||
await applyPatch(db, file);
|
||||
}
|
||||
}
|
||||
|
||||
export const db: TypedDatabase = new DatabasePromiseWrapper();
|
||||
|
||||
export async function init(): Promise<void> {
|
||||
Logger.tag('database').info('Setting up database: %s', config.server.databaseFile);
|
||||
await db.on('profile', (sql: string, time: number) => Logger.tag('database').profile('[%sms]\t%s', time, sql));
|
||||
Logger.tag("database").info(
|
||||
"Setting up database: %s",
|
||||
config.server.databaseFile
|
||||
);
|
||||
await db.on("profile", (sql: string, time: number) =>
|
||||
Logger.tag("database").profile("[%sms]\t%s", time, sql)
|
||||
);
|
||||
|
||||
try {
|
||||
await applyMigrations(db);
|
||||
} catch (error) {
|
||||
Logger.tag('database').error('Error migrating database:', error);
|
||||
Logger.tag("database").error("Error migrating database:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
(function () {
|
||||
// Use graceful-fs instead of fs also in all libraries to have more robust fs handling.
|
||||
const realFs = require('fs');
|
||||
const gracefulFs = require('graceful-fs');
|
||||
gracefulFs.gracefulify(realFs);
|
||||
|
||||
// Init config by parsing commandline. Afterwards all other imports may happen.
|
||||
require('./config').parseCommandLine();
|
||||
})();
|
10
server/init.ts
Normal file
10
server/init.ts
Normal file
|
@ -0,0 +1,10 @@
|
|||
import realFs from "fs";
|
||||
import gracefulFs from "graceful-fs";
|
||||
|
||||
import { parseCommandLine } from "./config";
|
||||
|
||||
// Use graceful-fs instead of fs also in all libraries to have more robust fs handling.
|
||||
gracefulFs.gracefulify(realFs);
|
||||
|
||||
// Init config by parsing commandline. Afterwards all other imports may happen.
|
||||
parseCommandLine();
|
|
@ -1,12 +1,13 @@
|
|||
import {fixNodeFilenames} from "../services/nodeService";
|
||||
import {jobResultOkay} from "./scheduler";
|
||||
import { fixNodeFilenames } from "../services/nodeService";
|
||||
import { jobResultOkay } from "./scheduler";
|
||||
|
||||
export default {
|
||||
name: 'FixNodeFilenamesJob',
|
||||
description: 'Makes sure node files (holding fastd key, name, etc.) are correctly named.',
|
||||
name: "FixNodeFilenamesJob",
|
||||
description:
|
||||
"Makes sure node files (holding fastd key, name, etc.) are correctly named.",
|
||||
|
||||
async run() {
|
||||
await fixNodeFilenames();
|
||||
return jobResultOkay();
|
||||
},
|
||||
}
|
||||
};
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
import * as MailService from "../services/mailService"
|
||||
import {jobResultOkay} from "./scheduler";
|
||||
import * as MailService from "../services/mailService";
|
||||
import { jobResultOkay } from "./scheduler";
|
||||
|
||||
export default {
|
||||
name: 'MailQueueJob',
|
||||
description: 'Send pending emails (up to 5 attempts in case of failures).',
|
||||
name: "MailQueueJob",
|
||||
description: "Send pending emails (up to 5 attempts in case of failures).",
|
||||
|
||||
async run() {
|
||||
await MailService.sendPendingMails();
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
import * as MonitoringService from "../services/monitoringService";
|
||||
import {jobResultOkay} from "./scheduler";
|
||||
import { jobResultOkay } from "./scheduler";
|
||||
|
||||
export default {
|
||||
name: 'MonitoringMailsSendingJob',
|
||||
description: 'Sends monitoring emails depending on the monitoring state of nodes retrieved by the NodeInformationRetrievalJob.',
|
||||
name: "MonitoringMailsSendingJob",
|
||||
description:
|
||||
"Sends monitoring emails depending on the monitoring state of nodes retrieved by the NodeInformationRetrievalJob.",
|
||||
|
||||
async run() {
|
||||
await MonitoringService.sendMonitoringMails();
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
import * as MonitoringService from "../services/monitoringService";
|
||||
import {jobResultOkay, jobResultWarning} from "./scheduler";
|
||||
import { jobResultOkay, jobResultWarning } from "./scheduler";
|
||||
|
||||
export default {
|
||||
name: 'NodeInformationRetrievalJob',
|
||||
description: 'Fetches the nodes.json and calculates and stores the monitoring / online status for registered nodes.',
|
||||
name: "NodeInformationRetrievalJob",
|
||||
description:
|
||||
"Fetches the nodes.json and calculates and stores the monitoring / online status for registered nodes.",
|
||||
|
||||
async run () {
|
||||
async run() {
|
||||
const result = await MonitoringService.retrieveNodeInformation();
|
||||
if (result.failedParsingNodesCount > 0) {
|
||||
return jobResultWarning(
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
import * as MonitoringService from "../services/monitoringService";
|
||||
import {jobResultOkay} from "./scheduler";
|
||||
import { jobResultOkay } from "./scheduler";
|
||||
|
||||
export default {
|
||||
name: 'OfflineNodesDeletionJob',
|
||||
description: 'Delete nodes that are offline for more than 100 days.',
|
||||
name: "OfflineNodesDeletionJob",
|
||||
description: "Delete nodes that are offline for more than 100 days.",
|
||||
|
||||
async run() {
|
||||
await MonitoringService.deleteOfflineNodes();
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import cron from "node-cron";
|
||||
import moment from "moment";
|
||||
|
||||
import {config} from "../config";
|
||||
import { config } from "../config";
|
||||
import Logger from "../logger";
|
||||
|
||||
import MailQueueJob from "./MailQueueJob";
|
||||
|
@ -16,29 +16,29 @@ export enum JobResultState {
|
|||
}
|
||||
|
||||
export type JobResult = {
|
||||
state: JobResultState,
|
||||
message?: string,
|
||||
state: JobResultState;
|
||||
message?: string;
|
||||
};
|
||||
|
||||
export function jobResultOkay(message?: string): JobResult {
|
||||
return {
|
||||
state: JobResultState.OKAY,
|
||||
message
|
||||
}
|
||||
message,
|
||||
};
|
||||
}
|
||||
|
||||
export function jobResultWarning(message?: string): JobResult {
|
||||
return {
|
||||
state: JobResultState.WARNING,
|
||||
message
|
||||
}
|
||||
message,
|
||||
};
|
||||
}
|
||||
|
||||
export interface Job {
|
||||
name: string,
|
||||
description: string,
|
||||
name: string;
|
||||
description: string;
|
||||
|
||||
run(): Promise<JobResult>,
|
||||
run(): Promise<JobResult>;
|
||||
}
|
||||
|
||||
export enum TaskState {
|
||||
|
@ -59,7 +59,7 @@ export class Task {
|
|||
public lastRunDuration: number | null,
|
||||
public state: TaskState,
|
||||
public result: JobResult | null,
|
||||
public enabled: boolean,
|
||||
public enabled: boolean
|
||||
) {}
|
||||
|
||||
run(): void {
|
||||
|
@ -75,7 +75,7 @@ export class Task {
|
|||
const done = (state: TaskState, result: JobResult | null): void => {
|
||||
const now = moment();
|
||||
const duration = now.diff(this.runningSince || now);
|
||||
Logger.tag('jobs').profile('[%sms]\t%s', duration, this.name);
|
||||
Logger.tag("jobs").profile("[%sms]\t%s", duration, this.name);
|
||||
|
||||
this.runningSince = null;
|
||||
this.lastRunDuration = duration;
|
||||
|
@ -83,16 +83,19 @@ export class Task {
|
|||
this.result = result;
|
||||
};
|
||||
|
||||
this.job.run().then(result => {
|
||||
done(TaskState.IDLE, result);
|
||||
}).catch(err => {
|
||||
Logger.tag('jobs').error("Job %s failed: %s", this.name, err);
|
||||
done(TaskState.FAILED, null);
|
||||
});
|
||||
this.job
|
||||
.run()
|
||||
.then((result) => {
|
||||
done(TaskState.IDLE, result);
|
||||
})
|
||||
.catch((err) => {
|
||||
Logger.tag("jobs").error("Job %s failed: %s", this.name, err);
|
||||
done(TaskState.FAILED, null);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
type Tasks = {[key: string]: Task};
|
||||
type Tasks = { [key: string]: Task };
|
||||
|
||||
const tasks: Tasks = {};
|
||||
|
||||
|
@ -104,7 +107,7 @@ function nextTaskId(): number {
|
|||
}
|
||||
|
||||
function schedule(expr: string, job: Job): void {
|
||||
Logger.tag('jobs').info('Scheduling job: %s %s', expr, job.name);
|
||||
Logger.tag("jobs").info("Scheduling job: %s %s", expr, job.name);
|
||||
|
||||
const id = nextTaskId();
|
||||
|
||||
|
@ -119,33 +122,35 @@ function schedule(expr: string, job: Job): void {
|
|||
null,
|
||||
TaskState.IDLE,
|
||||
null,
|
||||
true,
|
||||
true
|
||||
);
|
||||
|
||||
cron.schedule(expr, () => task.run());
|
||||
|
||||
tasks['' + id] = task;
|
||||
tasks["" + id] = task;
|
||||
}
|
||||
|
||||
export function init() {
|
||||
Logger.tag('jobs').info('Scheduling background jobs...');
|
||||
Logger.tag("jobs").info("Scheduling background jobs...");
|
||||
|
||||
try {
|
||||
schedule('0 */1 * * * *', MailQueueJob);
|
||||
schedule('15 */1 * * * *', FixNodeFilenamesJob);
|
||||
schedule("0 */1 * * * *", MailQueueJob);
|
||||
schedule("15 */1 * * * *", FixNodeFilenamesJob);
|
||||
|
||||
if (config.client.monitoring.enabled) {
|
||||
schedule('30 */15 * * * *', NodeInformationRetrievalJob);
|
||||
schedule('45 */5 * * * *', MonitoringMailsSendingJob);
|
||||
schedule('0 0 3 * * *', OfflineNodesDeletionJob); // every night at 3:00
|
||||
schedule("30 */15 * * * *", NodeInformationRetrievalJob);
|
||||
schedule("45 */5 * * * *", MonitoringMailsSendingJob);
|
||||
schedule("0 0 3 * * *", OfflineNodesDeletionJob); // every night at 3:00
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
Logger.tag('jobs').error('Error during scheduling of background jobs:', error);
|
||||
} catch (error) {
|
||||
Logger.tag("jobs").error(
|
||||
"Error during scheduling of background jobs:",
|
||||
error
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
|
||||
Logger.tag('jobs').info('Scheduling of background jobs done.');
|
||||
Logger.tag("jobs").info("Scheduling of background jobs done.");
|
||||
}
|
||||
|
||||
export function getTasks(): Tasks {
|
||||
|
|
|
@ -1,46 +1,48 @@
|
|||
import {isLogLevel, isUndefined, LoggingConfig, LogLevel, LogLevels} from "./types";
|
||||
import {ActivatableLoggerImpl} from "./logger";
|
||||
import {
|
||||
isLogLevel,
|
||||
isUndefined,
|
||||
LoggingConfig,
|
||||
LogLevel,
|
||||
LogLevels,
|
||||
} from "./types";
|
||||
import { ActivatableLoggerImpl } from "./logger";
|
||||
|
||||
function withDefault<T>(value: T | undefined, defaultValue: T): T {
|
||||
return isUndefined(value) ? defaultValue : value;
|
||||
}
|
||||
|
||||
class TestableLogger extends ActivatableLoggerImpl {
|
||||
private logs: any[][] = [];
|
||||
private logs: unknown[][] = [];
|
||||
|
||||
constructor(
|
||||
enabled?: boolean,
|
||||
debug?: boolean,
|
||||
profile?: boolean,
|
||||
) {
|
||||
constructor(enabled?: boolean, debug?: boolean, profile?: boolean) {
|
||||
super();
|
||||
this.init(
|
||||
new LoggingConfig(
|
||||
withDefault(enabled, true),
|
||||
withDefault(debug, true),
|
||||
withDefault(profile, true),
|
||||
withDefault(profile, true)
|
||||
),
|
||||
(...args: any[]): void => this.doLog(...args)
|
||||
(...args: unknown[]): void => this.doLog(...args)
|
||||
);
|
||||
}
|
||||
|
||||
doLog(...args: any[]): void {
|
||||
doLog(...args: unknown[]): void {
|
||||
this.logs.push(args);
|
||||
}
|
||||
|
||||
getLogs(): any[][] {
|
||||
getLogs(): unknown[][] {
|
||||
return this.logs;
|
||||
}
|
||||
}
|
||||
|
||||
type ParsedLogEntry = {
|
||||
level: LogLevel,
|
||||
tags: string[],
|
||||
message: string,
|
||||
args: any[],
|
||||
level: LogLevel;
|
||||
tags: string[];
|
||||
message: string;
|
||||
args: unknown[];
|
||||
};
|
||||
|
||||
function parseLogEntry(logEntry: any[]): ParsedLogEntry {
|
||||
function parseLogEntry(logEntry: unknown[]): ParsedLogEntry {
|
||||
if (!logEntry.length) {
|
||||
throw new Error(
|
||||
`Empty log entry. Should always start with log message: ${logEntry}`
|
||||
|
@ -55,7 +57,8 @@ function parseLogEntry(logEntry: any[]): ParsedLogEntry {
|
|||
}
|
||||
|
||||
// noinspection RegExpRedundantEscape
|
||||
const regexp = /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} ([A-Z]+) - (\[[^\]]*\])? *(.*)$/;
|
||||
const regexp =
|
||||
/^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} ([A-Z]+) - (\[[^\]]*\])? *(.*)$/;
|
||||
const groups = logMessage.match(regexp);
|
||||
if (groups === null || groups.length < 4) {
|
||||
throw new Error(
|
||||
|
@ -71,7 +74,7 @@ function parseLogEntry(logEntry: any[]): ParsedLogEntry {
|
|||
}
|
||||
|
||||
const tagsStr = groups[2].substring(1, groups[2].length - 1);
|
||||
const tags = tagsStr ? tagsStr.split(", "): [];
|
||||
const tags = tagsStr ? tagsStr.split(", ") : [];
|
||||
const message = groups[3];
|
||||
const args = logEntry.slice(1);
|
||||
|
||||
|
@ -83,7 +86,7 @@ function parseLogEntry(logEntry: any[]): ParsedLogEntry {
|
|||
};
|
||||
}
|
||||
|
||||
function parseLogs(logs: any[][]): ParsedLogEntry[] {
|
||||
function parseLogs(logs: unknown[][]): ParsedLogEntry[] {
|
||||
const parsedLogs: ParsedLogEntry[] = [];
|
||||
for (const logEntry of logs) {
|
||||
parsedLogs.push(parseLogEntry(logEntry));
|
||||
|
@ -100,12 +103,14 @@ for (const level of LogLevels) {
|
|||
logger.tag()[level]("message");
|
||||
|
||||
// then
|
||||
expect(parseLogs(logger.getLogs())).toEqual([{
|
||||
level,
|
||||
tags: [],
|
||||
message: "message",
|
||||
args: [],
|
||||
}]);
|
||||
expect(parseLogs(logger.getLogs())).toEqual([
|
||||
{
|
||||
level,
|
||||
tags: [],
|
||||
message: "message",
|
||||
args: [],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test(`should log single tagged ${level} message without parameters`, () => {
|
||||
|
@ -116,12 +121,14 @@ for (const level of LogLevels) {
|
|||
logger.tag("tag1", "tag2")[level]("message");
|
||||
|
||||
// then
|
||||
expect(parseLogs(logger.getLogs())).toEqual([{
|
||||
level,
|
||||
tags: ["tag1", "tag2"],
|
||||
message: "message",
|
||||
args: [],
|
||||
}]);
|
||||
expect(parseLogs(logger.getLogs())).toEqual([
|
||||
{
|
||||
level,
|
||||
tags: ["tag1", "tag2"],
|
||||
message: "message",
|
||||
args: [],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test(`should log single tagged ${level} message with parameters`, () => {
|
||||
|
@ -132,12 +139,14 @@ for (const level of LogLevels) {
|
|||
logger.tag("tag1", "tag2")[level]("message", 1, {}, [false]);
|
||||
|
||||
// then
|
||||
expect(parseLogs(logger.getLogs())).toEqual([{
|
||||
level,
|
||||
tags: ["tag1", "tag2"],
|
||||
message: "message",
|
||||
args: [1, {}, [false]],
|
||||
}]);
|
||||
expect(parseLogs(logger.getLogs())).toEqual([
|
||||
{
|
||||
level,
|
||||
tags: ["tag1", "tag2"],
|
||||
message: "message",
|
||||
args: [1, {}, [false]],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test(`should escape tags for ${level} message without parameters`, () => {
|
||||
|
@ -148,12 +157,14 @@ for (const level of LogLevels) {
|
|||
logger.tag("%s", "%d", "%f", "%o", "%")[level]("message");
|
||||
|
||||
// then
|
||||
expect(parseLogs(logger.getLogs())).toEqual([{
|
||||
level,
|
||||
tags: ["%%s", "%%d", "%%f", "%%o", "%%"],
|
||||
message: "message",
|
||||
args: [],
|
||||
}]);
|
||||
expect(parseLogs(logger.getLogs())).toEqual([
|
||||
{
|
||||
level,
|
||||
tags: ["%%s", "%%d", "%%f", "%%o", "%%"],
|
||||
message: "message",
|
||||
args: [],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test(`should not escape ${level} message itself`, () => {
|
||||
|
@ -164,12 +175,14 @@ for (const level of LogLevels) {
|
|||
logger.tag("tag")[level]("%s %d %f %o %%");
|
||||
|
||||
// then
|
||||
expect(parseLogs(logger.getLogs())).toEqual([{
|
||||
level,
|
||||
tags: ["tag"],
|
||||
message: "%s %d %f %o %%",
|
||||
args: [],
|
||||
}]);
|
||||
expect(parseLogs(logger.getLogs())).toEqual([
|
||||
{
|
||||
level,
|
||||
tags: ["tag"],
|
||||
message: "%s %d %f %o %%",
|
||||
args: [],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test(`should not escape ${level} message arguments`, () => {
|
||||
|
@ -180,12 +193,14 @@ for (const level of LogLevels) {
|
|||
logger.tag("tag")[level]("message", 1, "%s", "%d", "%f", "%o", "%");
|
||||
|
||||
// then
|
||||
expect(parseLogs(logger.getLogs())).toEqual([{
|
||||
level,
|
||||
tags: ["tag"],
|
||||
message: "message",
|
||||
args: [1, "%s", "%d", "%f", "%o", "%"],
|
||||
}]);
|
||||
expect(parseLogs(logger.getLogs())).toEqual([
|
||||
{
|
||||
level,
|
||||
tags: ["tag"],
|
||||
message: "message",
|
||||
args: [1, "%s", "%d", "%f", "%o", "%"],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test(`should not log ${level} message on disabled logger`, () => {
|
||||
|
@ -219,12 +234,14 @@ test(`should log profile message with disabled debugging`, () => {
|
|||
logger.tag("tag").profile("message");
|
||||
|
||||
// then
|
||||
expect(parseLogs(logger.getLogs())).toEqual([{
|
||||
level: "profile",
|
||||
tags: ["tag"],
|
||||
message: "message",
|
||||
args: [],
|
||||
}]);
|
||||
expect(parseLogs(logger.getLogs())).toEqual([
|
||||
{
|
||||
level: "profile",
|
||||
tags: ["tag"],
|
||||
message: "message",
|
||||
args: [],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test(`should not log profile message with disabled profiling`, () => {
|
||||
|
@ -246,10 +263,12 @@ test(`should log debug message with disabled profiling`, () => {
|
|||
logger.tag("tag").debug("message");
|
||||
|
||||
// then
|
||||
expect(parseLogs(logger.getLogs())).toEqual([{
|
||||
level: "debug",
|
||||
tags: ["tag"],
|
||||
message: "message",
|
||||
args: [],
|
||||
}]);
|
||||
expect(parseLogs(logger.getLogs())).toEqual([
|
||||
{
|
||||
level: "debug",
|
||||
tags: ["tag"],
|
||||
message: "message",
|
||||
args: [],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
|
|
@ -1,15 +1,28 @@
|
|||
import {isString, Logger, LoggingConfig, LogLevel, TaggedLogger} from './types';
|
||||
import moment from 'moment';
|
||||
import {
|
||||
isString,
|
||||
Logger,
|
||||
LoggingConfig,
|
||||
LogLevel,
|
||||
TaggedLogger,
|
||||
} from "./types";
|
||||
import moment from "moment";
|
||||
|
||||
export type LoggingFunction = (...args: any[]) => void;
|
||||
export type LoggingFunction = (...args: unknown[]) => void;
|
||||
|
||||
// noinspection JSUnusedLocalSymbols
|
||||
const noopTaggedLogger: TaggedLogger = {
|
||||
log(_level: LogLevel, ..._args: any[]): void {},
|
||||
debug(..._args: any[]): void {},
|
||||
info(..._args: any[]): void {},
|
||||
warn(..._args: any[]): void {},
|
||||
error(..._args: any[]): void {},
|
||||
profile(..._args: any[]): void {},
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars,@typescript-eslint/no-empty-function
|
||||
log(level: LogLevel, ...args: unknown[]): void {},
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars,@typescript-eslint/no-empty-function
|
||||
debug(...args: unknown[]): void {},
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars,@typescript-eslint/no-empty-function
|
||||
info(...args: unknown[]): void {},
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars,@typescript-eslint/no-empty-function
|
||||
warn(...args: unknown[]): void {},
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars,@typescript-eslint/no-empty-function
|
||||
error(...args: unknown[]): void {},
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars,@typescript-eslint/no-empty-function
|
||||
profile(...args: unknown[]): void {},
|
||||
};
|
||||
|
||||
export interface ActivatableLogger extends Logger {
|
||||
|
@ -34,17 +47,20 @@ export class ActivatableLoggerImpl implements ActivatableLogger {
|
|||
const profile = this.config.profile;
|
||||
const loggingFunction = this.loggingFunction;
|
||||
return {
|
||||
log(level: LogLevel, ...args: any[]): void {
|
||||
const timeStr = moment().format('YYYY-MM-DD HH:mm:ss');
|
||||
log(level: LogLevel, ...args: unknown[]): void {
|
||||
const timeStr = moment().format("YYYY-MM-DD HH:mm:ss");
|
||||
const levelStr = level.toUpperCase();
|
||||
const tagsStr = tags ? '[' + tags.join(', ') + ']' : '';
|
||||
const tagsStr = tags ? "[" + tags.join(", ") + "]" : "";
|
||||
const messagePrefix = `${timeStr} ${levelStr} - ${tagsStr}`;
|
||||
|
||||
// Make sure to only replace %s, etc. in real log message
|
||||
// but not in tags.
|
||||
const escapedMessagePrefix = messagePrefix.replace(/%/g, '%%');
|
||||
const escapedMessagePrefix = messagePrefix.replace(
|
||||
/%/g,
|
||||
"%%"
|
||||
);
|
||||
|
||||
let message = '';
|
||||
let message = "";
|
||||
if (args && isString(args[0])) {
|
||||
message = args[0];
|
||||
args.shift();
|
||||
|
@ -55,26 +71,26 @@ export class ActivatableLoggerImpl implements ActivatableLogger {
|
|||
: escapedMessagePrefix;
|
||||
loggingFunction(logStr, ...args);
|
||||
},
|
||||
debug(...args: any[]): void {
|
||||
debug(...args: unknown[]): void {
|
||||
if (debug) {
|
||||
this.log('debug', ...args);
|
||||
this.log("debug", ...args);
|
||||
}
|
||||
},
|
||||
info(...args: any[]): void {
|
||||
this.log('info', ...args);
|
||||
info(...args: unknown[]): void {
|
||||
this.log("info", ...args);
|
||||
},
|
||||
warn(...args: any[]): void {
|
||||
this.log('warn', ...args);
|
||||
warn(...args: unknown[]): void {
|
||||
this.log("warn", ...args);
|
||||
},
|
||||
error(...args: any[]): void {
|
||||
this.log('error', ...args);
|
||||
error(...args: unknown[]): void {
|
||||
this.log("error", ...args);
|
||||
},
|
||||
profile(...args: any[]): void {
|
||||
profile(...args: unknown[]): void {
|
||||
if (profile) {
|
||||
this.log('profile', ...args);
|
||||
this.log("profile", ...args);
|
||||
}
|
||||
},
|
||||
}
|
||||
};
|
||||
} else {
|
||||
return noopTaggedLogger;
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import {createTransport, Transporter} from "nodemailer";
|
||||
import {config} from "../config";
|
||||
import { createTransport, Transporter } from "nodemailer";
|
||||
import { config } from "../config";
|
||||
import * as MailTemplateService from "../services/mailTemplateService";
|
||||
import Mail from "nodemailer/lib/mailer";
|
||||
import SMTPTransport from "nodemailer/lib/smtp-transport";
|
||||
|
|
|
@ -1,29 +1,28 @@
|
|||
import "./init"
|
||||
import {config} from "./config"
|
||||
import Logger from "./logger"
|
||||
import * as db from "./db/database"
|
||||
import * as scheduler from "./jobs/scheduler"
|
||||
import * as router from "./router"
|
||||
import * as app from "./app"
|
||||
import "./init";
|
||||
import { config } from "./config";
|
||||
import Logger from "./logger";
|
||||
import * as db from "./db/database";
|
||||
import * as scheduler from "./jobs/scheduler";
|
||||
import * as router from "./router";
|
||||
import * as app from "./app";
|
||||
import * as mail from "./mail";
|
||||
|
||||
app.init();
|
||||
Logger.init(config.server.logging);
|
||||
Logger.tag('main', 'startup').info('Server starting up...');
|
||||
Logger.tag("main", "startup").info("Server starting up...");
|
||||
|
||||
async function main() {
|
||||
Logger.tag('main').info('Initializing...');
|
||||
Logger.tag("main").info("Initializing...");
|
||||
|
||||
await db.init();
|
||||
mail.init();
|
||||
scheduler.init();
|
||||
router.init();
|
||||
|
||||
app.app.listen(config.server.port, '::');
|
||||
app.app.listen(config.server.port, "::");
|
||||
}
|
||||
|
||||
main()
|
||||
.catch(error => {
|
||||
console.error('Unhandled runtime error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
main().catch((error) => {
|
||||
console.error("Unhandled runtime error:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import {handleJSON} from "../utils/resources";
|
||||
import {config} from "../config";
|
||||
import { handleJSON } from "../utils/resources";
|
||||
import { config } from "../config";
|
||||
|
||||
export const get = handleJSON(async () => config.client);
|
||||
|
|
|
@ -1,26 +1,36 @@
|
|||
import {promises as fs} from "graceful-fs";
|
||||
import { promises as fs } from "graceful-fs";
|
||||
|
||||
import ErrorTypes from "../utils/errorTypes";
|
||||
import Logger from "../logger";
|
||||
import * as Resources from "../utils/resources";
|
||||
import {Request, Response} from "express";
|
||||
import { Request, Response } from "express";
|
||||
|
||||
const indexHtml = __dirname + '/../../client/index.html';
|
||||
const indexHtml = __dirname + "/../../client/index.html";
|
||||
|
||||
export function render (req: Request, res: Response): void {
|
||||
export function render(req: Request, res: Response): void {
|
||||
const data = Resources.getData(req);
|
||||
|
||||
fs.readFile(indexHtml, 'utf8')
|
||||
.then(body =>
|
||||
fs.readFile(indexHtml, "utf8")
|
||||
.then((body) =>
|
||||
Resources.successHtml(
|
||||
res,
|
||||
body.replace(
|
||||
/<body/,
|
||||
'<script>window.__nodeToken = \''+ data.token + '\';</script><body'
|
||||
"<script>window.__nodeToken = '" +
|
||||
data.token +
|
||||
"';</script><body"
|
||||
)
|
||||
))
|
||||
.catch(err => {
|
||||
Logger.tag('frontend').error('Could not read file: ', indexHtml, err);
|
||||
return Resources.error(res, {data: 'Internal error.', type: ErrorTypes.internalError});
|
||||
})
|
||||
)
|
||||
)
|
||||
.catch((err) => {
|
||||
Logger.tag("frontend").error(
|
||||
"Could not read file: ",
|
||||
indexHtml,
|
||||
err
|
||||
);
|
||||
return Resources.error(res, {
|
||||
data: "Internal error.",
|
||||
type: ErrorTypes.internalError,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
|
|
@ -2,53 +2,55 @@ import CONSTRAINTS from "../shared/validation/constraints";
|
|||
import ErrorTypes from "../utils/errorTypes";
|
||||
import * as MailService from "../services/mailService";
|
||||
import * as Resources from "../utils/resources";
|
||||
import {handleJSONWithData, RequestData} from "../utils/resources";
|
||||
import {normalizeString, parseInteger} from "../shared/utils/strings";
|
||||
import {forConstraint} from "../shared/validation/validator";
|
||||
import {Request, Response} from "express";
|
||||
import {isString, Mail, MailId} from "../types";
|
||||
import { handleJSONWithData, RequestData } from "../utils/resources";
|
||||
import { normalizeString, parseInteger } from "../shared/utils/strings";
|
||||
import { forConstraint } from "../shared/validation/validator";
|
||||
import { Request, Response } from "express";
|
||||
import { isString, Mail, MailId } from "../types";
|
||||
|
||||
const isValidId = forConstraint(CONSTRAINTS.id, false);
|
||||
|
||||
async function withValidMailId(data: RequestData): Promise<MailId> {
|
||||
if (!isString(data.id)) {
|
||||
throw {data: 'Missing mail id.', type: ErrorTypes.badRequest};
|
||||
throw { data: "Missing mail id.", type: ErrorTypes.badRequest };
|
||||
}
|
||||
|
||||
const id = normalizeString(data.id);
|
||||
|
||||
if (!isValidId(id)) {
|
||||
throw {data: 'Invalid mail id.', type: ErrorTypes.badRequest};
|
||||
throw { data: "Invalid mail id.", type: ErrorTypes.badRequest };
|
||||
}
|
||||
|
||||
return parseInteger(id) as MailId;
|
||||
}
|
||||
|
||||
export const get = handleJSONWithData(async data => {
|
||||
export const get = handleJSONWithData(async (data) => {
|
||||
const id = await withValidMailId(data);
|
||||
return await MailService.getMail(id);
|
||||
});
|
||||
|
||||
async function doGetAll(req: Request): Promise<{ total: number, mails: Mail[] }> {
|
||||
const restParams = await Resources.getValidRestParams('list', null, req);
|
||||
async function doGetAll(
|
||||
req: Request
|
||||
): Promise<{ total: number; mails: Mail[] }> {
|
||||
const restParams = await Resources.getValidRestParams("list", null, req);
|
||||
return await MailService.getPendingMails(restParams);
|
||||
}
|
||||
|
||||
export function getAll(req: Request, res: Response): void {
|
||||
doGetAll(req)
|
||||
.then(({total, mails}) => {
|
||||
res.set('X-Total-Count', total.toString(10));
|
||||
.then(({ total, mails }) => {
|
||||
res.set("X-Total-Count", total.toString(10));
|
||||
return Resources.success(res, mails);
|
||||
})
|
||||
.catch(err => Resources.error(res, err))
|
||||
.catch((err) => Resources.error(res, err));
|
||||
}
|
||||
|
||||
export const remove = handleJSONWithData(async data => {
|
||||
export const remove = handleJSONWithData(async (data) => {
|
||||
const id = await withValidMailId(data);
|
||||
await MailService.deleteMail(id);
|
||||
});
|
||||
|
||||
export const resetFailures = handleJSONWithData(async data => {
|
||||
export const resetFailures = handleJSONWithData(async (data) => {
|
||||
const id = await withValidMailId(data);
|
||||
return await MailService.resetFailures(id);
|
||||
});
|
||||
|
|
|
@ -2,55 +2,63 @@ import CONSTRAINTS from "../shared/validation/constraints";
|
|||
import ErrorTypes from "../utils/errorTypes";
|
||||
import * as MonitoringService from "../services/monitoringService";
|
||||
import * as Resources from "../utils/resources";
|
||||
import {handleJSONWithData} from "../utils/resources";
|
||||
import {normalizeString} from "../shared/utils/strings";
|
||||
import {forConstraint} from "../shared/validation/validator";
|
||||
import {Request, Response} from "express";
|
||||
import {isMonitoringToken, JSONObject, MonitoringResponse, MonitoringToken, toMonitoringResponse} from "../types";
|
||||
import { handleJSONWithData } from "../utils/resources";
|
||||
import { normalizeString } from "../shared/utils/strings";
|
||||
import { forConstraint } from "../shared/validation/validator";
|
||||
import { Request, Response } from "express";
|
||||
import {
|
||||
isMonitoringToken,
|
||||
JSONObject,
|
||||
MonitoringResponse,
|
||||
MonitoringToken,
|
||||
toMonitoringResponse,
|
||||
} from "../types";
|
||||
|
||||
const isValidToken = forConstraint(CONSTRAINTS.token, false);
|
||||
|
||||
// FIXME: Get rid of any
|
||||
async function doGetAll(req: Request): Promise<{ total: number, result: any }> {
|
||||
const restParams = await Resources.getValidRestParams('list', null, req);
|
||||
const {monitoringStates, total} = await MonitoringService.getAll(restParams);
|
||||
async function doGetAll(req: Request): Promise<{ total: number; result: any }> {
|
||||
const restParams = await Resources.getValidRestParams("list", null, req);
|
||||
const { monitoringStates, total } = await MonitoringService.getAll(
|
||||
restParams
|
||||
);
|
||||
return {
|
||||
total,
|
||||
result: monitoringStates.map(state => {
|
||||
result: monitoringStates.map((state) => {
|
||||
state.mapId = state.mac.toLowerCase().replace(/:/g, "");
|
||||
return state;
|
||||
})
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
export function getAll(req: Request, res: Response): void {
|
||||
doGetAll(req)
|
||||
.then(({total, result}) => {
|
||||
res.set('X-Total-Count', total.toString(10));
|
||||
Resources.success(res, result)
|
||||
.then(({ total, result }) => {
|
||||
res.set("X-Total-Count", total.toString(10));
|
||||
Resources.success(res, result);
|
||||
})
|
||||
.catch(err => Resources.error(res, err));
|
||||
.catch((err) => Resources.error(res, err));
|
||||
}
|
||||
|
||||
function getValidatedToken(data: JSONObject): MonitoringToken {
|
||||
if (!isMonitoringToken(data.token)) {
|
||||
throw {data: 'Missing token.', type: ErrorTypes.badRequest};
|
||||
throw { data: "Missing token.", type: ErrorTypes.badRequest };
|
||||
}
|
||||
const token = normalizeString(data.token);
|
||||
if (!isValidToken(token)) {
|
||||
throw {data: 'Invalid token.', type: ErrorTypes.badRequest};
|
||||
throw { data: "Invalid token.", type: ErrorTypes.badRequest };
|
||||
}
|
||||
return token as MonitoringToken;
|
||||
}
|
||||
|
||||
export const confirm = handleJSONWithData<MonitoringResponse>(async data => {
|
||||
export const confirm = handleJSONWithData<MonitoringResponse>(async (data) => {
|
||||
const validatedToken = getValidatedToken(data);
|
||||
|
||||
const node = await MonitoringService.confirm(validatedToken);
|
||||
return toMonitoringResponse(node);
|
||||
});
|
||||
|
||||
export const disable = handleJSONWithData<MonitoringResponse>(async data => {
|
||||
export const disable = handleJSONWithData<MonitoringResponse>(async (data) => {
|
||||
const validatedToken: MonitoringToken = getValidatedToken(data);
|
||||
|
||||
const node = await MonitoringService.disable(validatedToken);
|
||||
|
|
|
@ -2,11 +2,11 @@ import Constraints from "../shared/validation/constraints";
|
|||
import ErrorTypes from "../utils/errorTypes";
|
||||
import * as MonitoringService from "../services/monitoringService";
|
||||
import * as NodeService from "../services/nodeService";
|
||||
import {normalizeMac, normalizeString} from "../shared/utils/strings";
|
||||
import {forConstraint, forConstraints} from "../shared/validation/validator";
|
||||
import { normalizeMac, normalizeString } from "../shared/utils/strings";
|
||||
import { forConstraint, forConstraints } from "../shared/validation/validator";
|
||||
import * as Resources from "../utils/resources";
|
||||
import {handleJSONWithData} from "../utils/resources";
|
||||
import {Request, Response} from "express";
|
||||
import { handleJSONWithData } from "../utils/resources";
|
||||
import { Request, Response } from "express";
|
||||
import {
|
||||
CreateOrUpdateNode,
|
||||
DomainSpecificNodeResponse,
|
||||
|
@ -24,18 +24,26 @@ import {
|
|||
toDomainSpecificNodeResponse,
|
||||
Token,
|
||||
toNodeResponse,
|
||||
toNodeTokenResponse
|
||||
toNodeTokenResponse,
|
||||
} from "../types";
|
||||
|
||||
const nodeFields = ['hostname', 'key', 'email', 'nickname', 'mac', 'coords', 'monitoring'];
|
||||
const nodeFields = [
|
||||
"hostname",
|
||||
"key",
|
||||
"email",
|
||||
"nickname",
|
||||
"mac",
|
||||
"coords",
|
||||
"monitoring",
|
||||
];
|
||||
|
||||
function getNormalizedNodeData(reqData: JSONObject): CreateOrUpdateNode {
|
||||
const node: { [key: string]: any } = {};
|
||||
const node: { [key: string]: unknown } = {};
|
||||
for (const field of nodeFields) {
|
||||
let value: JSONValue | undefined = reqData[field];
|
||||
if (isString(value)) {
|
||||
value = normalizeString(value);
|
||||
if (field === 'mac') {
|
||||
if (field === "mac") {
|
||||
value = normalizeMac(value as MAC);
|
||||
}
|
||||
}
|
||||
|
@ -49,7 +57,7 @@ function getNormalizedNodeData(reqData: JSONObject): CreateOrUpdateNode {
|
|||
return node;
|
||||
}
|
||||
|
||||
throw {data: "Invalid node data.", type: ErrorTypes.badRequest};
|
||||
throw { data: "Invalid node data.", type: ErrorTypes.badRequest };
|
||||
}
|
||||
|
||||
const isValidNode = forConstraints(Constraints.node, false);
|
||||
|
@ -57,77 +65,82 @@ const isValidToken = forConstraint(Constraints.token, false);
|
|||
|
||||
function getValidatedToken(data: JSONObject): Token {
|
||||
if (!isToken(data.token)) {
|
||||
throw {data: 'Missing token.', type: ErrorTypes.badRequest};
|
||||
throw { data: "Missing token.", type: ErrorTypes.badRequest };
|
||||
}
|
||||
const token = normalizeString(data.token);
|
||||
if (!isValidToken(token)) {
|
||||
throw {data: 'Invalid token.', type: ErrorTypes.badRequest};
|
||||
throw { data: "Invalid token.", type: ErrorTypes.badRequest };
|
||||
}
|
||||
return token as Token;
|
||||
}
|
||||
|
||||
export const create = handleJSONWithData<NodeTokenResponse>(async data => {
|
||||
export const create = handleJSONWithData<NodeTokenResponse>(async (data) => {
|
||||
const baseNode = getNormalizedNodeData(data);
|
||||
if (!isValidNode(baseNode)) {
|
||||
throw {data: 'Invalid node data.', type: ErrorTypes.badRequest};
|
||||
throw { data: "Invalid node data.", type: ErrorTypes.badRequest };
|
||||
}
|
||||
|
||||
const node = await NodeService.createNode(baseNode);
|
||||
return toNodeTokenResponse(node);
|
||||
});
|
||||
|
||||
export const update = handleJSONWithData<NodeTokenResponse>(async data => {
|
||||
export const update = handleJSONWithData<NodeTokenResponse>(async (data) => {
|
||||
const validatedToken: Token = getValidatedToken(data);
|
||||
const baseNode = getNormalizedNodeData(data);
|
||||
if (!isValidNode(baseNode)) {
|
||||
throw {data: 'Invalid node data.', type: ErrorTypes.badRequest};
|
||||
throw { data: "Invalid node data.", type: ErrorTypes.badRequest };
|
||||
}
|
||||
|
||||
const node = await NodeService.updateNode(validatedToken, baseNode);
|
||||
return toNodeTokenResponse(node);
|
||||
});
|
||||
|
||||
export const remove = handleJSONWithData<void>(async data => {
|
||||
export const remove = handleJSONWithData<void>(async (data) => {
|
||||
const validatedToken = getValidatedToken(data);
|
||||
await NodeService.deleteNode(validatedToken);
|
||||
});
|
||||
|
||||
export const get = handleJSONWithData<NodeResponse>(async data => {
|
||||
export const get = handleJSONWithData<NodeResponse>(async (data) => {
|
||||
const validatedToken: Token = getValidatedToken(data);
|
||||
const node = await NodeService.getNodeDataByToken(validatedToken);
|
||||
return toNodeResponse(node);
|
||||
});
|
||||
|
||||
async function doGetAll(req: Request): Promise<{ total: number; pageNodes: any }> {
|
||||
const restParams = await Resources.getValidRestParams('list', 'node', req);
|
||||
async function doGetAll(
|
||||
req: Request
|
||||
): Promise<{ total: number; pageNodes: any }> {
|
||||
const restParams = await Resources.getValidRestParams("list", "node", req);
|
||||
|
||||
const nodes = await NodeService.getAllNodes();
|
||||
|
||||
const realNodes = nodes.filter(node =>
|
||||
// We ignore nodes without tokens as those are only manually added ones like gateways.
|
||||
!!node.token // FIXME: As node.token may not be undefined or null here, handle this when loading!
|
||||
const realNodes = nodes.filter(
|
||||
(node) =>
|
||||
// We ignore nodes without tokens as those are only manually added ones like gateways.
|
||||
!!node.token // FIXME: As node.token may not be undefined or null here, handle this when loading!
|
||||
);
|
||||
|
||||
const macs: MAC[] = realNodes.map(node => node.mac);
|
||||
const macs: MAC[] = realNodes.map((node) => node.mac);
|
||||
const nodeStateByMac = await MonitoringService.getByMacs(macs);
|
||||
|
||||
const domainSpecificNodes: DomainSpecificNodeResponse[] = realNodes.map(node => {
|
||||
const nodeState: NodeStateData = nodeStateByMac[node.mac] || {};
|
||||
return toDomainSpecificNodeResponse(node, nodeState);
|
||||
});
|
||||
const domainSpecificNodes: DomainSpecificNodeResponse[] = realNodes.map(
|
||||
(node) => {
|
||||
const nodeState: NodeStateData = nodeStateByMac[node.mac] || {};
|
||||
return toDomainSpecificNodeResponse(node, nodeState);
|
||||
}
|
||||
);
|
||||
|
||||
const filteredNodes = Resources.filter<DomainSpecificNodeResponse>(
|
||||
domainSpecificNodes,
|
||||
[
|
||||
'hostname',
|
||||
'nickname',
|
||||
'email',
|
||||
'token',
|
||||
'mac',
|
||||
'site',
|
||||
'domain',
|
||||
'key',
|
||||
'onlineState'
|
||||
"hostname",
|
||||
"nickname",
|
||||
"email",
|
||||
"token",
|
||||
"mac",
|
||||
"site",
|
||||
"domain",
|
||||
"key",
|
||||
"onlineState",
|
||||
],
|
||||
restParams
|
||||
);
|
||||
|
@ -141,13 +154,13 @@ async function doGetAll(req: Request): Promise<{ total: number; pageNodes: any }
|
|||
);
|
||||
const pageNodes = Resources.getPageEntities(sortedNodes, restParams);
|
||||
|
||||
return {total, pageNodes};
|
||||
return { total, pageNodes };
|
||||
}
|
||||
|
||||
export function getAll(req: Request, res: Response): void {
|
||||
doGetAll(req)
|
||||
.then((result: { total: number, pageNodes: any[] }) => {
|
||||
res.set('X-Total-Count', result.total.toString(10));
|
||||
.then((result: { total: number; pageNodes: any[] }) => {
|
||||
res.set("X-Total-Count", result.total.toString(10));
|
||||
return Resources.success(res, result.pageNodes);
|
||||
})
|
||||
.catch((err: any) => Resources.error(res, err));
|
||||
|
|
|
@ -1,16 +1,16 @@
|
|||
import ErrorTypes from "../utils/errorTypes";
|
||||
import Logger from "../logger";
|
||||
import {getNodeStatistics} from "../services/nodeService";
|
||||
import {handleJSON} from "../utils/resources";
|
||||
import { getNodeStatistics } from "../services/nodeService";
|
||||
import { handleJSON } from "../utils/resources";
|
||||
|
||||
export const get = handleJSON(async () => {
|
||||
try {
|
||||
const nodeStatistics = await getNodeStatistics();
|
||||
return {
|
||||
nodes: nodeStatistics
|
||||
nodes: nodeStatistics,
|
||||
};
|
||||
} catch (error) {
|
||||
Logger.tag('statistics').error('Error getting statistics:', error);
|
||||
throw {data: 'Internal error.', type: ErrorTypes.internalError};
|
||||
Logger.tag("statistics").error("Error getting statistics:", error);
|
||||
throw { data: "Internal error.", type: ErrorTypes.internalError };
|
||||
}
|
||||
});
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
import CONSTRAINTS from "../shared/validation/constraints";
|
||||
import ErrorTypes from "../utils/errorTypes";
|
||||
import * as Resources from "../utils/resources";
|
||||
import {handleJSONWithData, RequestData} from "../utils/resources";
|
||||
import {getTasks, Task, TaskState} from "../jobs/scheduler";
|
||||
import {normalizeString} from "../shared/utils/strings";
|
||||
import {forConstraint} from "../shared/validation/validator";
|
||||
import {Request, Response} from "express";
|
||||
import {isString, isTaskSortField} from "../types";
|
||||
import { handleJSONWithData, RequestData } from "../utils/resources";
|
||||
import { getTasks, Task, TaskState } from "../jobs/scheduler";
|
||||
import { normalizeString } from "../shared/utils/strings";
|
||||
import { forConstraint } from "../shared/validation/validator";
|
||||
import { Request, Response } from "express";
|
||||
import { isString, isTaskSortField } from "../types";
|
||||
|
||||
const isValidId = forConstraint(CONSTRAINTS.id, false);
|
||||
|
||||
|
@ -22,7 +22,7 @@ type TaskResponse = {
|
|||
result: string | null;
|
||||
message: string | null;
|
||||
enabled: boolean;
|
||||
}
|
||||
};
|
||||
|
||||
function toTaskResponse(task: Task): TaskResponse {
|
||||
return {
|
||||
|
@ -34,20 +34,26 @@ function toTaskResponse(task: Task): TaskResponse {
|
|||
lastRunStarted: task.lastRunStarted && task.lastRunStarted.unix(),
|
||||
lastRunDuration: task.lastRunDuration || null,
|
||||
state: task.state,
|
||||
result: task.state !== TaskState.RUNNING && task.result ? task.result.state : null,
|
||||
message: task.state !== TaskState.RUNNING && task.result ? task.result.message || null : null,
|
||||
enabled: task.enabled
|
||||
result:
|
||||
task.state !== TaskState.RUNNING && task.result
|
||||
? task.result.state
|
||||
: null,
|
||||
message:
|
||||
task.state !== TaskState.RUNNING && task.result
|
||||
? task.result.message || null
|
||||
: null,
|
||||
enabled: task.enabled,
|
||||
};
|
||||
}
|
||||
|
||||
async function withValidTaskId(data: RequestData): Promise<string> {
|
||||
if (!isString(data.id)) {
|
||||
throw {data: 'Missing task id.', type: ErrorTypes.badRequest};
|
||||
throw { data: "Missing task id.", type: ErrorTypes.badRequest };
|
||||
}
|
||||
const id = normalizeString(data.id);
|
||||
|
||||
if (!isValidId(id)) {
|
||||
throw {data: 'Invalid task id.', type: ErrorTypes.badRequest};
|
||||
throw { data: "Invalid task id.", type: ErrorTypes.badRequest };
|
||||
}
|
||||
|
||||
return id;
|
||||
|
@ -58,7 +64,7 @@ async function getTask(id: string): Promise<Task> {
|
|||
const task = tasks[id];
|
||||
|
||||
if (!task) {
|
||||
throw {data: 'Task not found.', type: ErrorTypes.notFound};
|
||||
throw { data: "Task not found.", type: ErrorTypes.notFound };
|
||||
}
|
||||
|
||||
return task;
|
||||
|
@ -69,14 +75,19 @@ async function withTask(data: RequestData): Promise<Task> {
|
|||
return await getTask(id);
|
||||
}
|
||||
|
||||
async function setTaskEnabled(data: RequestData, enable: boolean): Promise<TaskResponse> {
|
||||
async function setTaskEnabled(
|
||||
data: RequestData,
|
||||
enable: boolean
|
||||
): Promise<TaskResponse> {
|
||||
const task = await withTask(data);
|
||||
task.enabled = enable;
|
||||
return toTaskResponse(task);
|
||||
}
|
||||
|
||||
async function doGetAll(req: Request): Promise<{ total: number, pageTasks: Task[] }> {
|
||||
const restParams = await Resources.getValidRestParams('list', null, req);
|
||||
async function doGetAll(
|
||||
req: Request
|
||||
): Promise<{ total: number; pageTasks: Task[] }> {
|
||||
const restParams = await Resources.getValidRestParams("list", null, req);
|
||||
|
||||
const tasks = Resources.sort(
|
||||
Object.values(getTasks()),
|
||||
|
@ -85,7 +96,7 @@ async function doGetAll(req: Request): Promise<{ total: number, pageTasks: Task[
|
|||
);
|
||||
const filteredTasks = Resources.filter(
|
||||
tasks,
|
||||
['id', 'name', 'schedule', 'state'],
|
||||
["id", "name", "schedule", "state"],
|
||||
restParams
|
||||
);
|
||||
|
||||
|
@ -100,28 +111,28 @@ async function doGetAll(req: Request): Promise<{ total: number, pageTasks: Task[
|
|||
|
||||
export function getAll(req: Request, res: Response): void {
|
||||
doGetAll(req)
|
||||
.then(({total, pageTasks}) => {
|
||||
res.set('X-Total-Count', total.toString(10));
|
||||
.then(({ total, pageTasks }) => {
|
||||
res.set("X-Total-Count", total.toString(10));
|
||||
Resources.success(res, pageTasks.map(toTaskResponse));
|
||||
})
|
||||
.catch(err => Resources.error(res, err));
|
||||
.catch((err) => Resources.error(res, err));
|
||||
}
|
||||
|
||||
export const run = handleJSONWithData(async data => {
|
||||
export const run = handleJSONWithData(async (data) => {
|
||||
const task = await withTask(data);
|
||||
|
||||
if (task.runningSince) {
|
||||
throw {data: 'Task already running.', type: ErrorTypes.conflict};
|
||||
throw { data: "Task already running.", type: ErrorTypes.conflict };
|
||||
}
|
||||
|
||||
task.run();
|
||||
return toTaskResponse(task);
|
||||
});
|
||||
|
||||
export const enable = handleJSONWithData(async data => {
|
||||
export const enable = handleJSONWithData(async (data) => {
|
||||
await setTaskEnabled(data, true);
|
||||
});
|
||||
|
||||
export const disable = handleJSONWithData(async data => {
|
||||
export const disable = handleJSONWithData(async (data) => {
|
||||
await setTaskEnabled(data, false);
|
||||
});
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import {handleJSON} from "../utils/resources";
|
||||
import {version} from "../config";
|
||||
import { handleJSON } from "../utils/resources";
|
||||
import { version } from "../config";
|
||||
|
||||
export const get = handleJSON(async () => ({
|
||||
version
|
||||
version,
|
||||
}));
|
||||
|
|
|
@ -1,51 +1,51 @@
|
|||
import express from "express"
|
||||
import express from "express";
|
||||
|
||||
import {app} from "./app"
|
||||
import {config} from "./config"
|
||||
import { app } from "./app";
|
||||
import { config } from "./config";
|
||||
|
||||
import * as ConfigResource from "./resources/configResource"
|
||||
import * as VersionResource from "./resources/versionResource"
|
||||
import * as StatisticsResource from "./resources/statisticsResource"
|
||||
import * as FrontendResource from "./resources/frontendResource"
|
||||
import * as NodeResource from "./resources/nodeResource"
|
||||
import * as MonitoringResource from "./resources/monitoringResource"
|
||||
import * as TaskResource from "./resources/taskResource"
|
||||
import * as MailResource from "./resources/mailResource"
|
||||
import * as ConfigResource from "./resources/configResource";
|
||||
import * as VersionResource from "./resources/versionResource";
|
||||
import * as StatisticsResource from "./resources/statisticsResource";
|
||||
import * as FrontendResource from "./resources/frontendResource";
|
||||
import * as NodeResource from "./resources/nodeResource";
|
||||
import * as MonitoringResource from "./resources/monitoringResource";
|
||||
import * as TaskResource from "./resources/taskResource";
|
||||
import * as MailResource from "./resources/mailResource";
|
||||
|
||||
export function init (): void {
|
||||
export function init(): void {
|
||||
const router = express.Router();
|
||||
|
||||
router.post('/', FrontendResource.render);
|
||||
router.post("/", FrontendResource.render);
|
||||
|
||||
router.get('/api/config', ConfigResource.get);
|
||||
router.get('/api/version', VersionResource.get);
|
||||
router.get("/api/config", ConfigResource.get);
|
||||
router.get("/api/version", VersionResource.get);
|
||||
|
||||
router.post('/api/node', NodeResource.create);
|
||||
router.put('/api/node/:token', NodeResource.update);
|
||||
router.delete('/api/node/:token', NodeResource.remove);
|
||||
router.get('/api/node/:token', NodeResource.get);
|
||||
router.post("/api/node", NodeResource.create);
|
||||
router.put("/api/node/:token", NodeResource.update);
|
||||
router.delete("/api/node/:token", NodeResource.remove);
|
||||
router.get("/api/node/:token", NodeResource.get);
|
||||
|
||||
router.put('/api/monitoring/confirm/:token', MonitoringResource.confirm);
|
||||
router.put('/api/monitoring/disable/:token', MonitoringResource.disable);
|
||||
router.put("/api/monitoring/confirm/:token", MonitoringResource.confirm);
|
||||
router.put("/api/monitoring/disable/:token", MonitoringResource.disable);
|
||||
|
||||
router.get('/internal/api/statistics', StatisticsResource.get);
|
||||
router.get("/internal/api/statistics", StatisticsResource.get);
|
||||
|
||||
router.get('/internal/api/tasks', TaskResource.getAll);
|
||||
router.put('/internal/api/tasks/run/:id', TaskResource.run);
|
||||
router.put('/internal/api/tasks/enable/:id', TaskResource.enable);
|
||||
router.put('/internal/api/tasks/disable/:id', TaskResource.disable);
|
||||
router.get("/internal/api/tasks", TaskResource.getAll);
|
||||
router.put("/internal/api/tasks/run/:id", TaskResource.run);
|
||||
router.put("/internal/api/tasks/enable/:id", TaskResource.enable);
|
||||
router.put("/internal/api/tasks/disable/:id", TaskResource.disable);
|
||||
|
||||
router.get('/internal/api/monitoring', MonitoringResource.getAll);
|
||||
router.get("/internal/api/monitoring", MonitoringResource.getAll);
|
||||
|
||||
router.get('/internal/api/mails', MailResource.getAll);
|
||||
router.get('/internal/api/mails/:id', MailResource.get);
|
||||
router.delete('/internal/api/mails/:id', MailResource.remove);
|
||||
router.put('/internal/api/mails/reset/:id', MailResource.resetFailures);
|
||||
router.get("/internal/api/mails", MailResource.getAll);
|
||||
router.get("/internal/api/mails/:id", MailResource.get);
|
||||
router.delete("/internal/api/mails/:id", MailResource.remove);
|
||||
router.put("/internal/api/mails/reset/:id", MailResource.resetFailures);
|
||||
|
||||
router.put('/internal/api/nodes/:token', NodeResource.update);
|
||||
router.delete('/internal/api/nodes/:token', NodeResource.remove);
|
||||
router.get('/internal/api/nodes', NodeResource.getAll);
|
||||
router.get('/internal/api/nodes/:token', NodeResource.get);
|
||||
router.put("/internal/api/nodes/:token", NodeResource.update);
|
||||
router.delete("/internal/api/nodes/:token", NodeResource.remove);
|
||||
router.get("/internal/api/nodes", NodeResource.getAll);
|
||||
router.get("/internal/api/nodes/:token", NodeResource.get);
|
||||
|
||||
app.use(config.server.rootPath, router);
|
||||
}
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
import _ from "lodash";
|
||||
import moment, {Moment} from "moment";
|
||||
import {db} from "../db/database";
|
||||
import moment, { Moment } from "moment";
|
||||
import { db } from "../db/database";
|
||||
import Logger from "../logger";
|
||||
import * as MailTemplateService from "./mailTemplateService";
|
||||
import * as Resources from "../utils/resources";
|
||||
import {RestParams} from "../utils/resources";
|
||||
import { RestParams } from "../utils/resources";
|
||||
import {
|
||||
EmailAddress,
|
||||
isJSONObject,
|
||||
|
@ -16,32 +16,31 @@ import {
|
|||
MailSortField,
|
||||
MailType,
|
||||
parseJSON,
|
||||
UnixTimestampSeconds
|
||||
UnixTimestampSeconds,
|
||||
} from "../types";
|
||||
import ErrorTypes from "../utils/errorTypes";
|
||||
import {send} from "../mail";
|
||||
import { send } from "../mail";
|
||||
|
||||
type EmaiQueueRow = {
|
||||
id: MailId,
|
||||
created_at: UnixTimestampSeconds,
|
||||
data: string,
|
||||
email: string,
|
||||
failures: number,
|
||||
modified_at: UnixTimestampSeconds,
|
||||
recipient: EmailAddress,
|
||||
sender: EmailAddress,
|
||||
id: MailId;
|
||||
created_at: UnixTimestampSeconds;
|
||||
data: string;
|
||||
email: string;
|
||||
failures: number;
|
||||
modified_at: UnixTimestampSeconds;
|
||||
recipient: EmailAddress;
|
||||
sender: EmailAddress;
|
||||
};
|
||||
|
||||
const MAIL_QUEUE_DB_BATCH_SIZE = 50;
|
||||
|
||||
async function sendMail(options: Mail): Promise<void> {
|
||||
Logger
|
||||
.tag('mail', 'queue')
|
||||
.info(
|
||||
'Sending pending mail[%d] of type %s. ' +
|
||||
'Had %d failures before.',
|
||||
options.id, options.email, options.failures
|
||||
);
|
||||
Logger.tag("mail", "queue").info(
|
||||
"Sending pending mail[%d] of type %s. " + "Had %d failures before.",
|
||||
options.id,
|
||||
options.email,
|
||||
options.failures
|
||||
);
|
||||
|
||||
const renderedTemplate = await MailTemplateService.render(options);
|
||||
|
||||
|
@ -49,21 +48,24 @@ async function sendMail(options: Mail): Promise<void> {
|
|||
from: options.sender,
|
||||
to: options.recipient,
|
||||
subject: renderedTemplate.subject,
|
||||
html: renderedTemplate.body
|
||||
html: renderedTemplate.body,
|
||||
};
|
||||
|
||||
await send(mailOptions);
|
||||
|
||||
Logger.tag('mail', 'queue').info('Mail[%d] has been send.', options.id);
|
||||
Logger.tag("mail", "queue").info("Mail[%d] has been send.", options.id);
|
||||
}
|
||||
|
||||
async function findPendingMailsBefore(beforeMoment: Moment, limit: number): Promise<Mail[]> {
|
||||
async function findPendingMailsBefore(
|
||||
beforeMoment: Moment,
|
||||
limit: number
|
||||
): Promise<Mail[]> {
|
||||
const rows = await db.all<EmaiQueueRow>(
|
||||
'SELECT * FROM email_queue WHERE modified_at < ? AND failures < ? ORDER BY id ASC LIMIT ?',
|
||||
[beforeMoment.unix(), 5, limit],
|
||||
"SELECT * FROM email_queue WHERE modified_at < ? AND failures < ? ORDER BY id ASC LIMIT ?",
|
||||
[beforeMoment.unix(), 5, limit]
|
||||
);
|
||||
|
||||
return rows.map(row => {
|
||||
return rows.map((row) => {
|
||||
const mailType = row.email;
|
||||
if (!isMailType(mailType)) {
|
||||
throw new Error(`Invalid mailtype in database: ${mailType}`);
|
||||
|
@ -84,13 +86,15 @@ async function findPendingMailsBefore(beforeMoment: Moment, limit: number): Prom
|
|||
}
|
||||
|
||||
async function removePendingMailFromQueue(id: MailId): Promise<void> {
|
||||
await db.run('DELETE FROM email_queue WHERE id = ?', [id]);
|
||||
await db.run("DELETE FROM email_queue WHERE id = ?", [id]);
|
||||
}
|
||||
|
||||
async function incrementFailureCounterForPendingEmail(id: MailId): Promise<void> {
|
||||
async function incrementFailureCounterForPendingEmail(
|
||||
id: MailId
|
||||
): Promise<void> {
|
||||
await db.run(
|
||||
'UPDATE email_queue SET failures = failures + 1, modified_at = ? WHERE id = ?',
|
||||
[moment().unix(), id],
|
||||
"UPDATE email_queue SET failures = failures + 1, modified_at = ? WHERE id = ?",
|
||||
[moment().unix(), id]
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -99,7 +103,10 @@ async function sendPendingMail(pendingMail: Mail): Promise<void> {
|
|||
await sendMail(pendingMail);
|
||||
} catch (error) {
|
||||
// we only log the error and increment the failure counter as we want to continue with pending mails
|
||||
Logger.tag('mail', 'queue').error('Error sending pending mail[' + pendingMail.id + ']:', error);
|
||||
Logger.tag("mail", "queue").error(
|
||||
"Error sending pending mail[" + pendingMail.id + "]:",
|
||||
error
|
||||
);
|
||||
|
||||
await incrementFailureCounterForPendingEmail(pendingMail.id);
|
||||
return;
|
||||
|
@ -109,22 +116,29 @@ async function sendPendingMail(pendingMail: Mail): Promise<void> {
|
|||
}
|
||||
|
||||
async function doGetMail(id: MailId): Promise<Mail> {
|
||||
const row = await db.get<Mail>('SELECT * FROM email_queue WHERE id = ?', [id]);
|
||||
const row = await db.get<Mail>("SELECT * FROM email_queue WHERE id = ?", [
|
||||
id,
|
||||
]);
|
||||
if (row === undefined) {
|
||||
throw {data: 'Mail not found.', type: ErrorTypes.notFound};
|
||||
throw { data: "Mail not found.", type: ErrorTypes.notFound };
|
||||
}
|
||||
return row;
|
||||
}
|
||||
|
||||
export async function enqueue(sender: string, recipient: string, email: MailType, data: MailData): Promise<void> {
|
||||
export async function enqueue(
|
||||
sender: string,
|
||||
recipient: string,
|
||||
email: MailType,
|
||||
data: MailData
|
||||
): Promise<void> {
|
||||
if (!_.isPlainObject(data)) {
|
||||
throw new Error('Unexpected data: ' + data);
|
||||
throw new Error("Unexpected data: " + data);
|
||||
}
|
||||
await db.run(
|
||||
'INSERT INTO email_queue ' +
|
||||
'(failures, sender, recipient, email, data) ' +
|
||||
'VALUES (?, ?, ?, ?, ?)',
|
||||
[0, sender, recipient, email, JSON.stringify(data)],
|
||||
"INSERT INTO email_queue " +
|
||||
"(failures, sender, recipient, email, data) " +
|
||||
"VALUES (?, ?, ?, ?, ?)",
|
||||
[0, sender, recipient, email, JSON.stringify(data)]
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -132,10 +146,12 @@ export async function getMail(id: MailId): Promise<Mail> {
|
|||
return await doGetMail(id);
|
||||
}
|
||||
|
||||
export async function getPendingMails(restParams: RestParams): Promise<{ mails: Mail[], total: number }> {
|
||||
export async function getPendingMails(
|
||||
restParams: RestParams
|
||||
): Promise<{ mails: Mail[]; total: number }> {
|
||||
const row = await db.get<{ total: number }>(
|
||||
'SELECT count(*) AS total FROM email_queue',
|
||||
[],
|
||||
"SELECT count(*) AS total FROM email_queue",
|
||||
[]
|
||||
);
|
||||
|
||||
const total = row?.total || 0;
|
||||
|
@ -144,18 +160,18 @@ export async function getPendingMails(restParams: RestParams): Promise<{ mails:
|
|||
restParams,
|
||||
MailSortField.ID,
|
||||
isMailSortField,
|
||||
['id', 'failures', 'sender', 'recipient', 'email']
|
||||
["id", "failures", "sender", "recipient", "email"]
|
||||
);
|
||||
|
||||
const mails = await db.all(
|
||||
'SELECT * FROM email_queue WHERE ' + filter.query,
|
||||
filter.params,
|
||||
"SELECT * FROM email_queue WHERE " + filter.query,
|
||||
filter.params
|
||||
);
|
||||
|
||||
return {
|
||||
mails,
|
||||
total
|
||||
}
|
||||
total,
|
||||
};
|
||||
}
|
||||
|
||||
export async function deleteMail(id: MailId): Promise<void> {
|
||||
|
@ -164,29 +180,32 @@ export async function deleteMail(id: MailId): Promise<void> {
|
|||
|
||||
export async function resetFailures(id: MailId): Promise<Mail> {
|
||||
const statement = await db.run(
|
||||
'UPDATE email_queue SET failures = 0, modified_at = ? WHERE id = ?',
|
||||
[moment().unix(), id],
|
||||
"UPDATE email_queue SET failures = 0, modified_at = ? WHERE id = ?",
|
||||
[moment().unix(), id]
|
||||
);
|
||||
|
||||
if (!statement.changes) {
|
||||
throw new Error('Error: could not reset failure count for mail: ' + id);
|
||||
throw new Error("Error: could not reset failure count for mail: " + id);
|
||||
}
|
||||
|
||||
return await doGetMail(id);
|
||||
}
|
||||
|
||||
export async function sendPendingMails(): Promise<void> {
|
||||
Logger.tag('mail', 'queue').debug('Start sending pending mails...');
|
||||
Logger.tag("mail", "queue").debug("Start sending pending mails...");
|
||||
|
||||
const startTime = moment();
|
||||
|
||||
while (true) {
|
||||
Logger.tag('mail', 'queue').debug('Sending next batch...');
|
||||
Logger.tag("mail", "queue").debug("Sending next batch...");
|
||||
|
||||
const pendingMails = await findPendingMailsBefore(startTime, MAIL_QUEUE_DB_BATCH_SIZE);
|
||||
const pendingMails = await findPendingMailsBefore(
|
||||
startTime,
|
||||
MAIL_QUEUE_DB_BATCH_SIZE
|
||||
);
|
||||
|
||||
if (_.isEmpty(pendingMails)) {
|
||||
Logger.tag('mail', 'queue').debug('Done sending pending mails.');
|
||||
Logger.tag("mail", "queue").debug("Done sending pending mails.");
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,38 +1,40 @@
|
|||
import _ from "lodash";
|
||||
import deepExtend from "deep-extend";
|
||||
import {readFileSync, promises as fs} from "graceful-fs";
|
||||
import { readFileSync, promises as fs } from "graceful-fs";
|
||||
import moment from "moment";
|
||||
import {htmlToText} from "nodemailer-html-to-text";
|
||||
import { htmlToText } from "nodemailer-html-to-text";
|
||||
|
||||
import {config} from "../config";
|
||||
import { config } from "../config";
|
||||
import Logger from "../logger";
|
||||
import {editNodeUrl} from "../utils/urlBuilder";
|
||||
import {Transporter} from "nodemailer";
|
||||
import {MailData, Mail} from "../types";
|
||||
import { editNodeUrl } from "../utils/urlBuilder";
|
||||
import { Transporter } from "nodemailer";
|
||||
import { MailData, Mail } from "../types";
|
||||
|
||||
const templateBasePath = __dirname + '/../mailTemplates';
|
||||
const snippetsBasePath = templateBasePath + '/snippets';
|
||||
const templateBasePath = __dirname + "/../mailTemplates";
|
||||
const snippetsBasePath = templateBasePath + "/snippets";
|
||||
|
||||
const templateFunctions: {
|
||||
[key: string]:
|
||||
| ((name: string, data: MailData) => string)
|
||||
| ((data: MailData) => string)
|
||||
| ((href: string, text: string) => string)
|
||||
| ((unix: number) => string)
|
||||
| ((unix: number) => string);
|
||||
} = {};
|
||||
|
||||
function renderSnippet(this: any, name: string, data: MailData): string {
|
||||
const snippetFile = snippetsBasePath + '/' + name + '.html';
|
||||
const snippetFile = snippetsBasePath + "/" + name + ".html";
|
||||
|
||||
return _.template(readFileSync(snippetFile).toString())(deepExtend(
|
||||
{},
|
||||
this, // parent data
|
||||
data,
|
||||
templateFunctions
|
||||
));
|
||||
return _.template(readFileSync(snippetFile).toString())(
|
||||
deepExtend(
|
||||
{},
|
||||
this, // parent data
|
||||
data,
|
||||
templateFunctions
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
function snippet(name: string): ((this: any, data: MailData) => string) {
|
||||
function snippet(name: string): (this: any, data: MailData) => string {
|
||||
return function (this: any, data: MailData): string {
|
||||
return renderSnippet.bind(this)(name, data);
|
||||
};
|
||||
|
@ -44,7 +46,7 @@ function renderLink(href: string, text: string): string {
|
|||
'<a href="<%- href %>#" style="color: #E5287A;"><%- text %></a>'
|
||||
)({
|
||||
href: href,
|
||||
text: text || href
|
||||
text: text || href,
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -53,17 +55,17 @@ function renderHR(): string {
|
|||
}
|
||||
|
||||
function formatDateTime(unix: number): string {
|
||||
return moment.unix(unix).locale('de').local().format('DD.MM.YYYY HH:mm');
|
||||
return moment.unix(unix).locale("de").local().format("DD.MM.YYYY HH:mm");
|
||||
}
|
||||
|
||||
function formatFromNow(unix: number): string {
|
||||
return moment.unix(unix).locale('de').fromNow();
|
||||
return moment.unix(unix).locale("de").fromNow();
|
||||
}
|
||||
|
||||
templateFunctions.header = snippet('header');
|
||||
templateFunctions.footer = snippet('footer');
|
||||
templateFunctions.header = snippet("header");
|
||||
templateFunctions.footer = snippet("footer");
|
||||
|
||||
templateFunctions.monitoringFooter = snippet('monitoring-footer');
|
||||
templateFunctions.monitoringFooter = snippet("monitoring-footer");
|
||||
|
||||
templateFunctions.snippet = renderSnippet;
|
||||
|
||||
|
@ -73,24 +75,29 @@ templateFunctions.hr = renderHR;
|
|||
templateFunctions.formatDateTime = formatDateTime;
|
||||
templateFunctions.formatFromNow = formatFromNow;
|
||||
|
||||
export function configureTransporter (transporter: Transporter): void {
|
||||
transporter.use('compile', htmlToText({
|
||||
tables: ['.table']
|
||||
}));
|
||||
export function configureTransporter(transporter: Transporter): void {
|
||||
transporter.use(
|
||||
"compile",
|
||||
htmlToText({
|
||||
tables: [".table"],
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
export async function render(mailOptions: Mail): Promise<{subject: string, body: string}> {
|
||||
const templatePathPrefix = templateBasePath + '/' + mailOptions.email;
|
||||
export async function render(
|
||||
mailOptions: Mail
|
||||
): Promise<{ subject: string; body: string }> {
|
||||
const templatePathPrefix = templateBasePath + "/" + mailOptions.email;
|
||||
|
||||
const subject = await fs.readFile(templatePathPrefix + '.subject.txt');
|
||||
const body = await fs.readFile(templatePathPrefix + '.body.html');
|
||||
const subject = await fs.readFile(templatePathPrefix + ".subject.txt");
|
||||
const body = await fs.readFile(templatePathPrefix + ".body.html");
|
||||
|
||||
const data = deepExtend(
|
||||
{},
|
||||
mailOptions.data,
|
||||
{
|
||||
community: config.client.community,
|
||||
editNodeUrl: editNodeUrl()
|
||||
editNodeUrl: editNodeUrl(),
|
||||
},
|
||||
templateFunctions
|
||||
);
|
||||
|
@ -98,12 +105,13 @@ export async function render(mailOptions: Mail): Promise<{subject: string, body:
|
|||
try {
|
||||
return {
|
||||
subject: _.template(subject.toString())(data).trim(),
|
||||
body: _.template(body.toString())(data)
|
||||
body: _.template(body.toString())(data),
|
||||
};
|
||||
} catch (error) {
|
||||
Logger
|
||||
.tag('mail', 'template')
|
||||
.error('Error rendering template for mail[' + mailOptions.id + ']:', error);
|
||||
Logger.tag("mail", "template").error(
|
||||
"Error rendering template for mail[" + mailOptions.id + "]:",
|
||||
error
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
import {ParsedNode, parseNode, parseNodesJson} from "./monitoringService";
|
||||
import {Domain, MAC, OnlineState, Site, UnixTimestampSeconds} from "../types";
|
||||
import Logger from '../logger';
|
||||
import {MockLogger} from "../__mocks__/logger";
|
||||
import {now, parseTimestamp} from "../utils/time";
|
||||
import { ParsedNode, parseNode, parseNodesJson } from "./monitoringService";
|
||||
import { Domain, MAC, OnlineState, Site, UnixTimestampSeconds } from "../types";
|
||||
import Logger from "../logger";
|
||||
import { MockLogger } from "../__mocks__/logger";
|
||||
import { now, parseTimestamp } from "../utils/time";
|
||||
|
||||
const mockedLogger = Logger as MockLogger;
|
||||
|
||||
jest.mock('../logger');
|
||||
jest.mock('../db/database');
|
||||
jest.mock("../logger");
|
||||
jest.mock("../db/database");
|
||||
|
||||
const NODES_JSON_INVALID_VERSION = 1;
|
||||
const NODES_JSON_VALID_VERSION = 2;
|
||||
|
@ -25,7 +25,7 @@ beforeEach(() => {
|
|||
mockedLogger.reset();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for undefined node data', () => {
|
||||
test("parseNode() should fail parsing node for undefined node data", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = undefined;
|
||||
|
@ -34,7 +34,7 @@ test('parseNode() should fail parsing node for undefined node data', () => {
|
|||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for empty node data', () => {
|
||||
test("parseNode() should fail parsing node for empty node data", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {};
|
||||
|
@ -43,159 +43,159 @@ test('parseNode() should fail parsing node for empty node data', () => {
|
|||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for empty node info', () => {
|
||||
test("parseNode() should fail parsing node for empty node info", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {}
|
||||
nodeinfo: {},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for non-string node id', () => {
|
||||
test("parseNode() should fail parsing node for non-string node id", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: 42
|
||||
}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for empty node id', () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: ""
|
||||
}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for empty network info', () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {}
|
||||
}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for invalid mac', () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "xxx"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for missing flags', () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for empty flags', () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
node_id: 42,
|
||||
},
|
||||
flags: {}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for missing last seen timestamp', () => {
|
||||
test("parseNode() should fail parsing node for empty node id", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "",
|
||||
},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test("parseNode() should fail parsing node for empty network info", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {},
|
||||
},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test("parseNode() should fail parsing node for invalid mac", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
mac: "xxx",
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test("parseNode() should fail parsing node for missing flags", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test("parseNode() should fail parsing node for empty flags", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
},
|
||||
flags: {},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test("parseNode() should fail parsing node for missing last seen timestamp", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
}
|
||||
online: true,
|
||||
},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for invalid last seen timestamp', () => {
|
||||
test("parseNode() should fail parsing node for invalid last seen timestamp", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
online: true,
|
||||
},
|
||||
lastseen: 42
|
||||
lastseen: 42,
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should succeed parsing node without site and domain', () => {
|
||||
test("parseNode() should succeed parsing node without site and domain", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
online: true,
|
||||
},
|
||||
lastseen: TIMESTAMP_VALID_STRING
|
||||
lastseen: TIMESTAMP_VALID_STRING,
|
||||
};
|
||||
|
||||
// then
|
||||
|
@ -210,22 +210,22 @@ test('parseNode() should succeed parsing node without site and domain', () => {
|
|||
expect(parseNode(importTimestamp, nodeData)).toEqual(expectedParsedNode);
|
||||
});
|
||||
|
||||
test('parseNode() should succeed parsing node with site and domain', () => {
|
||||
test("parseNode() should succeed parsing node with site and domain", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
system: {
|
||||
site_code: "test-site",
|
||||
domain_code: "test-domain"
|
||||
}
|
||||
domain_code: "test-domain",
|
||||
},
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
online: true,
|
||||
},
|
||||
lastseen: TIMESTAMP_VALID_STRING,
|
||||
};
|
||||
|
@ -242,7 +242,7 @@ test('parseNode() should succeed parsing node with site and domain', () => {
|
|||
expect(parseNode(importTimestamp, nodeData)).toEqual(expectedParsedNode);
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing empty string', () => {
|
||||
test("parseNodesJson() should fail parsing empty string", () => {
|
||||
// given
|
||||
const json = "";
|
||||
|
||||
|
@ -250,7 +250,7 @@ test('parseNodesJson() should fail parsing empty string', () => {
|
|||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing malformed JSON', () => {
|
||||
test("parseNodesJson() should fail parsing malformed JSON", () => {
|
||||
// given
|
||||
const json = '{"version": 2]';
|
||||
|
||||
|
@ -258,7 +258,7 @@ test('parseNodesJson() should fail parsing malformed JSON', () => {
|
|||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing JSON null', () => {
|
||||
test("parseNodesJson() should fail parsing JSON null", () => {
|
||||
// given
|
||||
const json = JSON.stringify(null);
|
||||
|
||||
|
@ -266,7 +266,7 @@ test('parseNodesJson() should fail parsing JSON null', () => {
|
|||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing JSON string', () => {
|
||||
test("parseNodesJson() should fail parsing JSON string", () => {
|
||||
// given
|
||||
const json = JSON.stringify("foo");
|
||||
|
||||
|
@ -274,7 +274,7 @@ test('parseNodesJson() should fail parsing JSON string', () => {
|
|||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing JSON number', () => {
|
||||
test("parseNodesJson() should fail parsing JSON number", () => {
|
||||
// given
|
||||
const json = JSON.stringify(42);
|
||||
|
||||
|
@ -282,7 +282,7 @@ test('parseNodesJson() should fail parsing JSON number', () => {
|
|||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing empty JSON object', () => {
|
||||
test("parseNodesJson() should fail parsing empty JSON object", () => {
|
||||
// given
|
||||
const json = JSON.stringify({});
|
||||
|
||||
|
@ -290,57 +290,57 @@ test('parseNodesJson() should fail parsing empty JSON object', () => {
|
|||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing for mismatching version', () => {
|
||||
test("parseNodesJson() should fail parsing for mismatching version", () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_INVALID_VERSION
|
||||
version: NODES_JSON_INVALID_VERSION,
|
||||
});
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing for missing timestamp', () => {
|
||||
test("parseNodesJson() should fail parsing for missing timestamp", () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
nodes: []
|
||||
nodes: [],
|
||||
});
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing for invalid timestamp', () => {
|
||||
test("parseNodesJson() should fail parsing for invalid timestamp", () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
timestamp: TIMESTAMP_INVALID_STRING,
|
||||
nodes: []
|
||||
nodes: [],
|
||||
});
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing for nodes object instead of array', () => {
|
||||
test("parseNodesJson() should fail parsing for nodes object instead of array", () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
timestamp: TIMESTAMP_VALID_STRING,
|
||||
nodes: {}
|
||||
nodes: {},
|
||||
});
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should succeed parsing no nodes', () => {
|
||||
test("parseNodesJson() should succeed parsing no nodes", () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
timestamp: TIMESTAMP_VALID_STRING,
|
||||
nodes: []
|
||||
nodes: [],
|
||||
});
|
||||
|
||||
// when
|
||||
|
@ -352,7 +352,7 @@ test('parseNodesJson() should succeed parsing no nodes', () => {
|
|||
expect(result.totalNodesCount).toEqual(0);
|
||||
});
|
||||
|
||||
test('parseNodesJson() should skip parsing invalid nodes', () => {
|
||||
test("parseNodesJson() should skip parsing invalid nodes", () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
|
@ -363,19 +363,19 @@ test('parseNodesJson() should skip parsing invalid nodes', () => {
|
|||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
system: {
|
||||
site_code: "test-site",
|
||||
domain_code: "test-domain"
|
||||
}
|
||||
domain_code: "test-domain",
|
||||
},
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
online: true,
|
||||
},
|
||||
lastseen: TIMESTAMP_INVALID_STRING,
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// when
|
||||
|
@ -385,10 +385,13 @@ test('parseNodesJson() should skip parsing invalid nodes', () => {
|
|||
expect(result.nodes).toEqual([]);
|
||||
expect(result.failedNodesCount).toEqual(2);
|
||||
expect(result.totalNodesCount).toEqual(2);
|
||||
expect(mockedLogger.getMessages('error', 'monitoring', 'parsing-nodes-json').length).toEqual(2);
|
||||
expect(
|
||||
mockedLogger.getMessages("error", "monitoring", "parsing-nodes-json")
|
||||
.length
|
||||
).toEqual(2);
|
||||
});
|
||||
|
||||
test('parseNodesJson() should parse valid nodes', () => {
|
||||
test("parseNodesJson() should parse valid nodes", () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
|
@ -399,19 +402,19 @@ test('parseNodesJson() should parse valid nodes', () => {
|
|||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
system: {
|
||||
site_code: "test-site",
|
||||
domain_code: "test-domain"
|
||||
}
|
||||
domain_code: "test-domain",
|
||||
},
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
online: true,
|
||||
},
|
||||
lastseen: TIMESTAMP_VALID_STRING,
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// when
|
||||
|
@ -430,5 +433,8 @@ test('parseNodesJson() should parse valid nodes', () => {
|
|||
expect(result.nodes).toEqual([expectedParsedNode]);
|
||||
expect(result.failedNodesCount).toEqual(1);
|
||||
expect(result.totalNodesCount).toEqual(2);
|
||||
expect(mockedLogger.getMessages('error', 'monitoring', 'parsing-nodes-json').length).toEqual(1);
|
||||
expect(
|
||||
mockedLogger.getMessages("error", "monitoring", "parsing-nodes-json")
|
||||
.length
|
||||
).toEqual(1);
|
||||
});
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,15 +1,18 @@
|
|||
import async from "async";
|
||||
import crypto from "crypto";
|
||||
import oldFs, {promises as fs} from "graceful-fs";
|
||||
import oldFs, { promises as fs } from "graceful-fs";
|
||||
import glob from "glob";
|
||||
|
||||
import {config} from "../config";
|
||||
import { config } from "../config";
|
||||
import ErrorTypes from "../utils/errorTypes";
|
||||
import Logger from "../logger";
|
||||
import logger from "../logger";
|
||||
import * as MailService from "../services/mailService";
|
||||
import {normalizeString} from "../shared/utils/strings";
|
||||
import {monitoringConfirmUrl, monitoringDisableUrl} from "../utils/urlBuilder";
|
||||
import { normalizeString } from "../shared/utils/strings";
|
||||
import {
|
||||
monitoringConfirmUrl,
|
||||
monitoringDisableUrl,
|
||||
} from "../utils/urlBuilder";
|
||||
import {
|
||||
BaseNode,
|
||||
Coordinates,
|
||||
|
@ -36,27 +39,27 @@ import {
|
|||
TypeGuard,
|
||||
unhandledEnumField,
|
||||
UnixTimestampMilliseconds,
|
||||
UnixTimestampSeconds
|
||||
UnixTimestampSeconds,
|
||||
} from "../types";
|
||||
import util from "util";
|
||||
|
||||
const pglob = util.promisify(glob);
|
||||
|
||||
type NodeFilter = {
|
||||
hostname?: Hostname,
|
||||
mac?: MAC,
|
||||
key?: FastdKey,
|
||||
token?: Token,
|
||||
monitoringToken?: MonitoringToken,
|
||||
}
|
||||
hostname?: Hostname;
|
||||
mac?: MAC;
|
||||
key?: FastdKey;
|
||||
token?: Token;
|
||||
monitoringToken?: MonitoringToken;
|
||||
};
|
||||
|
||||
type NodeFilenameParsed = {
|
||||
hostname?: Hostname,
|
||||
mac?: MAC,
|
||||
key?: FastdKey,
|
||||
token?: Token,
|
||||
monitoringToken?: MonitoringToken,
|
||||
}
|
||||
hostname?: Hostname;
|
||||
mac?: MAC;
|
||||
key?: FastdKey;
|
||||
token?: Token;
|
||||
monitoringToken?: MonitoringToken;
|
||||
};
|
||||
|
||||
enum LINE_PREFIX {
|
||||
HOSTNAME = "# Knotenname: ",
|
||||
|
@ -69,9 +72,10 @@ enum LINE_PREFIX {
|
|||
MONITORING_TOKEN = "# Monitoring-Token: ",
|
||||
}
|
||||
|
||||
|
||||
function generateToken<Type extends string & { readonly __tag: symbol } = never>(): Type {
|
||||
return crypto.randomBytes(8).toString('hex') as Type;
|
||||
function generateToken<
|
||||
Type extends string & { readonly __tag: symbol } = never
|
||||
>(): Type {
|
||||
return crypto.randomBytes(8).toString("hex") as Type;
|
||||
}
|
||||
|
||||
function toNodeFilesPattern(filter: NodeFilter): string {
|
||||
|
@ -83,9 +87,9 @@ function toNodeFilesPattern(filter: NodeFilter): string {
|
|||
filter.monitoringToken,
|
||||
];
|
||||
|
||||
const pattern = fields.map((value) => value || '*').join('@');
|
||||
const pattern = fields.map((value) => value || "*").join("@");
|
||||
|
||||
return config.server.peersPath + '/' + pattern.toLowerCase();
|
||||
return config.server.peersPath + "/" + pattern.toLowerCase();
|
||||
}
|
||||
|
||||
function findNodeFiles(filter: NodeFilter): Promise<string[]> {
|
||||
|
@ -97,24 +101,25 @@ function findNodeFilesSync(filter: NodeFilter) {
|
|||
}
|
||||
|
||||
async function findFilesInPeersPath(): Promise<string[]> {
|
||||
const files = await pglob(config.server.peersPath + '/*');
|
||||
const files = await pglob(config.server.peersPath + "/*");
|
||||
|
||||
return await async.filter(files, (file, fileCallback) => {
|
||||
if (file[0] === '.') {
|
||||
if (file[0] === ".") {
|
||||
return fileCallback(null, false);
|
||||
}
|
||||
|
||||
fs.lstat(file)
|
||||
.then(stats => fileCallback(null, stats.isFile()))
|
||||
.then((stats) => fileCallback(null, stats.isFile()))
|
||||
.catch(fileCallback);
|
||||
});
|
||||
}
|
||||
|
||||
function parseNodeFilename(filename: string): NodeFilenameParsed {
|
||||
const parts = filename.split('@', 5);
|
||||
const parts = filename.split("@", 5);
|
||||
|
||||
function get<T>(isT: TypeGuard<T>, index: number): T | undefined {
|
||||
const value = index >= 0 && index < parts.length ? parts[index] : undefined;
|
||||
const value =
|
||||
index >= 0 && index < parts.length ? parts[index] : undefined;
|
||||
return isT(value) ? value : undefined;
|
||||
}
|
||||
|
||||
|
@ -140,35 +145,65 @@ function isDuplicate(filter: NodeFilter, token?: Token): boolean {
|
|||
return parseNodeFilename(files[0]).token !== token;
|
||||
}
|
||||
|
||||
function checkNoDuplicates(token: Token | undefined, node: BaseNode, nodeSecrets: NodeSecrets): void {
|
||||
if (isDuplicate({hostname: node.hostname}, token)) {
|
||||
throw {data: {msg: 'Already exists.', field: 'hostname'}, type: ErrorTypes.conflict};
|
||||
function checkNoDuplicates(
|
||||
token: Token | undefined,
|
||||
node: BaseNode,
|
||||
nodeSecrets: NodeSecrets
|
||||
): void {
|
||||
if (isDuplicate({ hostname: node.hostname }, token)) {
|
||||
throw {
|
||||
data: { msg: "Already exists.", field: "hostname" },
|
||||
type: ErrorTypes.conflict,
|
||||
};
|
||||
}
|
||||
|
||||
if (node.key) {
|
||||
if (isDuplicate({key: node.key}, token)) {
|
||||
throw {data: {msg: 'Already exists.', field: 'key'}, type: ErrorTypes.conflict};
|
||||
if (isDuplicate({ key: node.key }, token)) {
|
||||
throw {
|
||||
data: { msg: "Already exists.", field: "key" },
|
||||
type: ErrorTypes.conflict,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (isDuplicate({mac: node.mac}, token)) {
|
||||
throw {data: {msg: 'Already exists.', field: 'mac'}, type: ErrorTypes.conflict};
|
||||
if (isDuplicate({ mac: node.mac }, token)) {
|
||||
throw {
|
||||
data: { msg: "Already exists.", field: "mac" },
|
||||
type: ErrorTypes.conflict,
|
||||
};
|
||||
}
|
||||
|
||||
if (nodeSecrets.monitoringToken && isDuplicate({monitoringToken: nodeSecrets.monitoringToken}, token)) {
|
||||
throw {data: {msg: 'Already exists.', field: 'monitoringToken'}, type: ErrorTypes.conflict};
|
||||
if (
|
||||
nodeSecrets.monitoringToken &&
|
||||
isDuplicate({ monitoringToken: nodeSecrets.monitoringToken }, token)
|
||||
) {
|
||||
throw {
|
||||
data: { msg: "Already exists.", field: "monitoringToken" },
|
||||
type: ErrorTypes.conflict,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function toNodeFilename(token: Token, node: BaseNode, nodeSecrets: NodeSecrets): string {
|
||||
return config.server.peersPath + '/' +
|
||||
function toNodeFilename(
|
||||
token: Token,
|
||||
node: BaseNode,
|
||||
nodeSecrets: NodeSecrets
|
||||
): string {
|
||||
return (
|
||||
config.server.peersPath +
|
||||
"/" +
|
||||
(
|
||||
(node.hostname || '') + '@' +
|
||||
(node.mac || '') + '@' +
|
||||
(node.key || '') + '@' +
|
||||
(token || '') + '@' +
|
||||
(nodeSecrets.monitoringToken || '')
|
||||
).toLowerCase();
|
||||
(node.hostname || "") +
|
||||
"@" +
|
||||
(node.mac || "") +
|
||||
"@" +
|
||||
(node.key || "") +
|
||||
"@" +
|
||||
(token || "") +
|
||||
"@" +
|
||||
(nodeSecrets.monitoringToken || "")
|
||||
).toLowerCase()
|
||||
);
|
||||
}
|
||||
|
||||
function getNodeValue(
|
||||
|
@ -194,7 +229,10 @@ function getNodeValue(
|
|||
case LINE_PREFIX.MONITORING:
|
||||
if (node.monitoring && monitoringState === MonitoringState.ACTIVE) {
|
||||
return "aktiv";
|
||||
} else if (node.monitoring && monitoringState === MonitoringState.PENDING) {
|
||||
} else if (
|
||||
node.monitoring &&
|
||||
monitoringState === MonitoringState.PENDING
|
||||
) {
|
||||
return "pending";
|
||||
}
|
||||
return "";
|
||||
|
@ -210,13 +248,19 @@ async function writeNodeFile(
|
|||
token: Token,
|
||||
node: CreateOrUpdateNode,
|
||||
monitoringState: MonitoringState,
|
||||
nodeSecrets: NodeSecrets,
|
||||
nodeSecrets: NodeSecrets
|
||||
): Promise<StoredNode> {
|
||||
const filename = toNodeFilename(token, node, nodeSecrets);
|
||||
let data = '';
|
||||
let data = "";
|
||||
|
||||
for (const prefix of Object.values(LINE_PREFIX)) {
|
||||
data += `${prefix}${getNodeValue(prefix, token, node, monitoringState, nodeSecrets)}\n`;
|
||||
data += `${prefix}${getNodeValue(
|
||||
prefix,
|
||||
token,
|
||||
node,
|
||||
monitoringState,
|
||||
nodeSecrets
|
||||
)}\n`;
|
||||
}
|
||||
|
||||
if (node.key) {
|
||||
|
@ -225,9 +269,9 @@ async function writeNodeFile(
|
|||
|
||||
// since node.js is single threaded we don't need a lock when working with synchronous operations
|
||||
if (isUpdate) {
|
||||
const files = findNodeFilesSync({token: token});
|
||||
const files = findNodeFilesSync({ token: token });
|
||||
if (files.length !== 1) {
|
||||
throw {data: 'Node not found.', type: ErrorTypes.notFound};
|
||||
throw { data: "Node not found.", type: ErrorTypes.notFound };
|
||||
}
|
||||
|
||||
checkNoDuplicates(token, node, nodeSecrets);
|
||||
|
@ -236,41 +280,65 @@ async function writeNodeFile(
|
|||
try {
|
||||
oldFs.unlinkSync(file);
|
||||
} catch (error) {
|
||||
Logger.tag('node', 'save').error('Could not delete old node file: ' + file, error);
|
||||
throw {data: 'Could not remove old node data.', type: ErrorTypes.internalError};
|
||||
Logger.tag("node", "save").error(
|
||||
"Could not delete old node file: " + file,
|
||||
error
|
||||
);
|
||||
throw {
|
||||
data: "Could not remove old node data.",
|
||||
type: ErrorTypes.internalError,
|
||||
};
|
||||
}
|
||||
} else {
|
||||
checkNoDuplicates(undefined, node, nodeSecrets);
|
||||
}
|
||||
|
||||
try {
|
||||
oldFs.writeFileSync(filename, data, 'utf8');
|
||||
const {node: storedNode} = await parseNodeFile(filename);
|
||||
oldFs.writeFileSync(filename, data, "utf8");
|
||||
const { node: storedNode } = await parseNodeFile(filename);
|
||||
return storedNode;
|
||||
} catch (error) {
|
||||
Logger.tag('node', 'save').error('Could not write node file: ' + filename, error);
|
||||
throw {data: 'Could not write node data.', type: ErrorTypes.internalError};
|
||||
Logger.tag("node", "save").error(
|
||||
"Could not write node file: " + filename,
|
||||
error
|
||||
);
|
||||
throw {
|
||||
data: "Could not write node data.",
|
||||
type: ErrorTypes.internalError,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function deleteNodeFile(token: Token): Promise<void> {
|
||||
let files;
|
||||
try {
|
||||
files = await findNodeFiles({token: token});
|
||||
files = await findNodeFiles({ token: token });
|
||||
} catch (error) {
|
||||
Logger.tag('node', 'delete').error('Could not find node file: ' + files, error);
|
||||
throw {data: 'Could not delete node.', type: ErrorTypes.internalError};
|
||||
Logger.tag("node", "delete").error(
|
||||
"Could not find node file: " + files,
|
||||
error
|
||||
);
|
||||
throw {
|
||||
data: "Could not delete node.",
|
||||
type: ErrorTypes.internalError,
|
||||
};
|
||||
}
|
||||
|
||||
if (files.length !== 1) {
|
||||
throw {data: 'Node not found.', type: ErrorTypes.notFound};
|
||||
throw { data: "Node not found.", type: ErrorTypes.notFound };
|
||||
}
|
||||
|
||||
try {
|
||||
oldFs.unlinkSync(files[0]);
|
||||
} catch (error) {
|
||||
Logger.tag('node', 'delete').error('Could not delete node file: ' + files, error);
|
||||
throw {data: 'Could not delete node.', type: ErrorTypes.internalError};
|
||||
Logger.tag("node", "delete").error(
|
||||
"Could not delete node file: " + files,
|
||||
error
|
||||
);
|
||||
throw {
|
||||
data: "Could not delete node.",
|
||||
type: ErrorTypes.internalError,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -284,10 +352,7 @@ class StoredNodeBuilder {
|
|||
public mac: MAC = "" as MAC; // FIXME: Either make mac optional in Node or handle this!
|
||||
public monitoringState: MonitoringState = MonitoringState.DISABLED;
|
||||
|
||||
constructor(
|
||||
public readonly modifiedAt: UnixTimestampSeconds,
|
||||
) {
|
||||
}
|
||||
constructor(public readonly modifiedAt: UnixTimestampSeconds) {}
|
||||
|
||||
public build(): StoredNode {
|
||||
const node = {
|
||||
|
@ -304,14 +369,22 @@ class StoredNodeBuilder {
|
|||
|
||||
if (!isStoredNode(node)) {
|
||||
logger.tag("NodeService").error("Not a valid StoredNode:", node);
|
||||
throw {data: "Could not build StoredNode.", type: ErrorTypes.internalError};
|
||||
throw {
|
||||
data: "Could not build StoredNode.",
|
||||
type: ErrorTypes.internalError,
|
||||
};
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
}
|
||||
|
||||
function setNodeValue(prefix: LINE_PREFIX, node: StoredNodeBuilder, nodeSecrets: NodeSecrets, value: string) {
|
||||
function setNodeValue(
|
||||
prefix: LINE_PREFIX,
|
||||
node: StoredNodeBuilder,
|
||||
nodeSecrets: NodeSecrets,
|
||||
value: string
|
||||
) {
|
||||
switch (prefix) {
|
||||
case LINE_PREFIX.HOSTNAME:
|
||||
node.hostname = value as Hostname;
|
||||
|
@ -332,10 +405,13 @@ function setNodeValue(prefix: LINE_PREFIX, node: StoredNodeBuilder, nodeSecrets:
|
|||
node.token = value as Token;
|
||||
break;
|
||||
case LINE_PREFIX.MONITORING:
|
||||
const active = value === 'aktiv';
|
||||
const pending = value === 'pending';
|
||||
node.monitoringState =
|
||||
active ? MonitoringState.ACTIVE : (pending ? MonitoringState.PENDING : MonitoringState.DISABLED);
|
||||
const active = value === "aktiv";
|
||||
const pending = value === "pending";
|
||||
node.monitoringState = active
|
||||
? MonitoringState.ACTIVE
|
||||
: pending
|
||||
? MonitoringState.PENDING
|
||||
: MonitoringState.DISABLED;
|
||||
break;
|
||||
case LINE_PREFIX.MONITORING_TOKEN:
|
||||
nodeSecrets.monitoringToken = value as MonitoringToken;
|
||||
|
@ -346,11 +422,14 @@ function setNodeValue(prefix: LINE_PREFIX, node: StoredNodeBuilder, nodeSecrets:
|
|||
}
|
||||
|
||||
async function getModifiedAt(file: string): Promise<UnixTimestampSeconds> {
|
||||
const modifiedAtMs = (await fs.lstat(file)).mtimeMs as UnixTimestampMilliseconds;
|
||||
const modifiedAtMs = (await fs.lstat(file))
|
||||
.mtimeMs as UnixTimestampMilliseconds;
|
||||
return toUnixTimestampSeconds(modifiedAtMs);
|
||||
}
|
||||
|
||||
async function parseNodeFile(file: string): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets }> {
|
||||
async function parseNodeFile(
|
||||
file: string
|
||||
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets }> {
|
||||
const contents = await fs.readFile(file);
|
||||
const modifiedAt = await getModifiedAt(file);
|
||||
|
||||
|
@ -365,7 +444,9 @@ async function parseNodeFile(file: string): Promise<{ node: StoredNode, nodeSecr
|
|||
} else {
|
||||
for (const prefix of Object.values(LINE_PREFIX)) {
|
||||
if (line.substring(0, prefix.length) === prefix) {
|
||||
const value = normalizeString(line.substring(prefix.length));
|
||||
const value = normalizeString(
|
||||
line.substring(prefix.length)
|
||||
);
|
||||
setNodeValue(prefix, node, nodeSecrets, value);
|
||||
break;
|
||||
}
|
||||
|
@ -379,7 +460,9 @@ async function parseNodeFile(file: string): Promise<{ node: StoredNode, nodeSecr
|
|||
};
|
||||
}
|
||||
|
||||
async function findNodeDataByFilePattern(filter: NodeFilter): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets } | null> {
|
||||
async function findNodeDataByFilePattern(
|
||||
filter: NodeFilter
|
||||
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets } | null> {
|
||||
const files = await findNodeFiles(filter);
|
||||
|
||||
if (files.length !== 1) {
|
||||
|
@ -390,22 +473,27 @@ async function findNodeDataByFilePattern(filter: NodeFilter): Promise<{ node: St
|
|||
return await parseNodeFile(file);
|
||||
}
|
||||
|
||||
async function getNodeDataByFilePattern(filter: NodeFilter): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets }> {
|
||||
async function getNodeDataByFilePattern(
|
||||
filter: NodeFilter
|
||||
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets }> {
|
||||
const result = await findNodeDataByFilePattern(filter);
|
||||
if (!result) {
|
||||
throw {data: 'Node not found.', type: ErrorTypes.notFound};
|
||||
throw { data: "Node not found.", type: ErrorTypes.notFound };
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async function sendMonitoringConfirmationMail(node: StoredNode, nodeSecrets: NodeSecrets): Promise<void> {
|
||||
async function sendMonitoringConfirmationMail(
|
||||
node: StoredNode,
|
||||
nodeSecrets: NodeSecrets
|
||||
): Promise<void> {
|
||||
const monitoringToken = nodeSecrets.monitoringToken;
|
||||
if (!monitoringToken) {
|
||||
Logger
|
||||
.tag('monitoring', 'confirmation')
|
||||
.error('Could not enqueue confirmation mail. No monitoring token found.');
|
||||
throw {data: 'Internal error.', type: ErrorTypes.internalError};
|
||||
Logger.tag("monitoring", "confirmation").error(
|
||||
"Could not enqueue confirmation mail. No monitoring token found."
|
||||
);
|
||||
throw { data: "Internal error.", type: ErrorTypes.internalError };
|
||||
}
|
||||
|
||||
const confirmUrl = monitoringConfirmUrl(monitoringToken);
|
||||
|
@ -413,26 +501,36 @@ async function sendMonitoringConfirmationMail(node: StoredNode, nodeSecrets: Nod
|
|||
|
||||
await MailService.enqueue(
|
||||
config.server.email.from,
|
||||
node.nickname + ' <' + node.email + '>',
|
||||
node.nickname + " <" + node.email + ">",
|
||||
MailType.MONITORING_CONFIRMATION,
|
||||
{
|
||||
node: node,
|
||||
confirmUrl: confirmUrl,
|
||||
disableUrl: disableUrl
|
||||
},
|
||||
disableUrl: disableUrl,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export async function createNode(node: CreateOrUpdateNode): Promise<StoredNode> {
|
||||
export async function createNode(
|
||||
node: CreateOrUpdateNode
|
||||
): Promise<StoredNode> {
|
||||
const token: Token = generateToken();
|
||||
const nodeSecrets: NodeSecrets = {};
|
||||
|
||||
const monitoringState = node.monitoring ? MonitoringState.PENDING : MonitoringState.DISABLED;
|
||||
const monitoringState = node.monitoring
|
||||
? MonitoringState.PENDING
|
||||
: MonitoringState.DISABLED;
|
||||
if (node.monitoring) {
|
||||
nodeSecrets.monitoringToken = generateToken<MonitoringToken>();
|
||||
}
|
||||
|
||||
const createdNode = await writeNodeFile(false, token, node, monitoringState, nodeSecrets);
|
||||
const createdNode = await writeNodeFile(
|
||||
false,
|
||||
token,
|
||||
node,
|
||||
monitoringState,
|
||||
nodeSecrets
|
||||
);
|
||||
|
||||
if (createdNode.monitoringState == MonitoringState.PENDING) {
|
||||
await sendMonitoringConfirmationMail(createdNode, nodeSecrets);
|
||||
|
@ -441,8 +539,12 @@ export async function createNode(node: CreateOrUpdateNode): Promise<StoredNode>
|
|||
return createdNode;
|
||||
}
|
||||
|
||||
export async function updateNode(token: Token, node: CreateOrUpdateNode): Promise<StoredNode> {
|
||||
const {node: currentNode, nodeSecrets} = await getNodeDataWithSecretsByToken(token);
|
||||
export async function updateNode(
|
||||
token: Token,
|
||||
node: CreateOrUpdateNode
|
||||
): Promise<StoredNode> {
|
||||
const { node: currentNode, nodeSecrets } =
|
||||
await getNodeDataWithSecretsByToken(token);
|
||||
|
||||
let monitoringState = MonitoringState.DISABLED;
|
||||
let monitoringToken: MonitoringToken | undefined = undefined;
|
||||
|
@ -461,11 +563,12 @@ export async function updateNode(token: Token, node: CreateOrUpdateNode): Promis
|
|||
// new email so we need a new token and a reconfirmation
|
||||
monitoringState = MonitoringState.PENDING;
|
||||
monitoringToken = generateToken<MonitoringToken>();
|
||||
|
||||
} else {
|
||||
// email unchanged, keep token (fix if not set) and confirmation state
|
||||
monitoringState = currentNode.monitoringState;
|
||||
monitoringToken = nodeSecrets.monitoringToken || generateToken<MonitoringToken>();
|
||||
monitoringToken =
|
||||
nodeSecrets.monitoringToken ||
|
||||
generateToken<MonitoringToken>();
|
||||
}
|
||||
break;
|
||||
|
||||
|
@ -476,9 +579,15 @@ export async function updateNode(token: Token, node: CreateOrUpdateNode): Promis
|
|||
|
||||
nodeSecrets.monitoringToken = monitoringToken;
|
||||
|
||||
const storedNode = await writeNodeFile(true, token, node, monitoringState, nodeSecrets);
|
||||
const storedNode = await writeNodeFile(
|
||||
true,
|
||||
token,
|
||||
node,
|
||||
monitoringState,
|
||||
nodeSecrets
|
||||
);
|
||||
if (storedNode.monitoringState === MonitoringState.PENDING) {
|
||||
await sendMonitoringConfirmationMail(storedNode, nodeSecrets)
|
||||
await sendMonitoringConfirmationMail(storedNode, nodeSecrets);
|
||||
}
|
||||
|
||||
return storedNode;
|
||||
|
@ -488,7 +597,7 @@ export async function internalUpdateNode(
|
|||
token: Token,
|
||||
node: CreateOrUpdateNode,
|
||||
monitoringState: MonitoringState,
|
||||
nodeSecrets: NodeSecrets,
|
||||
nodeSecrets: NodeSecrets
|
||||
): Promise<StoredNode> {
|
||||
return await writeNodeFile(true, token, node, monitoringState, nodeSecrets);
|
||||
}
|
||||
|
@ -502,52 +611,58 @@ export async function getAllNodes(): Promise<StoredNode[]> {
|
|||
try {
|
||||
files = await findNodeFiles({});
|
||||
} catch (error) {
|
||||
Logger.tag('nodes').error('Error getting all nodes:', error);
|
||||
throw {data: 'Internal error.', type: ErrorTypes.internalError};
|
||||
Logger.tag("nodes").error("Error getting all nodes:", error);
|
||||
throw { data: "Internal error.", type: ErrorTypes.internalError };
|
||||
}
|
||||
|
||||
const nodes: StoredNode[] = [];
|
||||
for (const file of files) {
|
||||
try {
|
||||
const {node} = await parseNodeFile(file);
|
||||
const { node } = await parseNodeFile(file);
|
||||
nodes.push(node);
|
||||
} catch (error) {
|
||||
Logger.tag('nodes').error('Error getting all nodes:', error);
|
||||
throw {data: 'Internal error.', type: ErrorTypes.internalError};
|
||||
Logger.tag("nodes").error("Error getting all nodes:", error);
|
||||
throw { data: "Internal error.", type: ErrorTypes.internalError };
|
||||
}
|
||||
}
|
||||
|
||||
return nodes;
|
||||
}
|
||||
|
||||
export async function findNodeDataWithSecretsByMac(mac: MAC): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets } | null> {
|
||||
return await findNodeDataByFilePattern({mac});
|
||||
export async function findNodeDataWithSecretsByMac(
|
||||
mac: MAC
|
||||
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets } | null> {
|
||||
return await findNodeDataByFilePattern({ mac });
|
||||
}
|
||||
|
||||
export async function findNodeDataByMac(mac: MAC): Promise<StoredNode | null> {
|
||||
const result = await findNodeDataByFilePattern({mac});
|
||||
const result = await findNodeDataByFilePattern({ mac });
|
||||
return result ? result.node : null;
|
||||
}
|
||||
|
||||
export async function getNodeDataWithSecretsByToken(token: Token): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets }> {
|
||||
return await getNodeDataByFilePattern({token: token});
|
||||
export async function getNodeDataWithSecretsByToken(
|
||||
token: Token
|
||||
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets }> {
|
||||
return await getNodeDataByFilePattern({ token: token });
|
||||
}
|
||||
|
||||
export async function getNodeDataByToken(token: Token): Promise<StoredNode> {
|
||||
const {node} = await getNodeDataByFilePattern({token: token});
|
||||
const { node } = await getNodeDataByFilePattern({ token: token });
|
||||
return node;
|
||||
}
|
||||
|
||||
export async function getNodeDataWithSecretsByMonitoringToken(
|
||||
monitoringToken: MonitoringToken
|
||||
): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets }> {
|
||||
return await getNodeDataByFilePattern({monitoringToken: monitoringToken});
|
||||
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets }> {
|
||||
return await getNodeDataByFilePattern({ monitoringToken: monitoringToken });
|
||||
}
|
||||
|
||||
export async function getNodeDataByMonitoringToken(
|
||||
monitoringToken: MonitoringToken
|
||||
): Promise<StoredNode> {
|
||||
const {node} = await getNodeDataByFilePattern({monitoringToken: monitoringToken});
|
||||
const { node } = await getNodeDataByFilePattern({
|
||||
monitoringToken: monitoringToken,
|
||||
});
|
||||
return node;
|
||||
}
|
||||
|
||||
|
@ -555,7 +670,7 @@ export async function fixNodeFilenames(): Promise<void> {
|
|||
const files = await findFilesInPeersPath();
|
||||
|
||||
for (const file of files) {
|
||||
const {node, nodeSecrets} = await parseNodeFile(file);
|
||||
const { node, nodeSecrets } = await parseNodeFile(file);
|
||||
|
||||
const expectedFilename = toNodeFilename(node.token, node, nodeSecrets);
|
||||
if (file !== expectedFilename) {
|
||||
|
@ -563,16 +678,23 @@ export async function fixNodeFilenames(): Promise<void> {
|
|||
await fs.rename(file, expectedFilename);
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
'Cannot rename file ' + file + ' to ' + expectedFilename + ' => ' + error
|
||||
"Cannot rename file " +
|
||||
file +
|
||||
" to " +
|
||||
expectedFilename +
|
||||
" => " +
|
||||
error
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function findNodesModifiedBefore(timestamp: UnixTimestampSeconds): Promise<StoredNode[]> {
|
||||
export async function findNodesModifiedBefore(
|
||||
timestamp: UnixTimestampSeconds
|
||||
): Promise<StoredNode[]> {
|
||||
const nodes = await getAllNodes();
|
||||
return nodes.filter(node => node.modifiedAt < timestamp);
|
||||
return nodes.filter((node) => node.modifiedAt < timestamp);
|
||||
}
|
||||
|
||||
export async function getNodeStatistics(): Promise<NodeStatistics> {
|
||||
|
@ -584,8 +706,8 @@ export async function getNodeStatistics(): Promise<NodeStatistics> {
|
|||
withCoords: 0,
|
||||
monitoring: {
|
||||
active: 0,
|
||||
pending: 0
|
||||
}
|
||||
pending: 0,
|
||||
},
|
||||
};
|
||||
|
||||
for (const node of nodes) {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import {ArrayField, Field, RawJsonField} from "sparkson";
|
||||
import { ArrayField, Field, RawJsonField } from "sparkson";
|
||||
|
||||
// Types shared with the client.
|
||||
export type TypeGuard<T> = (arg: unknown) => arg is T;
|
||||
|
@ -49,8 +49,7 @@ export function isJSONObject(arg: unknown): arg is JSONObject {
|
|||
return true;
|
||||
}
|
||||
|
||||
export interface JSONArray extends Array<JSONValue> {
|
||||
}
|
||||
export type JSONArray = Array<JSONValue>;
|
||||
|
||||
export const isJSONArray = toIsArray(isJSONValue);
|
||||
|
||||
|
@ -65,39 +64,48 @@ export function isObject(arg: unknown): arg is object {
|
|||
return arg !== null && typeof arg === "object";
|
||||
}
|
||||
|
||||
export function hasOwnProperty<Key extends PropertyKey>(
|
||||
arg: unknown,
|
||||
key: Key
|
||||
): arg is Record<Key, unknown> {
|
||||
return isObject(arg) && key in arg;
|
||||
}
|
||||
|
||||
export function isArray<T>(arg: unknown, isT: TypeGuard<T>): arg is Array<T> {
|
||||
if (!Array.isArray(arg)) {
|
||||
return false;
|
||||
}
|
||||
for (const element of arg) {
|
||||
if (!isT(element)) {
|
||||
return false
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
export function isMap(arg: unknown): arg is Map<any, any> {
|
||||
export function isMap(arg: unknown): arg is Map<unknown, unknown> {
|
||||
return arg instanceof Map;
|
||||
}
|
||||
|
||||
export function isString(arg: unknown): arg is string {
|
||||
return typeof arg === "string"
|
||||
return typeof arg === "string";
|
||||
}
|
||||
|
||||
// noinspection JSUnusedLocalSymbols
|
||||
export function toIsNewtype<
|
||||
Type extends Value & { readonly __tag: symbol },
|
||||
Value,
|
||||
>(isValue: TypeGuard<Value>, _example: Type): TypeGuard<Type> {
|
||||
Value
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
>(isValue: TypeGuard<Value>, example: Type): TypeGuard<Type> {
|
||||
return (arg: unknown): arg is Type => isValue(arg);
|
||||
}
|
||||
|
||||
export function isNumber(arg: unknown): arg is number {
|
||||
return typeof arg === "number"
|
||||
return typeof arg === "number";
|
||||
}
|
||||
|
||||
export function isBoolean(arg: unknown): arg is boolean {
|
||||
return typeof arg === "boolean"
|
||||
return typeof arg === "boolean";
|
||||
}
|
||||
|
||||
export function isUndefined(arg: unknown): arg is undefined {
|
||||
|
@ -113,14 +121,18 @@ export function toIsArray<T>(isT: TypeGuard<T>): TypeGuard<T[]> {
|
|||
}
|
||||
|
||||
export function toIsEnum<E>(enumDef: E): EnumTypeGuard<E> {
|
||||
return (arg): arg is EnumValue<E> => Object.values(enumDef).includes(arg as [keyof E]);
|
||||
return (arg): arg is EnumValue<E> =>
|
||||
Object.values(enumDef).includes(arg as [keyof E]);
|
||||
}
|
||||
|
||||
export function isRegExp(arg: unknown): arg is RegExp {
|
||||
return isObject(arg) && arg instanceof RegExp;
|
||||
}
|
||||
|
||||
export function isOptional<T>(arg: unknown, isT: TypeGuard<T>): arg is (T | undefined) {
|
||||
export function isOptional<T>(
|
||||
arg: unknown,
|
||||
isT: TypeGuard<T>
|
||||
): arg is T | undefined {
|
||||
return arg === undefined || isT(arg);
|
||||
}
|
||||
|
||||
|
@ -160,7 +172,7 @@ export function isNodeStatistics(arg: unknown): arg is NodeStatistics {
|
|||
|
||||
export type Statistics = {
|
||||
nodes: NodeStatistics;
|
||||
}
|
||||
};
|
||||
|
||||
export function isStatistics(arg: unknown): arg is Statistics {
|
||||
return isObject(arg) && isNodeStatistics((arg as Statistics).nodes);
|
||||
|
@ -172,9 +184,8 @@ export class CommunityConfig {
|
|||
@Field("domain") public domain: string,
|
||||
@Field("contactEmail") public contactEmail: EmailAddress,
|
||||
@ArrayField("sites", String) public sites: Site[],
|
||||
@ArrayField("domains", String) public domains: Domain[],
|
||||
) {
|
||||
}
|
||||
@ArrayField("domains", String) public domains: Domain[]
|
||||
) {}
|
||||
}
|
||||
|
||||
export function isCommunityConfig(arg: unknown): arg is CommunityConfig {
|
||||
|
@ -194,9 +205,8 @@ export function isCommunityConfig(arg: unknown): arg is CommunityConfig {
|
|||
export class LegalConfig {
|
||||
constructor(
|
||||
@Field("privacyUrl", true) public privacyUrl?: Url,
|
||||
@Field("imprintUrl", true) public imprintUrl?: Url,
|
||||
) {
|
||||
}
|
||||
@Field("imprintUrl", true) public imprintUrl?: Url
|
||||
) {}
|
||||
}
|
||||
|
||||
export function isLegalConfig(arg: unknown): arg is LegalConfig {
|
||||
|
@ -205,16 +215,12 @@ export function isLegalConfig(arg: unknown): arg is LegalConfig {
|
|||
}
|
||||
const cfg = arg as LegalConfig;
|
||||
return (
|
||||
isOptional(cfg.privacyUrl, isUrl) &&
|
||||
isOptional(cfg.imprintUrl, isUrl)
|
||||
isOptional(cfg.privacyUrl, isUrl) && isOptional(cfg.imprintUrl, isUrl)
|
||||
);
|
||||
}
|
||||
|
||||
export class ClientMapConfig {
|
||||
constructor(
|
||||
@Field("mapUrl") public mapUrl: Url,
|
||||
) {
|
||||
}
|
||||
constructor(@Field("mapUrl") public mapUrl: Url) {}
|
||||
}
|
||||
|
||||
export function isClientMapConfig(arg: unknown): arg is ClientMapConfig {
|
||||
|
@ -226,10 +232,7 @@ export function isClientMapConfig(arg: unknown): arg is ClientMapConfig {
|
|||
}
|
||||
|
||||
export class MonitoringConfig {
|
||||
constructor(
|
||||
@Field("enabled") public enabled: boolean,
|
||||
) {
|
||||
}
|
||||
constructor(@Field("enabled") public enabled: boolean) {}
|
||||
}
|
||||
|
||||
export function isMonitoringConfig(arg: unknown): arg is MonitoringConfig {
|
||||
|
@ -243,9 +246,8 @@ export function isMonitoringConfig(arg: unknown): arg is MonitoringConfig {
|
|||
export class CoordinatesConfig {
|
||||
constructor(
|
||||
@Field("lat") public lat: number,
|
||||
@Field("lng") public lng: number,
|
||||
) {
|
||||
}
|
||||
@Field("lng") public lng: number
|
||||
) {}
|
||||
}
|
||||
|
||||
export function isCoordinatesConfig(arg: unknown): arg is CoordinatesConfig {
|
||||
|
@ -253,10 +255,7 @@ export function isCoordinatesConfig(arg: unknown): arg is CoordinatesConfig {
|
|||
return false;
|
||||
}
|
||||
const coords = arg as CoordinatesConfig;
|
||||
return (
|
||||
isNumber(coords.lat) &&
|
||||
isNumber(coords.lng)
|
||||
);
|
||||
return isNumber(coords.lat) && isNumber(coords.lng);
|
||||
}
|
||||
|
||||
export class CoordinatesSelectorConfig {
|
||||
|
@ -264,12 +263,13 @@ export class CoordinatesSelectorConfig {
|
|||
@Field("lat") public lat: number,
|
||||
@Field("lng") public lng: number,
|
||||
@Field("defaultZoom") public defaultZoom: number,
|
||||
@RawJsonField("layers") public layers: JSONObject,
|
||||
) {
|
||||
}
|
||||
@RawJsonField("layers") public layers: JSONObject
|
||||
) {}
|
||||
}
|
||||
|
||||
export function isCoordinatesSelectorConfig(arg: unknown): arg is CoordinatesSelectorConfig {
|
||||
export function isCoordinatesSelectorConfig(
|
||||
arg: unknown
|
||||
): arg is CoordinatesSelectorConfig {
|
||||
if (!isObject(arg)) {
|
||||
return false;
|
||||
}
|
||||
|
@ -286,12 +286,14 @@ export class OtherCommunityInfoConfig {
|
|||
constructor(
|
||||
@Field("showInfo") public showInfo: boolean,
|
||||
@Field("showBorderForDebugging") public showBorderForDebugging: boolean,
|
||||
@ArrayField("localCommunityPolygon", CoordinatesConfig) public localCommunityPolygon: CoordinatesConfig[],
|
||||
) {
|
||||
}
|
||||
@ArrayField("localCommunityPolygon", CoordinatesConfig)
|
||||
public localCommunityPolygon: CoordinatesConfig[]
|
||||
) {}
|
||||
}
|
||||
|
||||
export function isOtherCommunityInfoConfig(arg: unknown): arg is OtherCommunityInfoConfig {
|
||||
export function isOtherCommunityInfoConfig(
|
||||
arg: unknown
|
||||
): arg is OtherCommunityInfoConfig {
|
||||
if (!isObject(arg)) {
|
||||
return false;
|
||||
}
|
||||
|
@ -309,11 +311,12 @@ export class ClientConfig {
|
|||
@Field("legal") public legal: LegalConfig,
|
||||
@Field("map") public map: ClientMapConfig,
|
||||
@Field("monitoring") public monitoring: MonitoringConfig,
|
||||
@Field("coordsSelector") public coordsSelector: CoordinatesSelectorConfig,
|
||||
@Field("otherCommunityInfo") public otherCommunityInfo: OtherCommunityInfoConfig,
|
||||
@Field("rootPath", true, undefined, "/") public rootPath: string,
|
||||
) {
|
||||
}
|
||||
@Field("coordsSelector")
|
||||
public coordsSelector: CoordinatesSelectorConfig,
|
||||
@Field("otherCommunityInfo")
|
||||
public otherCommunityInfo: OtherCommunityInfoConfig,
|
||||
@Field("rootPath", true, undefined, "/") public rootPath: string
|
||||
) {}
|
||||
}
|
||||
|
||||
export function isClientConfig(arg: unknown): arg is ClientConfig {
|
||||
|
@ -345,15 +348,28 @@ export type DurationSeconds = number & { readonly __tag: unique symbol };
|
|||
export const isDurationSeconds = toIsNewtype(isNumber, NaN as DurationSeconds);
|
||||
|
||||
export type DurationMilliseconds = number & { readonly __tag: unique symbol };
|
||||
export const isDurationMilliseconds = toIsNewtype(isNumber, NaN as DurationMilliseconds);
|
||||
export const isDurationMilliseconds = toIsNewtype(
|
||||
isNumber,
|
||||
NaN as DurationMilliseconds
|
||||
);
|
||||
|
||||
export type UnixTimestampSeconds = number & { readonly __tag: unique symbol };
|
||||
export const isUnixTimestampSeconds = toIsNewtype(isNumber, NaN as UnixTimestampSeconds);
|
||||
export const isUnixTimestampSeconds = toIsNewtype(
|
||||
isNumber,
|
||||
NaN as UnixTimestampSeconds
|
||||
);
|
||||
|
||||
export type UnixTimestampMilliseconds = number & { readonly __tag: unique symbol };
|
||||
export const isUnixTimestampMilliseconds = toIsNewtype(isNumber, NaN as UnixTimestampMilliseconds);
|
||||
export type UnixTimestampMilliseconds = number & {
|
||||
readonly __tag: unique symbol;
|
||||
};
|
||||
export const isUnixTimestampMilliseconds = toIsNewtype(
|
||||
isNumber,
|
||||
NaN as UnixTimestampMilliseconds
|
||||
);
|
||||
|
||||
export function toUnixTimestampSeconds(ms: UnixTimestampMilliseconds): UnixTimestampSeconds {
|
||||
export function toUnixTimestampSeconds(
|
||||
ms: UnixTimestampMilliseconds
|
||||
): UnixTimestampSeconds {
|
||||
return Math.floor(ms) as UnixTimestampSeconds;
|
||||
}
|
||||
|
||||
|
@ -371,7 +387,7 @@ export const isMonitoringState = toIsEnum(MonitoringState);
|
|||
export type NodeId = string & { readonly __tag: unique symbol };
|
||||
export const isNodeId = toIsNewtype(isString, "" as NodeId);
|
||||
|
||||
export type Hostname = string & { readonly __tag: unique symbol }
|
||||
export type Hostname = string & { readonly __tag: unique symbol };
|
||||
export const isHostname = toIsNewtype(isString, "" as Hostname);
|
||||
|
||||
export type Nickname = string & { readonly __tag: unique symbol };
|
||||
|
@ -390,7 +406,7 @@ export type BaseNode = {
|
|||
coords?: Coordinates;
|
||||
key?: FastdKey;
|
||||
mac: MAC;
|
||||
}
|
||||
};
|
||||
|
||||
export function isBaseNode(arg: unknown): arg is BaseNode {
|
||||
if (!isObject(arg)) {
|
||||
|
@ -412,16 +428,14 @@ export function isBaseNode(arg: unknown): arg is BaseNode {
|
|||
*/
|
||||
export type CreateOrUpdateNode = BaseNode & {
|
||||
monitoring: boolean;
|
||||
}
|
||||
};
|
||||
|
||||
export function isCreateOrUpdateNode(arg: unknown): arg is CreateOrUpdateNode {
|
||||
if (!isBaseNode(arg)) {
|
||||
return false;
|
||||
}
|
||||
const node = arg as CreateOrUpdateNode;
|
||||
return (
|
||||
isBoolean(node.monitoring)
|
||||
);
|
||||
return isBoolean(node.monitoring);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -431,7 +445,7 @@ export type StoredNode = BaseNode & {
|
|||
token: Token;
|
||||
monitoringState: MonitoringState;
|
||||
modifiedAt: UnixTimestampSeconds;
|
||||
}
|
||||
};
|
||||
|
||||
export function isStoredNode(arg: unknown): arg is StoredNode {
|
||||
if (!isObject(arg)) {
|
||||
|
@ -449,23 +463,20 @@ export function isStoredNode(arg: unknown): arg is StoredNode {
|
|||
export type NodeResponse = StoredNode & {
|
||||
monitoring: boolean;
|
||||
monitoringConfirmed: boolean;
|
||||
}
|
||||
};
|
||||
|
||||
export function isNodeResponse(arg: unknown): arg is NodeResponse {
|
||||
if (!isStoredNode(arg)) {
|
||||
return false;
|
||||
}
|
||||
const node = arg as NodeResponse;
|
||||
return (
|
||||
isBoolean(node.monitoring) &&
|
||||
isBoolean(node.monitoringConfirmed)
|
||||
);
|
||||
return isBoolean(node.monitoring) && isBoolean(node.monitoringConfirmed);
|
||||
}
|
||||
|
||||
export type NodeTokenResponse = {
|
||||
token: Token;
|
||||
node: NodeResponse;
|
||||
}
|
||||
};
|
||||
|
||||
export function isNodeTokenResponse(arg: unknown): arg is NodeTokenResponse {
|
||||
if (!isObject(arg)) {
|
||||
|
@ -495,13 +506,16 @@ export const isDomain = toIsNewtype(isString, "" as Domain);
|
|||
/**
|
||||
* Represents a node in the context of a Freifunk site and domain.
|
||||
*/
|
||||
export type DomainSpecificNodeResponse = Record<NodeSortField, any> & NodeResponse & {
|
||||
site?: Site,
|
||||
domain?: Domain,
|
||||
onlineState?: OnlineState,
|
||||
}
|
||||
export type DomainSpecificNodeResponse = Record<NodeSortField, any> &
|
||||
NodeResponse & {
|
||||
site?: Site;
|
||||
domain?: Domain;
|
||||
onlineState?: OnlineState;
|
||||
};
|
||||
|
||||
export function isDomainSpecificNodeResponse(arg: unknown): arg is DomainSpecificNodeResponse {
|
||||
export function isDomainSpecificNodeResponse(
|
||||
arg: unknown
|
||||
): arg is DomainSpecificNodeResponse {
|
||||
if (!isNodeResponse(arg)) {
|
||||
return false;
|
||||
}
|
||||
|
@ -514,12 +528,12 @@ export function isDomainSpecificNodeResponse(arg: unknown): arg is DomainSpecifi
|
|||
}
|
||||
|
||||
export type MonitoringResponse = {
|
||||
hostname: Hostname,
|
||||
mac: MAC,
|
||||
email: EmailAddress,
|
||||
monitoring: boolean,
|
||||
monitoringConfirmed: boolean,
|
||||
}
|
||||
hostname: Hostname;
|
||||
mac: MAC;
|
||||
email: EmailAddress;
|
||||
monitoring: boolean;
|
||||
monitoringConfirmed: boolean;
|
||||
};
|
||||
|
||||
export function isMonitoringResponse(arg: unknown): arg is MonitoringResponse {
|
||||
if (!Object(arg)) {
|
||||
|
@ -536,17 +550,17 @@ export function isMonitoringResponse(arg: unknown): arg is MonitoringResponse {
|
|||
}
|
||||
|
||||
export enum NodeSortField {
|
||||
HOSTNAME = 'hostname',
|
||||
NICKNAME = 'nickname',
|
||||
EMAIL = 'email',
|
||||
TOKEN = 'token',
|
||||
MAC = 'mac',
|
||||
KEY = 'key',
|
||||
SITE = 'site',
|
||||
DOMAIN = 'domain',
|
||||
COORDS = 'coords',
|
||||
ONLINE_STATE = 'onlineState',
|
||||
MONITORING_STATE = 'monitoringState',
|
||||
HOSTNAME = "hostname",
|
||||
NICKNAME = "nickname",
|
||||
EMAIL = "email",
|
||||
TOKEN = "token",
|
||||
MAC = "mac",
|
||||
KEY = "key",
|
||||
SITE = "site",
|
||||
DOMAIN = "domain",
|
||||
COORDS = "coords",
|
||||
ONLINE_STATE = "onlineState",
|
||||
MONITORING_STATE = "monitoringState",
|
||||
}
|
||||
|
||||
export const isNodeSortField = toIsEnum(NodeSortField);
|
||||
|
@ -558,7 +572,7 @@ export type NodesFilter = {
|
|||
site?: Site;
|
||||
domain?: Domain;
|
||||
onlineState?: OnlineState;
|
||||
}
|
||||
};
|
||||
|
||||
export const NODES_FILTER_FIELDS = {
|
||||
hasKey: Boolean,
|
||||
|
@ -585,49 +599,49 @@ export function isNodesFilter(arg: unknown): arg is NodesFilter {
|
|||
}
|
||||
|
||||
export enum MonitoringSortField {
|
||||
ID = 'id',
|
||||
HOSTNAME = 'hostname',
|
||||
MAC = 'mac',
|
||||
SITE = 'site',
|
||||
DOMAIN = 'domain',
|
||||
MONITORING_STATE = 'monitoring_state',
|
||||
STATE = 'state',
|
||||
LAST_SEEN = 'last_seen',
|
||||
IMPORT_TIMESTAMP = 'import_timestamp',
|
||||
LAST_STATUS_MAIL_TYPE = 'last_status_mail_type',
|
||||
LAST_STATUS_MAIL_SENT = 'last_status_mail_sent',
|
||||
CREATED_AT = 'created_at',
|
||||
MODIFIED_AT = 'modified_at',
|
||||
ID = "id",
|
||||
HOSTNAME = "hostname",
|
||||
MAC = "mac",
|
||||
SITE = "site",
|
||||
DOMAIN = "domain",
|
||||
MONITORING_STATE = "monitoring_state",
|
||||
STATE = "state",
|
||||
LAST_SEEN = "last_seen",
|
||||
IMPORT_TIMESTAMP = "import_timestamp",
|
||||
LAST_STATUS_MAIL_TYPE = "last_status_mail_type",
|
||||
LAST_STATUS_MAIL_SENT = "last_status_mail_sent",
|
||||
CREATED_AT = "created_at",
|
||||
MODIFIED_AT = "modified_at",
|
||||
}
|
||||
|
||||
export const isMonitoringSortField = toIsEnum(MonitoringSortField);
|
||||
|
||||
export enum TaskSortField {
|
||||
ID = 'id',
|
||||
NAME = 'name',
|
||||
SCHEDULE = 'schedule',
|
||||
STATE = 'state',
|
||||
RUNNING_SINCE = 'runningSince',
|
||||
LAST_RUN_STARTED = 'lastRunStarted',
|
||||
ID = "id",
|
||||
NAME = "name",
|
||||
SCHEDULE = "schedule",
|
||||
STATE = "state",
|
||||
RUNNING_SINCE = "runningSince",
|
||||
LAST_RUN_STARTED = "lastRunStarted",
|
||||
}
|
||||
|
||||
export const isTaskSortField = toIsEnum(TaskSortField);
|
||||
|
||||
export enum MailSortField {
|
||||
ID = 'id',
|
||||
FAILURES = 'failures',
|
||||
SENDER = 'sender',
|
||||
RECIPIENT = 'recipient',
|
||||
EMAIL = 'email',
|
||||
CREATED_AT = 'created_at',
|
||||
MODIFIED_AT = 'modified_at',
|
||||
ID = "id",
|
||||
FAILURES = "failures",
|
||||
SENDER = "sender",
|
||||
RECIPIENT = "recipient",
|
||||
EMAIL = "email",
|
||||
CREATED_AT = "created_at",
|
||||
MODIFIED_AT = "modified_at",
|
||||
}
|
||||
|
||||
export const isMailSortField = toIsEnum(MailSortField);
|
||||
|
||||
export type GenericSortField = {
|
||||
value: string;
|
||||
readonly __tag: unique symbol
|
||||
readonly __tag: unique symbol;
|
||||
};
|
||||
|
||||
export enum SortDirection {
|
||||
|
|
|
@ -1,12 +1,15 @@
import {isString, MAC} from "../types";
import { isString, MAC } from "../types";

export function normalizeString(str: string): string {
return isString(str) ? str.trim().replace(/\s+/g, ' ') : str;
return isString(str) ? str.trim().replace(/\s+/g, " ") : str;
}

export function normalizeMac(mac: MAC): MAC {
// parts only contains values at odd indexes
const parts = mac.toUpperCase().replace(/[-:]/g, '').split(/([A-F0-9]{2})/);
const parts = mac
.toUpperCase()
.replace(/[-:]/g, "")
.split(/([A-F0-9]{2})/);

const macParts = [];

@ -14,7 +17,7 @@ export function normalizeMac(mac: MAC): MAC {
macParts.push(parts[i]);
}

return macParts.join(':') as MAC;
return macParts.join(":") as MAC;
}
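A hedged sketch of what the normalization above produces (illustrative values, not part of the commit):

// "aa-bb-cc-dd-ee-ff" and "aabbccddeeff" both normalize to "AA:BB:CC:DD:EE:FF".
const mac = normalizeMac("aa-bb-cc-dd-ee-ff" as MAC);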

export function parseInteger(str: string): number {
@ -22,6 +25,8 @@ export function parseInteger(str: string): number {
if (parsed.toString() === str) {
return parsed;
} else {
throw new SyntaxError(`String does not represent a valid integer: "${str}"`);
throw new SyntaxError(
`String does not represent a valid integer: "${str}"`
);
}
}
|
||||
|
|
|
@ -3,115 +3,115 @@
|
|||
|
||||
// noinspection RegExpSimplifiable
|
||||
const CONSTRAINTS = {
|
||||
id:{
|
||||
type: 'string',
|
||||
id: {
|
||||
type: "string",
|
||||
regex: /^[1-9][0-9]*$/,
|
||||
optional: false
|
||||
optional: false,
|
||||
},
|
||||
token:{
|
||||
type: 'string',
|
||||
token: {
|
||||
type: "string",
|
||||
regex: /^[0-9a-f]{16}$/i,
|
||||
optional: false
|
||||
optional: false,
|
||||
},
|
||||
node: {
|
||||
hostname: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^[-a-z0-9_]{1,32}$/i,
|
||||
optional: false
|
||||
optional: false,
|
||||
},
|
||||
key: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^([a-f0-9]{64})$/i,
|
||||
optional: true
|
||||
optional: true,
|
||||
},
|
||||
email: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$/i,
|
||||
optional: false
|
||||
optional: false,
|
||||
},
|
||||
nickname: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^[-a-z0-9_ äöüß]{1,64}$/i,
|
||||
optional: false
|
||||
optional: false,
|
||||
},
|
||||
mac: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^([a-f0-9]{12}|([a-f0-9]{2}:){5}[a-f0-9]{2}|([a-f0-9]{2}-){5}[a-f0-9]{2})$/i,
|
||||
optional: false
|
||||
optional: false,
|
||||
},
|
||||
coords: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^(-?[0-9]{1,3}(\.[0-9]{1,15})? -?[0-9]{1,3}(\.[0-9]{1,15})?)$/,
|
||||
optional: true
|
||||
optional: true,
|
||||
},
|
||||
monitoring: {
|
||||
type: 'boolean',
|
||||
optional: false
|
||||
}
|
||||
type: "boolean",
|
||||
optional: false,
|
||||
},
|
||||
},
|
||||
nodeFilters: {
|
||||
hasKey: {
|
||||
type: 'boolean',
|
||||
optional: true
|
||||
type: "boolean",
|
||||
optional: true,
|
||||
},
|
||||
hasCoords: {
|
||||
type: 'boolean',
|
||||
optional: true
|
||||
type: "boolean",
|
||||
optional: true,
|
||||
},
|
||||
onlineState: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^(ONLINE|OFFLINE)$/,
|
||||
optional: true
|
||||
optional: true,
|
||||
},
|
||||
monitoringState: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^(disabled|active|pending)$/,
|
||||
optional: true
|
||||
optional: true,
|
||||
},
|
||||
site: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^[a-z0-9_-]{1,32}$/,
|
||||
optional: true
|
||||
optional: true,
|
||||
},
|
||||
domain: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^[a-z0-9_-]{1,32}$/,
|
||||
optional: true
|
||||
}
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
rest: {
|
||||
list: {
|
||||
_page: {
|
||||
type: 'number',
|
||||
type: "number",
|
||||
min: 1,
|
||||
optional: true,
|
||||
default: 1
|
||||
default: 1,
|
||||
},
|
||||
_perPage: {
|
||||
type: 'number',
|
||||
type: "number",
|
||||
min: 1,
|
||||
max: 50,
|
||||
optional: true,
|
||||
default: 20
|
||||
default: 20,
|
||||
},
|
||||
_sortDir: {
|
||||
type: 'enum',
|
||||
allowed: ['ASC', 'DESC'],
|
||||
type: "enum",
|
||||
allowed: ["ASC", "DESC"],
|
||||
optional: true,
|
||||
default: 'ASC'
|
||||
default: "ASC",
|
||||
},
|
||||
_sortField: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^[a-zA-Z0-9_]{1,32}$/,
|
||||
optional: true
|
||||
optional: true,
|
||||
},
|
||||
q: {
|
||||
type: 'string',
|
||||
type: "string",
|
||||
regex: /^[äöüß a-z0-9!#$%&@:.'*+/=?^_`{|}~-]{1,64}$/i,
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
}
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export default CONSTRAINTS;
|
||||
|
|
|
@ -1,23 +1,31 @@
|
|||
import {parseInteger} from "../utils/strings";
|
||||
import {isBoolean, isNumber, isObject, isOptional, isRegExp, isString, toIsArray} from "../types";
|
||||
import { parseInteger } from "../utils/strings";
|
||||
import {
|
||||
isBoolean,
|
||||
isNumber,
|
||||
isObject,
|
||||
isOptional,
|
||||
isRegExp,
|
||||
isString,
|
||||
toIsArray,
|
||||
} from "../types";
|
||||
|
||||
export interface Constraint {
|
||||
type: string,
|
||||
type: string;
|
||||
|
||||
default?: any,
|
||||
default?: unknown;
|
||||
|
||||
optional?: boolean,
|
||||
optional?: boolean;
|
||||
|
||||
allowed?: string[],
|
||||
allowed?: string[];
|
||||
|
||||
min?: number,
|
||||
max?: number,
|
||||
min?: number;
|
||||
max?: number;
|
||||
|
||||
regex?: RegExp,
|
||||
regex?: RegExp;
|
||||
}
|
||||
|
||||
export type Constraints = { [key: string]: Constraint };
|
||||
export type Values = { [key: string]: any };
|
||||
export type Values = { [key: string]: unknown };
|
||||
|
||||
export function isConstraint(arg: unknown): arg is Constraint {
|
||||
if (!isObject(arg)) {
|
||||
|
@ -36,18 +44,22 @@ export function isConstraint(arg: unknown): arg is Constraint {
|
|||
);
|
||||
}
|
||||
|
||||
export function isConstraints(constraints: unknown): constraints is Constraints {
|
||||
export function isConstraints(
|
||||
constraints: unknown
|
||||
): constraints is Constraints {
|
||||
if (!isObject(constraints)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return Object.entries(constraints).every(([key, constraint]) => isString(key) && isConstraint(constraint));
|
||||
return Object.entries(constraints).every(
|
||||
([key, constraint]) => isString(key) && isConstraint(constraint)
|
||||
);
|
||||
}
|
||||
|
||||
// TODO: sanitize input for further processing as specified by constraints (correct types, trimming, etc.)
|
||||
|
||||
function isValidBoolean(value: unknown): boolean {
|
||||
return isBoolean(value) || value === 'true' || value === 'false';
|
||||
return isBoolean(value) || value === "true" || value === "false";
|
||||
}
|
||||
|
||||
function isValidNumber(constraint: Constraint, value: unknown): boolean {
|
||||
|
@ -86,7 +98,9 @@ function isValidEnum(constraint: Constraint, value: unknown): boolean {
|
|||
|
||||
function isValidString(constraint: Constraint, value: unknown): boolean {
|
||||
if (!constraint.regex) {
|
||||
throw new Error("String constraints must have regex set: " + constraint);
|
||||
throw new Error(
|
||||
"String constraints must have regex set: " + constraint
|
||||
);
|
||||
}
|
||||
|
||||
if (!isString(value)) {
|
||||
|
@ -94,32 +108,43 @@ function isValidString(constraint: Constraint, value: unknown): boolean {
|
|||
}
|
||||
|
||||
const trimmed = value.trim();
|
||||
return (trimmed === '' && constraint.optional) || constraint.regex.test(trimmed);
|
||||
return (
|
||||
(trimmed === "" && constraint.optional) ||
|
||||
constraint.regex.test(trimmed)
|
||||
);
|
||||
}
|
||||
|
||||
function isValid(constraint: Constraint, acceptUndefined: boolean, value: unknown): boolean {
|
||||
function isValid(
|
||||
constraint: Constraint,
|
||||
acceptUndefined: boolean,
|
||||
value: unknown
|
||||
): boolean {
|
||||
if (value === undefined) {
|
||||
return acceptUndefined || constraint.optional === true;
|
||||
}
|
||||
|
||||
switch (constraint.type) {
|
||||
case 'boolean':
|
||||
case "boolean":
|
||||
return isValidBoolean(value);
|
||||
|
||||
case 'number':
|
||||
case "number":
|
||||
return isValidNumber(constraint, value);
|
||||
|
||||
case 'enum':
|
||||
case "enum":
|
||||
return isValidEnum(constraint, value);
|
||||
|
||||
case 'string':
|
||||
case "string":
|
||||
return isValidString(constraint, value);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function areValid(constraints: Constraints, acceptUndefined: boolean, values: Values): boolean {
|
||||
function areValid(
|
||||
constraints: Constraints,
|
||||
acceptUndefined: boolean,
|
||||
values: Values
|
||||
): boolean {
|
||||
const fields = new Set(Object.keys(constraints));
|
||||
for (const field of fields) {
|
||||
if (!isValid(constraints[field], acceptUndefined, values[field])) {
|
||||
|
@ -136,10 +161,18 @@ function areValid(constraints: Constraints, acceptUndefined: boolean, values: Va
|
|||
return true;
|
||||
}
|
||||
|
||||
export function forConstraint(constraint: Constraint, acceptUndefined: boolean): (value: unknown) => boolean {
|
||||
return ((value: unknown): boolean => isValid(constraint, acceptUndefined, value));
|
||||
export function forConstraint(
|
||||
constraint: Constraint,
|
||||
acceptUndefined: boolean
|
||||
): (value: unknown) => boolean {
|
||||
return (value: unknown): boolean =>
|
||||
isValid(constraint, acceptUndefined, value);
|
||||
}
|
||||
|
||||
export function forConstraints(constraints: Constraints, acceptUndefined: boolean): (values: Values) => boolean {
|
||||
return ((values: Values): boolean => areValid(constraints, acceptUndefined, values));
|
||||
export function forConstraints(
|
||||
constraints: Constraints,
|
||||
acceptUndefined: boolean
|
||||
): (values: Values) => boolean {
|
||||
return (values: Values): boolean =>
|
||||
areValid(constraints, acceptUndefined, values);
|
||||
}
|
||||
|
|
|
@ -1,11 +1,20 @@
|
|||
import {ArrayField, Field, RawJsonField} from "sparkson"
|
||||
import {ClientConfig, DurationMilliseconds, isString, toIsNewtype, Url} from "../shared/types";
|
||||
import { ArrayField, Field, RawJsonField } from "sparkson";
|
||||
import {
|
||||
ClientConfig,
|
||||
DurationMilliseconds,
|
||||
isString,
|
||||
toIsNewtype,
|
||||
Url,
|
||||
} from "../shared/types";
|
||||
|
||||
export type Username = string & { readonly __tag: unique symbol };
|
||||
export const isUsername = toIsNewtype(isString, "" as Username);
|
||||
|
||||
export type CleartextPassword = string & { readonly __tag: unique symbol };
|
||||
export const isCleartextPassword = toIsNewtype(isString, "" as CleartextPassword);
|
||||
export const isCleartextPassword = toIsNewtype(
|
||||
isString,
|
||||
"" as CleartextPassword
|
||||
);
|
||||
|
||||
export type PasswordHash = string & { readonly __tag: unique symbol };
|
||||
export const isPasswordHash = toIsNewtype(isString, "" as PasswordHash);
|
||||
|
@ -13,34 +22,30 @@ export const isPasswordHash = toIsNewtype(isString, "" as PasswordHash);
|
|||
export class UsersConfig {
|
||||
constructor(
|
||||
@Field("user") public username: Username,
|
||||
@Field("passwordHash") public passwordHash: PasswordHash,
|
||||
) {
|
||||
}
|
||||
@Field("passwordHash") public passwordHash: PasswordHash
|
||||
) {}
|
||||
}
|
||||
|
||||
export class LoggingConfig {
|
||||
constructor(
|
||||
@Field("enabled") public enabled: boolean,
|
||||
@Field("debug") public debug: boolean,
|
||||
@Field("profile") public profile: boolean,
|
||||
) {
|
||||
}
|
||||
@Field("profile") public profile: boolean
|
||||
) {}
|
||||
}
|
||||
|
||||
export class InternalConfig {
|
||||
constructor(
|
||||
@Field("active") public active: boolean,
|
||||
@ArrayField("users", UsersConfig) public users: UsersConfig[],
|
||||
) {
|
||||
}
|
||||
@ArrayField("users", UsersConfig) public users: UsersConfig[]
|
||||
) {}
|
||||
}
|
||||
|
||||
export class SMTPAuthConfig {
|
||||
constructor(
|
||||
@Field("user") public user: Username,
|
||||
@Field("pass") public pass: CleartextPassword,
|
||||
) {
|
||||
}
|
||||
@Field("pass") public pass: CleartextPassword
|
||||
) {}
|
||||
}
|
||||
|
||||
// For details see: https://nodemailer.com/smtp/
|
||||
|
@ -55,26 +60,24 @@ export class SMTPConfig {
|
|||
@Field("opportunisticTLS") public opportunisticTLS?: boolean,
|
||||
@Field("name") public name?: string,
|
||||
@Field("localAddress") public localAddress?: string,
|
||||
@Field("connectionTimeout") public connectionTimeout?: DurationMilliseconds,
|
||||
@Field("connectionTimeout")
|
||||
public connectionTimeout?: DurationMilliseconds,
|
||||
@Field("greetingTimeout") public greetingTimeout?: DurationMilliseconds,
|
||||
@Field("socketTimeout") public socketTimeout?: DurationMilliseconds,
|
||||
) {
|
||||
}
|
||||
@Field("socketTimeout") public socketTimeout?: DurationMilliseconds
|
||||
) {}
|
||||
}
|
||||
|
||||
export class EmailConfig {
|
||||
constructor(
|
||||
@Field("from") public from: string,
|
||||
@RawJsonField("smtp") public smtp: SMTPConfig,
|
||||
) {
|
||||
}
|
||||
@RawJsonField("smtp") public smtp: SMTPConfig
|
||||
) {}
|
||||
}
|
||||
|
||||
export class ServerMapConfig {
|
||||
constructor(
|
||||
@ArrayField("nodesJsonUrl", String) public nodesJsonUrl: Url[],
|
||||
) {
|
||||
}
|
||||
@ArrayField("nodesJsonUrl", String) public nodesJsonUrl: Url[]
|
||||
) {}
|
||||
}
|
||||
|
||||
export class ServerConfig {
|
||||
|
@ -87,15 +90,13 @@ export class ServerConfig {
|
|||
@Field("internal") public internal: InternalConfig,
|
||||
@Field("email") public email: EmailConfig,
|
||||
@Field("map") public map: ServerMapConfig,
|
||||
@Field("rootPath", true, undefined, "/") public rootPath: string,
|
||||
) {
|
||||
}
|
||||
@Field("rootPath", true, undefined, "/") public rootPath: string
|
||||
) {}
|
||||
}
|
||||
|
||||
export class Config {
|
||||
constructor(
|
||||
@Field("server") public server: ServerConfig,
|
||||
@Field("client") public client: ClientConfig,
|
||||
) {
|
||||
}
|
||||
@Field("client") public client: ClientConfig
|
||||
) {}
|
||||
}
|
||||
|
|
|
@ -1,51 +1,70 @@
|
|||
import {ISqlite, Statement} from "sqlite";
|
||||
import { ISqlite, Statement } from "sqlite";
|
||||
|
||||
export type RunResult = ISqlite.RunResult;
|
||||
export type SqlType = ISqlite.SqlType;
|
||||
|
||||
export {Statement};
|
||||
export { Statement };
|
||||
|
||||
export interface TypedDatabase {
|
||||
/**
|
||||
* @see Database.on
|
||||
*/
|
||||
on(event: string, listener: any): Promise<void>;
|
||||
on(event: string, listener: unknown): Promise<void>;
|
||||
|
||||
/**
|
||||
* @see Database.run
|
||||
*/
|
||||
run(sql: SqlType, ...params: any[]): Promise<RunResult>;
|
||||
run(sql: SqlType, ...params: unknown[]): Promise<RunResult>;
|
||||
|
||||
/**
|
||||
* @see Database.get
|
||||
*/
|
||||
get<T>(sql: SqlType, ...params: any[]): Promise<T | undefined>;
|
||||
get<T>(sql: SqlType, ...params: unknown[]): Promise<T | undefined>;
|
||||
|
||||
/**
|
||||
* @see Database.each
|
||||
*/
|
||||
each<T>(sql: SqlType, callback: (err: any, row: T) => void): Promise<number>;
|
||||
each<T>(
|
||||
sql: SqlType,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
|
||||
each<T>(sql: SqlType, param1: any, callback: (err: any, row: T) => void): Promise<number>;
|
||||
each<T>(
|
||||
sql: SqlType,
|
||||
param1: unknown,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
|
||||
each<T>(sql: SqlType, param1: any, param2: any, callback: (err: any, row: T) => void): Promise<number>;
|
||||
each<T>(
|
||||
sql: SqlType,
|
||||
param1: unknown,
|
||||
param2: unknown,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
|
||||
each<T>(sql: SqlType, param1: any, param2: any, param3: any, callback: (err: any, row: T) => void): Promise<number>;
|
||||
each<T>(
|
||||
sql: SqlType,
|
||||
param1: unknown,
|
||||
param2: unknown,
|
||||
param3: unknown,
|
||||
callback: (err: unknown, row: T) => void
|
||||
): Promise<number>;
|
||||
|
||||
each<T>(sql: SqlType, ...params: any[]): Promise<number>;
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
each<T>(sql: SqlType, ...params: unknown[]): Promise<number>;
|
||||
|
||||
/**
|
||||
* @see Database.all
|
||||
*/
|
||||
all<T = never>(sql: SqlType, ...params: any[]): Promise<T[]>;
|
||||
all<T = never>(sql: SqlType, ...params: unknown[]): Promise<T[]>;
|
||||
|
||||
/**
|
||||
* @see Database.exec
|
||||
*/
|
||||
exec(sql: SqlType, ...params: any[]): Promise<void>;
|
||||
exec(sql: SqlType, ...params: unknown[]): Promise<void>;
|
||||
|
||||
/**
|
||||
* @see Database.prepare
|
||||
*/
|
||||
prepare(sql: SqlType, ...params: any[]): Promise<Statement>;
|
||||
prepare(sql: SqlType, ...params: unknown[]): Promise<Statement>;
|
||||
}
|
||||
|
|
|
@ -23,10 +23,10 @@ export * from "./logger";
export * from "../shared/types";

export type NodeStateData = {
    site?: Site,
    domain?: Domain,
    state: OnlineState,
}
    site?: Site;
    domain?: Domain;
    state: OnlineState;
};

export function toCreateOrUpdateNode(node: StoredNode): CreateOrUpdateNode {
    return {
@ -37,7 +37,7 @@ export function toCreateOrUpdateNode(node: StoredNode): CreateOrUpdateNode {
        key: node.key,
        mac: node.mac,
        monitoring: node.monitoringState !== MonitoringState.DISABLED,
    }
    };
}

export function toNodeResponse(node: StoredNode): NodeResponse {
@ -53,17 +53,20 @@ export function toNodeResponse(node: StoredNode): NodeResponse {
        monitoringConfirmed: node.monitoringState === MonitoringState.ACTIVE,
        monitoringState: node.monitoringState,
        modifiedAt: node.modifiedAt,
    }
    };
}

export function toNodeTokenResponse(node: StoredNode): NodeTokenResponse {
    return {
        token: node.token,
        node: toNodeResponse(node),
    }
    };
}

export function toDomainSpecificNodeResponse(node: StoredNode, nodeStateData: NodeStateData): DomainSpecificNodeResponse {
export function toDomainSpecificNodeResponse(
    node: StoredNode,
    nodeStateData: NodeStateData
): DomainSpecificNodeResponse {
    return {
        token: node.token,
        nickname: node.nickname,
@ -79,7 +82,7 @@ export function toDomainSpecificNodeResponse(node: StoredNode, nodeStateData: No
        site: nodeStateData.site,
        domain: nodeStateData.domain,
        onlineState: nodeStateData.state,
    }
    };
}

export function toMonitoringResponse(node: StoredNode): MonitoringResponse {
@ -93,7 +96,7 @@ export function toMonitoringResponse(node: StoredNode): MonitoringResponse {
}

export type NodeSecrets = {
    monitoringToken?: MonitoringToken,
    monitoringToken?: MonitoringToken;
};

export type MailId = number & { readonly __tag: unique symbol };
@ -118,4 +121,4 @@ export type Mail = {
    recipient: EmailAddress;
    data: MailData;
    failures: number;
}
};
@ -1,7 +1,13 @@
export type LogLevel = 'debug' | 'info' | 'warn' | 'error' | 'profile';
export const LogLevels: LogLevel[] = ['debug', 'info', 'warn', 'error', 'profile'];
export type LogLevel = "debug" | "info" | "warn" | "error" | "profile";
export const LogLevels: LogLevel[] = [
    "debug",
    "info",
    "warn",
    "error",
    "profile",
];

export function isLogLevel(arg: any): arg is LogLevel {
export function isLogLevel(arg: unknown): arg is LogLevel {
    if (typeof arg !== "string") {
        return false;
    }
@ -14,12 +20,12 @@ export function isLogLevel(arg: any): arg is LogLevel {
}

export interface TaggedLogger {
    log(level: LogLevel, ...args: any[]): void;
    debug(...args: any[]): void;
    info(...args: any[]): void;
    warn(...args: any[]): void;
    error(...args: any[]): void;
    profile(...args: any[]): void;
    log(level: LogLevel, ...args: unknown[]): void;
    debug(...args: unknown[]): void;
    info(...args: unknown[]): void;
    warn(...args: unknown[]): void;
    error(...args: unknown[]): void;
    profile(...args: unknown[]): void;
}

export interface Logger {
@ -1,8 +1,16 @@
import _ from "lodash";

export function inCondition<T>(field: string, list: T[]): {query: string, params: T[]} {
export function inCondition<T>(
    field: string,
    list: T[]
): { query: string; params: T[] } {
    return {
        query: '(' + field + ' IN (' + _.times(list.length, () =>'?').join(', ') + '))',
        query:
            "(" +
            field +
            " IN (" +
            _.times(list.length, () => "?").join(", ") +
            "))",
        params: list,
    }
    };
}
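A short sketch of what the reformatted inCondition helper produces; the field name and value list are illustrative assumptions:

// Illustrative sketch only; assumes inCondition is imported from the module above.
const macs = ["aa:bb:cc:dd:ee:ff", "11:22:33:44:55:66"];
const condition = inCondition("mac", macs);
// condition.query  === "(mac IN (?, ?))"
// condition.params === macs
// Typical use: append condition.query to a WHERE clause and spread
// condition.params into the statement parameters.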
@ -1,8 +0,0 @@
'use strict';

module.exports = {
    badRequest: {code: 400},
    notFound: {code: 404},
    conflict: {code: 409},
    internalError: {code: 500}
}
6
server/utils/errorTypes.ts
Normal file
6
server/utils/errorTypes.ts
Normal file
@ -0,0 +1,6 @@
export default {
    badRequest: { code: 400 },
    notFound: { code: 404 },
    conflict: { code: 409 },
    internalError: { code: 500 },
};
@ -1,10 +1,14 @@
import _ from "lodash";

import CONSTRAINTS from "../shared/validation/constraints";
import ErrorTypes from "../utils/errorTypes";
import ErrorTypes from "./errorTypes";
import Logger from "../logger";
import {Constraints, forConstraints, isConstraints} from "../shared/validation/validator";
import {Request, Response} from "express";
import {
    Constraints,
    forConstraints,
    isConstraints,
} from "../shared/validation/validator";
import { Request, Response } from "express";
import {
    EnumTypeGuard,
    EnumValue,
@ -16,7 +20,7 @@ import {
    JSONObject,
    JSONValue,
    SortDirection,
    TypeGuard
    TypeGuard,
} from "../types";

export type RequestData = JSONObject;
@ -36,21 +40,36 @@ export type RestParams = {
    filters?: FilterClause;
};

export type OrderByClause = { query: string, params: any[] };
export type LimitOffsetClause = { query: string, params: any[] };
export type FilterClause = { query: string, params: any[] };
export type OrderByClause = { query: string; params: unknown[] };
export type LimitOffsetClause = { query: string; params: unknown[] };
export type FilterClause = { query: string; params: unknown[] };

function respond(res: Response, httpCode: number, data: string, type: "html"): void;
function respond(res: Response, httpCode: number, data: JSONValue, type: "json"): void;
function respond(res: Response, httpCode: number, data: JSONValue, type: "html" | "json"): void {
function respond(
    res: Response,
    httpCode: number,
    data: string,
    type: "html"
): void;
function respond(
    res: Response,
    httpCode: number,
    data: JSONValue,
    type: "json"
): void;
function respond(
    res: Response,
    httpCode: number,
    data: JSONValue,
    type: "html" | "json"
): void {
    switch (type) {
        case 'html':
            res.writeHead(httpCode, {'Content-Type': 'text/html'});
        case "html":
            res.writeHead(httpCode, { "Content-Type": "text/html" });
            res.end(data);
            break;

        default:
            res.writeHead(httpCode, {'Content-Type': 'application/json'});
            res.writeHead(httpCode, { "Content-Type": "application/json" });
            res.end(JSON.stringify(data));
            break;
    }
@ -59,16 +78,22 @@ function respond(res: Response, httpCode: number, data: JSONValue, type: "html"
function orderByClause<S>(
    restParams: RestParams,
    defaultSortField: EnumValue<S>,
    isSortField: EnumTypeGuard<S>,
    isSortField: EnumTypeGuard<S>
): OrderByClause {
    let sortField: EnumValue<S> | undefined = isSortField(restParams._sortField) ? restParams._sortField : undefined;
    let sortField: EnumValue<S> | undefined = isSortField(restParams._sortField)
        ? restParams._sortField
        : undefined;
    if (!sortField) {
        sortField = defaultSortField;
    }

    return {
        query: 'ORDER BY LOWER(' + sortField + ') ' + (restParams._sortDir === SortDirection.ASCENDING ? 'ASC' : 'DESC'),
        params: []
        query:
            "ORDER BY LOWER(" +
            sortField +
            ") " +
            (restParams._sortDir === SortDirection.ASCENDING ? "ASC" : "DESC"),
        params: [],
    };
}

@ -77,55 +102,64 @@ function limitOffsetClause(restParams: RestParams): LimitOffsetClause {
    const perPage = restParams._perPage;

    return {
        query: 'LIMIT ? OFFSET ?',
        params: [perPage, ((page - 1) * perPage)]
        query: "LIMIT ? OFFSET ?",
        params: [perPage, (page - 1) * perPage],
    };
}

function escapeForLikePattern(str: string): string {
    return str
        .replace(/\\/g, '\\\\')
        .replace(/%/g, '\\%')
        .replace(/_/g, '\\_');
    return str.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
}

function filterCondition(restParams: RestParams, filterFields: string[]): FilterClause {
function filterCondition(
    restParams: RestParams,
    filterFields: string[]
): FilterClause {
    if (_.isEmpty(filterFields)) {
        return {
            query: '1 = 1',
            params: []
            query: "1 = 1",
            params: [],
        };
    }

    let query = filterFields
        .map(field => 'LOWER(' + field + ') LIKE ?')
        .join(' OR ');
        .map((field) => "LOWER(" + field + ") LIKE ?")
        .join(" OR ");

    query += ' ESCAPE \'\\\'';
    query += " ESCAPE '\\'";

    const search = '%' + (isString(restParams.q) ? escapeForLikePattern(restParams.q.trim().toLowerCase()) : '') + '%';
    const search =
        "%" +
        (isString(restParams.q)
            ? escapeForLikePattern(restParams.q.trim().toLowerCase())
            : "") +
        "%";
    const params = _.times(filterFields.length, () => search);

    return {
        query: query,
        params: params
        params: params,
    };
}

function getConstrainedValues(data: { [key: string]: any }, constraints: Constraints): { [key: string]: any } {
    const values: { [key: string]: any } = {};
function getConstrainedValues(
    data: { [key: string]: unknown },
    constraints: Constraints
): { [key: string]: unknown } {
    const values: { [key: string]: unknown } = {};
    for (const key of Object.keys(constraints)) {
        const value = data[key];
        values[key] =
            isUndefined(value) && key in constraints && !isUndefined(constraints[key].default)
            isUndefined(value) &&
            key in constraints &&
            !isUndefined(constraints[key].default)
                ? constraints[key].default
                : value;

    }
    return values;
}

function normalize(data: any): JSONObject {
function normalize(data: unknown): JSONObject {
    return isJSONObject(data) ? data : {};
}
@ -144,23 +178,31 @@ export function getData(req: Request): RequestData {
export async function getValidRestParams(
    type: string,
    subtype: string | null,
    req: Request,
    req: Request
): Promise<RestParams> {
    const restConstraints = CONSTRAINTS.rest as { [key: string]: any };
    let constraints: Constraints;
    if (!(type in restConstraints) || !isConstraints(restConstraints[type])) {
        Logger.tag('validation', 'rest').error('Unknown REST resource type: {}', type);
        throw {data: 'Internal error.', type: ErrorTypes.internalError};
        Logger.tag("validation", "rest").error(
            "Unknown REST resource type: {}",
            type
        );
        throw { data: "Internal error.", type: ErrorTypes.internalError };
    }
    constraints = restConstraints[type];
    const constraints: Constraints = restConstraints[type];

    let filterConstraints: Constraints = {};
    if (subtype) {
        const subtypeFilters = subtype + 'Filters';
        const subtypeFilters = subtype + "Filters";
        const constraintsObj = CONSTRAINTS as { [key: string]: any };
        if (!(subtypeFilters in constraintsObj) || !isConstraints(constraintsObj[subtypeFilters])) {
            Logger.tag('validation', 'rest').error('Unknown REST resource subtype: {}', subtype);
            throw {data: 'Internal error.', type: ErrorTypes.internalError};
        if (
            !(subtypeFilters in constraintsObj) ||
            !isConstraints(constraintsObj[subtypeFilters])
        ) {
            Logger.tag("validation", "rest").error(
                "Unknown REST resource subtype: {}",
                subtype
            );
            throw { data: "Internal error.", type: ErrorTypes.internalError };
        }
        filterConstraints = constraintsObj[subtypeFilters];
    }
@ -173,14 +215,18 @@ export async function getValidRestParams(
    const areValidParams = forConstraints(constraints, false);
    const areValidFilters = forConstraints(filterConstraints, false);
    if (!areValidParams(restParams) || !areValidFilters(filterParams)) {
        throw {data: 'Invalid REST parameters.', type: ErrorTypes.badRequest};
        throw { data: "Invalid REST parameters.", type: ErrorTypes.badRequest };
    }

    restParams.filters = filterParams;
    return restParams as RestParams;
}

export function filter<E>(entities: E[], allowedFilterFields: string[], restParams: RestParams): E[] {
export function filter<E>(
    entities: E[],
    allowedFilterFields: string[],
    restParams: RestParams
): E[] {
    let query = restParams.q;
    if (query) {
        query = query.trim().toLowerCase();
@ -204,8 +250,11 @@ export function filter<E>(entities: E[], allowedFilterFields: string[], restPara
            }

            value = value.toLowerCase();
            if (field === 'mac') {
                return _.includes(value.replace(/:/g, ''), query.replace(/:/g, ''));
            if (field === "mac") {
                return _.includes(
                    value.replace(/:/g, ""),
                    query.replace(/:/g, "")
                );
            }

            return _.includes(value, query);
@ -223,27 +272,36 @@ export function filter<E>(entities: E[], allowedFilterFields: string[], restPara
            if (isUndefined(value)) {
                return true;
            }
            if (key.startsWith('has')) {
                const entityKey = key.substring(3, 4).toLowerCase() + key.substring(4);
            if (key.startsWith("has")) {
                const entityKey =
                    key.substring(3, 4).toLowerCase() + key.substring(4);
                return _.isEmpty(entity[entityKey]).toString() !== value;
            }
            return entity[key] === value;
        });
    }

    return entities.filter(entity => queryMatches(entity) && filtersMatch(entity));
    return entities.filter(
        (entity) => queryMatches(entity) && filtersMatch(entity)
    );
}

export function sort<T extends Record<S, any>, S extends string>(entities: T[], isSortField: TypeGuard<S>, restParams: RestParams): T[] {
    const sortField: S | undefined = isSortField(restParams._sortField) ? restParams._sortField : undefined;
export function sort<T extends Record<S, unknown>, S extends string>(
    entities: T[],
    isSortField: TypeGuard<S>,
    restParams: RestParams
): T[] {
    const sortField: S | undefined = isSortField(restParams._sortField)
        ? restParams._sortField
        : undefined;
    if (!sortField) {
        return entities;
    }

    const sorted = entities.slice(0);
    sorted.sort((a, b) => {
        let as = a[sortField];
        let bs = b[sortField];
        let as: any = a[sortField];
        let bs: any = b[sortField];

        if (isString(as)) {
            as = as.toLowerCase();
@ -259,69 +317,74 @@ export function sort<T extends Record<S, any>, S extends string>(entities: T[],
            order = 1;
        }

        return restParams._sortDir === SortDirection.DESCENDING ? -order : order;
        return restParams._sortDir === SortDirection.DESCENDING
            ? -order
            : order;
    });

    return sorted;
}

export function getPageEntities<Entity>(entities: Entity[], restParams: RestParams): Entity[] {
export function getPageEntities<Entity>(
    entities: Entity[],
    restParams: RestParams
): Entity[] {
    const page = restParams._page;
    const perPage = restParams._perPage;

    return entities.slice((page - 1) * perPage, page * perPage);
}

export {filterCondition as whereCondition};
export { filterCondition as whereCondition };

export function filterClause<S>(
    restParams: RestParams,
    defaultSortField: EnumValue<S>,
    isSortField: EnumTypeGuard<S>,
    filterFields: string[],
    filterFields: string[]
): FilterClause {
    const orderBy = orderByClause<S>(
        restParams,
        defaultSortField,
        isSortField,
    );
    const orderBy = orderByClause<S>(restParams, defaultSortField, isSortField);
    const limitOffset = limitOffsetClause(restParams);

    const filter = filterCondition(
        restParams,
        filterFields
    );
    const filter = filterCondition(restParams, filterFields);

    return {
        query: filter.query + ' ' + orderBy.query + ' ' + limitOffset.query,
        params: [...filter.params, ...orderBy.params, ...limitOffset.params]
        query: filter.query + " " + orderBy.query + " " + limitOffset.query,
        params: [...filter.params, ...orderBy.params, ...limitOffset.params],
    };
}

export function success(res: Response, data: JSONValue) {
    respond(res, 200, data, 'json');
    respond(res, 200, data, "json");
}

export function successHtml(res: Response, html: string) {
    respond(res, 200, html, 'html');
    respond(res, 200, html, "html");
}

export function error(res: Response, err: { data: JSONValue, type: { code: number } }) {
    respond(res, err.type.code, err.data, 'json');
export function error(
    res: Response,
    err: { data: JSONValue; type: { code: number } }
) {
    respond(res, err.type.code, err.data, "json");
}

export function handleJSON<Response>(handler: () => Promise<Response>): RequestHandler {
export function handleJSON<Response>(
    handler: () => Promise<Response>
): RequestHandler {
    return (request, response) => {
        handler()
            .then(data => success(response, data || {}))
            .catch(e => error(response, e));
            .then((data) => success(response, data || {}))
            .catch((e) => error(response, e));
    };
}

export function handleJSONWithData<Response>(handler: (data: RequestData) => Promise<Response>): RequestHandler {
export function handleJSONWithData<Response>(
    handler: (data: RequestData) => Promise<Response>
): RequestHandler {
    return (request, response) => {
        handler(getData(request))
            .then(data => success(response, data || {}))
            .catch(e => error(response, e));
            .then((data) => success(response, data || {}))
            .catch((e) => error(response, e));
    };
}
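As a rough usage sketch, handleJSONWithData (defined above) can wrap an async Express handler; the route path and handler body here are assumptions for illustration, not part of this commit:

// Illustrative sketch only; assumes handleJSONWithData is imported from the
// module above. Resolved values are sent as JSON via success(), rejections
// are mapped through error().
import { Router } from "express";

const router = Router();
router.put(
    "/api/node/:token",
    handleJSONWithData(async (data) => {
        // `data` is the RequestData extracted by getData(request).
        return { receivedKeys: Object.keys(data) };
    })
);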
@ -1,10 +1,10 @@
import {parseTimestamp} from "./time";
import { parseTimestamp } from "./time";
import moment from "moment";

const TIMESTAMP_INVALID_STRING = "2020-01-02T42:99:23.000Z";
const TIMESTAMP_VALID_STRING = "2020-01-02T12:34:56.000Z";

test('parseTimestamp() should fail parsing non-string timestamp', () => {
test("parseTimestamp() should fail parsing non-string timestamp", () => {
    // given
    const timestamp = {};

@ -15,7 +15,7 @@ test('parseTimestamp() should fail parsing non-string timestamp', () => {
    expect(parsedTimestamp).toEqual(null);
});

test('parseTimestamp() should fail parsing empty timestamp string', () => {
test("parseTimestamp() should fail parsing empty timestamp string", () => {
    // given
    const timestamp = "";

@ -26,7 +26,7 @@ test('parseTimestamp() should fail parsing empty timestamp string', () => {
    expect(parsedTimestamp).toEqual(null);
});

test('parseTimestamp() should fail parsing invalid timestamp string', () => {
test("parseTimestamp() should fail parsing invalid timestamp string", () => {
    // given
    // noinspection UnnecessaryLocalVariableJS
    const timestamp = TIMESTAMP_INVALID_STRING;
@ -38,7 +38,7 @@ test('parseTimestamp() should fail parsing invalid timestamp string', () => {
    expect(parsedTimestamp).toEqual(null);
});

test('parseTimestamp() should succeed parsing valid timestamp string', () => {
test("parseTimestamp() should succeed parsing valid timestamp string", () => {
    // given
    const timestamp = TIMESTAMP_VALID_STRING;

@ -47,7 +47,7 @@ test('parseTimestamp() should succeed parsing valid timestamp string', () => {

    // then
    if (parsedTimestamp === null) {
        fail('timestamp should not be null');
        fail("timestamp should not be null");
    }
    expect(moment.unix(parsedTimestamp).toISOString()).toEqual(timestamp);
});
@ -1,11 +1,14 @@
import {DurationSeconds, isString, UnixTimestampSeconds} from "../types";
import moment, {Moment} from "moment";
import { DurationSeconds, isString, UnixTimestampSeconds } from "../types";
import moment, { Moment } from "moment";

export function now(): UnixTimestampSeconds {
    return Math.round(Date.now() / 1000.0) as UnixTimestampSeconds;
}

export function subtract(timestamp: UnixTimestampSeconds, duration: DurationSeconds): UnixTimestampSeconds {
export function subtract(
    timestamp: UnixTimestampSeconds,
    duration: DurationSeconds
): UnixTimestampSeconds {
    return (timestamp - duration) as UnixTimestampSeconds;
}

@ -43,7 +46,9 @@ export function formatTimestamp(timestamp: UnixTimestampSeconds): string {
    return moment.unix(timestamp).format();
}

export function parseTimestamp(timestamp: any): UnixTimestampSeconds | null {
export function parseTimestamp(
    timestamp: unknown
): UnixTimestampSeconds | null {
    if (!isString(timestamp)) {
        return null;
    }
@ -53,4 +58,3 @@ export function parseTimestamp(timestamp: any): UnixTimestampSeconds | null {
    }
    return unix(parsed);
}
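A brief sketch of the narrowed parseTimestamp signature in use; the values noted in comments are what the code above would produce for this input:

// Illustrative sketch only; assumes parseTimestamp and formatTimestamp are
// imported from the module above.
const parsed = parseTimestamp("2020-01-02T12:34:56.000Z");
if (parsed !== null) {
    // parsed is a branded UnixTimestampSeconds number (1577968496 here).
    console.log(formatTimestamp(parsed));
}
console.log(parseTimestamp(42)); // null: non-string input is rejected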
@ -1,32 +1,34 @@
import {config} from "../config"
import {MonitoringToken, Url} from "../types"
import { config } from "../config";
import { MonitoringToken, Url } from "../types";

function formUrl(route: string, queryParams?: { [key: string]: string }): Url {
    let url = config.server.baseUrl as string;
    if (route || queryParams) {
        url += '/#/';
        url += "/#/";
    }
    if (route) {
        url += route;
    }
    if (queryParams) {
        url += '?';
        url +=
            Object.entries(queryParams)
                .map(([key, value]) => encodeURIComponent(key) + '=' + encodeURIComponent(value))
                .join("&");
        url += "?";
        url += Object.entries(queryParams)
            .map(
                ([key, value]) =>
                    encodeURIComponent(key) + "=" + encodeURIComponent(value)
            )
            .join("&");
    }
    return url as Url;
}

export function editNodeUrl(): Url {
    return formUrl('update');
    return formUrl("update");
}

export function monitoringConfirmUrl(monitoringToken: MonitoringToken): Url {
    return formUrl('monitoring/confirm', {token: monitoringToken});
    return formUrl("monitoring/confirm", { token: monitoringToken });
}

export function monitoringDisableUrl(monitoringToken: MonitoringToken): Url {
    return formUrl('monitoring/disable', {token: monitoringToken});
    return formUrl("monitoring/disable", { token: monitoringToken });
}
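For illustration, assuming config.server.baseUrl were "https://formular.example.com", the helpers above would build URLs like the following; the base URL and token value are assumptions, not taken from this commit:

// Illustrative sketch only; assumes the exports of the module above.
const url = monitoringConfirmUrl("abc123" as MonitoringToken);
// url === "https://formular.example.com/#/monitoring/confirm?token=abc123"
const disable = monitoringDisableUrl("abc123" as MonitoringToken);
// disable === "https://formular.example.com/#/monitoring/disable?token=abc123"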