Sqlite upgrade and type refactorings
parent 01691a0c20
commit 28c8429edd
@@ -12,7 +12,6 @@
* Split into separate packages for server and frontend.
* Make admin panel part of new frontend package.
* Get rid of grunt.
* Bluebird for promises?
* Use generated type guards.

## Mid term

@@ -52,7 +52,8 @@
    "request": "^2.88.2",
    "serve-static": "^1.14.1",
    "sparkson": "^1.3.6",
    "sqlite": "^3.0.6"
    "sqlite": "^4.1.1",
    "sqlite3": "^5.0.9"
  },
  "devDependencies": {
    "@types/async": "^3.2.15",

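Note on the dependency bump above: sqlite 4.x splits the promise wrapper from the native driver, so the sqlite3 package is now a separate dependency that has to be handed to open() explicitly (the later database.ts hunk does exactly that). A minimal sketch of the pairing; the file name here is only an example, not taken from this commit:

import {Database, open} from "sqlite";
import * as sqlite3 from "sqlite3";

// sqlite@4 no longer bundles its own driver; sqlite3 is passed in explicitly.
async function openExampleDatabase(): Promise<Database> {
    return open({
        filename: "example.sqlite", // example path, not from this commit
        driver: sqlite3.Database,
    });
}
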
@@ -9,7 +9,7 @@ import {promises as fs} from "graceful-fs";
import {config} from "./config";
import type {CleartextPassword, PasswordHash, Username} from "./types";
import {isString, lift2, to} from "./types";
import {isString} from "./types";
import Logger from "./logger";

export const app: Express = express();

@@ -17,14 +17,14 @@ export const app: Express = express();
/**
 * Used to have some password comparison in case the user does not exist to avoid timing attacks.
 */
const INVALID_PASSWORD_HASH: PasswordHash = to("$2b$05$JebmV1q/ySuxa89GoJYlc.6SEnj1OZYBOfTf.TYAehcC5HLeJiWPi");
const INVALID_PASSWORD_HASH: PasswordHash = "$2b$05$JebmV1q/ySuxa89GoJYlc.6SEnj1OZYBOfTf.TYAehcC5HLeJiWPi" as PasswordHash;

/**
 * Trying to implement a timing safe string compare.
 *
 * TODO: Write tests for timing.
 */
function timingSafeEqual(a: string, b: string): boolean {
function timingSafeEqual<T extends string>(a: T, b: T): boolean {
    const lenA = a.length;
    const lenB = b.length;

@@ -32,7 +32,7 @@ function timingSafeEqual(a: string, b: string): boolean {
    let different = Math.abs(lenA - lenB);

    // Make sure b is always the same length as a. Use slice to try avoiding optimizations.
    b = different === 0 ? b.slice() : a.slice();
    b = (different === 0 ? b.slice() : a.slice()) as T;

    for (let i = 0; i < lenA; i += 1) {
        different += Math.abs(a.charCodeAt(i) - b.charCodeAt(i));

@@ -50,15 +50,15 @@ async function isValidLogin(username: Username, password: CleartextPassword): Pr
    // Iterate over all users every time to reduce risk of timing attacks.
    for (const userConfig of config.server.internal.users) {
        if (lift2(timingSafeEqual)(username, userConfig.username)) {
        if (timingSafeEqual(username, userConfig.username)) {
            passwordHash = userConfig.passwordHash;
        }
    }

    // Always compare some password even if the user does not exist to reduce risk of timing attacks.
    const isValidPassword = await bcrypt.compare(
        password.value,
        passwordHash?.value || INVALID_PASSWORD_HASH.value
        password,
        passwordHash || INVALID_PASSWORD_HASH
    );

    // Make sure password is only considered valid is user exists and therefor passwordHash is not undefined.

@@ -74,7 +74,7 @@ export function init(): void {
            realm: 'Knotenformular - Intern'
        },
        function (username: string, password: string, callback: BasicAuthCheckerCallback): void {
            isValidLogin(to(username), to(password))
            isValidLogin(username as Username, password as CleartextPassword)
                .then(result => callback(result))
                .catch(err => {
                    Logger.tag('login').error(err);

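The pattern behind most of this refactoring shows up here: the old `{value, __tag}` wrapper objects built with to() and compared via lift2()/equal() are replaced by branded string types, so values stay plain strings at runtime and only the compiler sees the tag. A standalone sketch of the idea; the types are redeclared locally so the snippet stands on its own:

// Branded ("newtype") strings: structurally a string, but not interchangeable
// with other branded strings at compile time.
type Username = string & { readonly __tag: unique symbol };
type PasswordHash = string & { readonly __tag: unique symbol };

// Plain strings need an explicit cast at the boundary...
const user: Username = "admin" as Username;
const hash: PasswordHash = "$2b$05$..." as PasswordHash;

// ...but afterwards the values behave like normal strings: plain === replaces
// equal(), and string APIs such as length/startsWith keep working.
const sameUser: boolean = user === ("admin" as Username);
console.log(sameUser, user.length, hash.startsWith("$2b$"));
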
@@ -3,11 +3,11 @@ import commandLineUsage from "command-line-usage"
import fs from "graceful-fs"
import url from "url"
import {parse} from "sparkson"
import {Config, Version} from "./types"
import {Config, Url, Version} from "./types"

// @ts-ignore
export let config: Config = {};
export let version: Version = "unknown";
export let version: Version = "unknown" as Version;

export function parseCommandLine(): void {
    const commandLineDefs = [

@@ -70,8 +70,10 @@ export function parseCommandLine(): void {
    config = parse(Config, configJSON);

    function stripTrailingSlash(url: string): string {
        return url.endsWith("/") ? url.substr(0, url.length - 1) : url;
    function stripTrailingSlash(url: Url): Url {
        return url.endsWith("/")
            ? url.substr(0, url.length - 1) as Url
            : url;
    }

    config.server.baseUrl = stripTrailingSlash(config.server.baseUrl);

@@ -1,93 +1,48 @@
import {Database, Statement} from "sqlite";
import {RunResult, SqlType, Statement, TypedDatabase} from "../database";
import * as sqlite3 from "sqlite3";

export async function init(): Promise<void> {}

export class MockStatement implements Statement {
    constructor() {}

    readonly changes: number = 0;
    readonly lastID: number = 0;
    readonly sql: string = "";

    async all(): Promise<any[]>;
    async all(...params: any[]): Promise<any[]>;
    async all<T>(): Promise<T[]>;
    async all<T>(...params: any[]): Promise<T[]>;
    all(...params: any[]): any {
export async function init(): Promise<void> {
}

    async bind(): Promise<Statement>;
    async bind(...params: any[]): Promise<Statement>;
    async bind(...params: any[]): Promise<Statement> {
        return mockStatement();
export class MockDatabase implements TypedDatabase {
    constructor() {
    }

    async each(callback?: (err: Error, row: any) => void): Promise<number>;
    async each(...params: any[]): Promise<number>;
    async each(...callback: (((err: Error, row: any) => void) | any)[]): Promise<number> {
    async on(event: string, listener: any): Promise<void> {
    }

    async run(sql: SqlType, ...params: any[]): Promise<RunResult> {
        return {
            stmt: new Statement(new sqlite3.Statement()),
        };
    }

    async get<T = any>(sql: SqlType, ...params: any[]): Promise<T | undefined> {
        return undefined;
    }

    async each<T = any>(sql: SqlType, callback: (err: any, row: T) => void): Promise<number>;
    async each<T = any>(sql: SqlType, param1: any, callback: (err: any, row: T) => void): Promise<number>;
    async each<T = any>(sql: SqlType, param1: any, param2: any, callback: (err: any, row: T) => void): Promise<number>;
    async each<T = any>(sql: SqlType, param1: any, param2: any, param3: any, callback: (err: any, row: T) => void): Promise<number>;
    async each<T = any>(sql: SqlType, ...params: any[]): Promise<number>;
    async each(sql: SqlType, ...callback: (any)[]): Promise<number> {
        return 0;
    }

    async finalize(): Promise<void> {}

    get(): Promise<any>;
    get(...params: any[]): Promise<any>;
    get<T>(): Promise<T>;
    get<T>(...params: any[]): Promise<T>;
    get(...params: any[]): any {
    }

    async reset(): Promise<Statement> {
        return mockStatement();
    }

    async run(): Promise<Statement>;
    async run(...params: any[]): Promise<Statement>;
    async run(...params: any[]): Promise<Statement> {
        return mockStatement();
    }
}

function mockStatement(): Statement {
    return new MockStatement();
}

export class MockDatabase implements Database {
    constructor() {}

    async close(): Promise<void> {}

    async run(...args: any): Promise<Statement> {
        return mockStatement();
    }

    async get(...args: any): Promise<any> {}

    async all(...args: any): Promise<any[]> {
    async all<T>(sql: SqlType, ...params: any[]): Promise<T[]> {
        return [];
    }

    async exec(...args: any): Promise<Database> {
        return this;
    async exec(sql: SqlType, ...params: any[]): Promise<void> {
    }

    async each(...args: any): Promise<number> {
        return 0;

    async prepare(sql: SqlType, ...params: any[]): Promise<Statement> {
        return new Statement(new sqlite3.Statement());
    }

    async prepare(...args: any): Promise<Statement> {
        return mockStatement();
    }

    configure(...args: any): void {}

    async migrate(...args: any): Promise<Database> {
        return this;
    }

    on(...args: any): void {}
}

export const db: MockDatabase = new MockDatabase();

export {Database, Statement}
export {TypedDatabase, Statement}

@@ -4,12 +4,125 @@ import glob from "glob";
import path from "path";
import {config} from "../config";
import Logger from "../logger";
import sqlite, {Database, Statement} from "sqlite";
import {Database, ISqlite, open, Statement} from "sqlite";
import * as sqlite3 from "sqlite3";

const pglob = util.promisify(glob);
const pReadFile = util.promisify(fs.readFile);

async function applyPatch(db: sqlite.Database, file: string): Promise<void> {
export type RunResult = ISqlite.RunResult;
export type SqlType = ISqlite.SqlType;

export interface TypedDatabase {
    /**
     * @see Database.on
     */
    on(event: string, listener: any): Promise<void>;

    /**
     * @see Database.run
     */
    run(sql: SqlType, ...params: any[]): Promise<RunResult>;

    /**
     * @see Database.get
     */
    get<T>(sql: SqlType, ...params: any[]): Promise<T | undefined>;

    /**
     * @see Database.each
     */
    each<T>(sql: SqlType, callback: (err: any, row: T) => void): Promise<number>;

    each<T>(sql: SqlType, param1: any, callback: (err: any, row: T) => void): Promise<number>;

    each<T>(sql: SqlType, param1: any, param2: any, callback: (err: any, row: T) => void): Promise<number>;

    each<T>(sql: SqlType, param1: any, param2: any, param3: any, callback: (err: any, row: T) => void): Promise<number>;

    each<T>(sql: SqlType, ...params: any[]): Promise<number>;

    /**
     * @see Database.all
     */
    all<T = never>(sql: SqlType, ...params: any[]): Promise<T[]>;

    /**
     * @see Database.exec
     */
    exec(sql: SqlType, ...params: any[]): Promise<void>;

    /**
     * @see Database.prepare
     */
    prepare(sql: SqlType, ...params: any[]): Promise<Statement>;
}

/**
 * Typesafe database wrapper.
 *
 * @see Database
 */
class DatabasePromiseWrapper implements TypedDatabase {
    private db: Promise<Database>;

    constructor() {
        this.db = new Promise<Database>((resolve, reject) => {
            open({
                filename: config.server.databaseFile,
                driver: sqlite3.Database,
            })
                .then(resolve)
                .catch(reject);
        });
        this.db.catch(err => {
            Logger.tag('database', 'init').error('Error initializing database: ', err);
            process.exit(1);
        });
    }

    async on(event: string, listener: any): Promise<void> {
        const db = await this.db;
        db.on(event, listener);
    }

    async run(sql: SqlType, ...params: any[]): Promise<RunResult> {
        const db = await this.db;
        return db.run(sql, ...params);
    }

    async get<T>(sql: SqlType, ...params: any[]): Promise<T | undefined> {
        const db = await this.db;
        return await db.get<T>(sql, ...params);
    }

    async each<T>(sql: SqlType, callback: (err: any, row: T) => void): Promise<number>;
    async each<T>(sql: SqlType, param1: any, callback: (err: any, row: T) => void): Promise<number>;
    async each<T>(sql: SqlType, param1: any, param2: any, callback: (err: any, row: T) => void): Promise<number>;
    async each<T>(sql: SqlType, param1: any, param2: any, param3: any, callback: (err: any, row: T) => void): Promise<number>;
    async each<T>(sql: SqlType, ...params: any[]): Promise<number> {
        const db = await this.db;
        // @ts-ignore
        return await db.each.apply(db, arguments);
    }

    async all<T>(sql: SqlType, ...params: any[]): Promise<T[]> {
        const db = await this.db;
        return (await db.all<T[]>(sql, ...params));
    }

    async exec(sql: SqlType, ...params: any[]): Promise<void> {
        const db = await this.db;
        return await db.exec(sql, ...params);
    }

    async prepare(sql: SqlType, ...params: any[]): Promise<Statement> {
        const db = await this.db;
        return await db.prepare(sql, ...params);
    }
}

async function applyPatch(db: TypedDatabase, file: string): Promise<void> {
    Logger.tag('database', 'migration').info('Checking if patch need to be applied: %s', file);

    const contents = await pReadFile(file);

@@ -32,7 +145,7 @@ async function applyPatch(db: sqlite.Database, file: string): Promise<void> {
    Logger.tag('database', 'migration').info('Patch successfully applied: %s', file);
}

async function applyMigrations(db: sqlite.Database): Promise<void> {
async function applyMigrations(db: TypedDatabase): Promise<void> {
    Logger.tag('database', 'migration').info('Migrating database...');

    const sql = 'BEGIN TRANSACTION; CREATE TABLE IF NOT EXISTS schema_version (\n' +

@@ -48,106 +161,18 @@ async function applyMigrations(db: sqlite.Database): Promise<void> {
    }
}

const dbPromise = new Promise<Database>((resolve, reject) => {
    sqlite.open(config.server.databaseFile)
        .then(resolve)
        .catch(reject);
});
export const db: TypedDatabase = new DatabasePromiseWrapper();

export async function init(): Promise<void> {
    Logger.tag('database').info('Setting up database: %s', config.server.databaseFile);

    let db: Database;
    try {
        db = await dbPromise;
    }
    catch (error) {
        Logger.tag('database').error('Error initialzing database:', error);
        throw error;
    }

    db.on('profile', (sql, time) => Logger.tag('database').profile('[%sms]\t%s', time, sql));
    await db.on('profile', (sql: string, time: number) => Logger.tag('database').profile('[%sms]\t%s', time, sql));

    try {
        await applyMigrations(db);
    }
    catch (error) {
    } catch (error) {
        Logger.tag('database').error('Error migrating database:', error);
        throw error;
    }
}

/**
 * Wrapper around a Promise<Database> providing the same interface as the Database itself.
 */
class DatabasePromiseWrapper implements Database {
    constructor(private db: Promise<Database>) {
        db.catch(err => {
            Logger.tag('database', 'init').error('Error initializing database: ', err);
            process.exit(1);
        });
    }

    async close() {
        const db = await this.db;
        // @ts-ignore
        return await db.close.apply(db, arguments);
    }

    async run() {
        const db = await this.db;
        // @ts-ignore
        return await db.run.apply(db, arguments);
    }

    async get() {
        const db = await this.db;
        // @ts-ignore
        return await db.get.apply(db, arguments);
    }

    async all() {
        const db = await this.db;
        // @ts-ignore
        return await db.all.apply(db, arguments);
    }

    async exec() {
        const db = await this.db;
        // @ts-ignore
        return await db.exec.apply(db, arguments);
    }

    async each() {
        const db = await this.db;
        // @ts-ignore
        return await db.each.apply(db, arguments);
    }

    async prepare() {
        const db = await this.db;
        // @ts-ignore
        return await db.prepare.apply(db, arguments);
    }

    async configure() {
        const db = await this.db;
        // @ts-ignore
        return await db.configure.apply(db, arguments);
    }

    async migrate() {
        const db = await this.db;
        // @ts-ignore
        return await db.migrate.apply(db, arguments);
    }

    async on() {
        const db = await this.db;
        // @ts-ignore
        return await db.on.apply(db, arguments);
    }
}

export const db: Database = new DatabasePromiseWrapper(dbPromise);
export {Database, Statement};
export {Statement};

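The new TypedDatabase interface lets callers state the expected row shape at the call site instead of working with untyped rows. A hedged sketch of how a caller might use it; the query against schema_version and its version column are illustrative assumptions, not code from this commit:

import {db, TypedDatabase} from "../db/database"; // path as used by the services in this commit

type SchemaVersionRow = {
    version: number,
};

// Hypothetical query: the generic parameter types the returned row, so
// row.version is a number instead of any, and the undefined case is explicit.
async function readSchemaVersion(database: TypedDatabase = db): Promise<number> {
    const row = await database.get<SchemaVersionRow>('SELECT version FROM schema_version LIMIT 1');
    return row?.version ?? 0;
}
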
@@ -2,7 +2,7 @@ import CONSTRAINTS from "../validation/constraints";
import ErrorTypes from "../utils/errorTypes";
import * as MailService from "../services/mailService";
import * as Resources from "../utils/resources";
import {normalizeString} from "../utils/strings";
import {normalizeString, parseInteger} from "../utils/strings";
import {forConstraint} from "../validation/validator";
import {Request, Response} from "express";
import {Mail, MailId} from "../types";

@@ -16,7 +16,7 @@ async function withValidMailId(req: Request): Promise<MailId> {
        throw {data: 'Invalid mail id.', type: ErrorTypes.badRequest};
    }

    return id;
    return parseInteger(id) as MailId;
}

async function doGet(req: Request): Promise<Mail> {

@@ -7,7 +7,7 @@ import * as Resources from "../utils/resources";
import {normalizeString} from "../utils/strings";
import {forConstraint} from "../validation/validator";
import {Request, Response} from "express";
import {MonitoringToken, to} from "../types";
import {MonitoringToken} from "../types";

const isValidToken = forConstraint(CONSTRAINTS.token, false);

@@ -39,7 +39,7 @@ export function confirm(req: Request, res: Response): void {
    if (!isValidToken(token)) {
        return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
    }
    const validatedToken: MonitoringToken = to(token);
    const validatedToken: MonitoringToken = token as MonitoringToken;

    MonitoringService.confirm(validatedToken)
        .then(node => Resources.success(res, {

@@ -59,7 +59,7 @@ export function disable(req: Request, res: Response): void {
    if (!isValidToken(token)) {
        return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
    }
    const validatedToken: MonitoringToken = to(token);
    const validatedToken: MonitoringToken = token as MonitoringToken;

    MonitoringService.disable(validatedToken)
        .then(node => Resources.success(res, {

@@ -8,9 +8,8 @@ import * as NodeService from "../services/nodeService";
import {normalizeMac, normalizeString} from "../utils/strings";
import {forConstraint, forConstraints} from "../validation/validator";
import * as Resources from "../utils/resources";
import {Entity} from "../utils/resources";
import {Request, Response} from "express";
import {EnhancedNode, isNodeSortField, MAC, Node, to, Token} from "../types";
import {EnhancedNode, isNodeSortField, MAC, Node, Token} from "../types";

const nodeFields = ['hostname', 'key', 'email', 'nickname', 'mac', 'coords', 'monitoring'];

@@ -49,7 +48,7 @@ export function update (req: Request, res: Response): void {
    if (!isValidToken(token)) {
        return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
    }
    const validatedToken: Token = to(token);
    const validatedToken: Token = token as Token;

    const node = getNormalizedNodeData(data);
    if (!isValidNode(node)) {

@@ -68,7 +67,7 @@ export function remove(req: Request, res: Response): void {
    if (!isValidToken(token)) {
        return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
    }
    const validatedToken: Token = to(token);
    const validatedToken: Token = token as Token;

    NodeService.deleteNode(validatedToken)
        .then(() => Resources.success(res, {}))

@@ -80,7 +79,7 @@ export function get(req: Request, res: Response): void {
    if (!isValidToken(token)) {
        return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
    }
    const validatedToken: Token = to(token);
    const validatedToken: Token = token as Token;

    NodeService.getNodeDataByToken(validatedToken)
        .then(node => Resources.success(res, node))

@@ -101,7 +100,7 @@ async function doGetAll(req: Request): Promise<{ total: number; pageNodes: any }
    const nodeStateByMac = await MonitoringService.getByMacs(macs);

    const enhancedNodes: EnhancedNode[] = _.map(realNodes, (node: Node): EnhancedNode => {
        const nodeState = nodeStateByMac[node.mac.value];
        const nodeState = nodeStateByMac[node.mac];
        if (nodeState) {
            return deepExtend({}, node, {
                site: nodeState.site,

@@ -9,7 +9,29 @@ import Logger from "../logger";
import * as MailTemplateService from "./mailTemplateService";
import * as Resources from "../utils/resources";
import {RestParams} from "../utils/resources";
import {isMailSortField, Mail, MailData, MailId, MailSortField, MailType} from "../types";
import {
    EmailAddress, isJSONObject,
    isMailSortField, isMailType, JSONObject,
    Mail,
    MailData,
    MailId,
    MailSortField,
    MailType,
    parseJSON,
    UnixTimestampSeconds
} from "../types";
import ErrorTypes from "../utils/errorTypes";

type EmaiQueueRow = {
    id: MailId,
    created_at: UnixTimestampSeconds,
    data: string,
    email: string,
    failures: number,
    modified_at: UnixTimestampSeconds,
    recipient: EmailAddress,
    sender: EmailAddress,
};

const MAIL_QUEUE_DB_BATCH_SIZE = 50;

@@ -24,7 +46,7 @@ function transporter() {
        {
            transport: 'smtp',
            pool: true
        }
        } as JSONObject
    ));

    MailTemplateService.configureTransporter(transporterSingleton);

@@ -57,18 +79,29 @@ async function sendMail(options: Mail): Promise<void> {
}

async function findPendingMailsBefore(beforeMoment: Moment, limit: number): Promise<Mail[]> {
    const rows = await db.all(
    const rows = await db.all<EmaiQueueRow>(
        'SELECT * FROM email_queue WHERE modified_at < ? AND failures < ? ORDER BY id ASC LIMIT ?',
        [beforeMoment.unix(), 5, limit],
    );

    return _.map(rows, row => deepExtend(
        {},
        row,
        {
            data: JSON.parse(row.data)
    return rows.map(row => {
        const mailType = row.email;
        if (!isMailType(mailType)) {
            throw new Error(`Invalid mailtype in database: ${mailType}`);
        }
    ));
        const data = parseJSON(row.data);
        if (!isJSONObject(data)) {
            throw new Error(`Invalid email data in database: ${typeof data}`);
        }
        return {
            id: row.id,
            email: mailType,
            sender: row.sender,
            recipient: row.recipient,
            data,
            failures: row.failures,
        };
    });
}

async function removePendingMailFromQueue(id: MailId): Promise<void> {

@@ -85,8 +118,7 @@ async function incrementFailureCounterForPendingEmail(id: MailId): Promise<void>
async function sendPendingMail(pendingMail: Mail): Promise<void> {
    try {
        await sendMail(pendingMail);
    }
    catch (error) {
    } catch (error) {
        // we only log the error and increment the failure counter as we want to continue with pending mails
        Logger.tag('mail', 'queue').error('Error sending pending mail[' + pendingMail.id + ']:', error);

@@ -98,7 +130,11 @@ async function sendPendingMail(pendingMail: Mail): Promise<void> {
}

async function doGetMail(id: MailId): Promise<Mail> {
    return await db.get('SELECT * FROM email_queue WHERE id = ?', [id]);
    const row = await db.get<Mail>('SELECT * FROM email_queue WHERE id = ?', [id]);
    if (row === undefined) {
        throw {data: 'Mail not found.', type: ErrorTypes.notFound};
    }
    return row;
}

export async function enqueue(sender: string, recipient: string, email: MailType, data: MailData): Promise<void> {

@@ -118,12 +154,12 @@ export async function getMail (id: MailId): Promise<Mail> {
}

export async function getPendingMails(restParams: RestParams): Promise<{ mails: Mail[], total: number }> {
    const row = await db.get(
    const row = await db.get<{ total: number }>(
        'SELECT count(*) AS total FROM email_queue',
        [],
    );

    const total = row.total;
    const total = row?.total || 0;

    const filter = Resources.filterClause(
        restParams,

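The rewritten findPendingMailsBefore shows the general approach used for rows coming out of the database: read them with a typed row shape, then narrow loosely typed columns with runtime guards (isMailType, isJSONObject) before building the domain object. A condensed, self-contained sketch of that narrowing step; the enum and guard below are simplified stand-ins for the ones in ../types:

enum MailType {
    MONITORING_CONFIRMATION = "monitoring-confirmation",
    MONITORING_ONLINE_AGAIN = "monitoring-online-again",
}

// Simplified stand-in for the toIsEnum()-generated guard from ../types.
function isMailType(arg: unknown): arg is MailType {
    return Object.values(MailType).includes(arg as MailType);
}

// Raw database column in, validated enum value out; the guard narrows the
// string so no cast is needed on the return.
function toMailType(rawColumn: string): MailType {
    if (!isMailType(rawColumn)) {
        throw new Error(`Invalid mailtype in database: ${rawColumn}`);
    }
    return rawColumn;
}
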
@@ -13,7 +13,13 @@ import {MailData, Mail} from "../types";
const templateBasePath = __dirname + '/../mailTemplates';
const snippetsBasePath = templateBasePath + '/snippets';

const templateFunctions: {[key: string]: (...data: MailData) => string} = {};
const templateFunctions: {
    [key: string]:
        | ((name: string, data: MailData) => string)
        | ((data: MailData) => string)
        | ((href: string, text: string) => string)
        | ((unix: number) => string)
} = {};

function renderSnippet(this: any, name: string, data: MailData): string {
    const snippetFile = snippetsBasePath + '/' + name + '.html';

@@ -1,6 +1,6 @@
import moment from 'moment';
import {ParsedNode, parseNode, parseNodesJson, parseTimestamp} from "./monitoringService";
import {MAC, OnlineState, to} from "../types";
import {Domain, MAC, OnlineState, Site} from "../types";
import Logger from '../logger';
import {MockLogger} from "../__mocks__/logger";

@@ -44,6 +44,7 @@ test('parseTimestamp() should fail parsing empty timestamp string', () => {

test('parseTimestamp() should fail parsing invalid timestamp string', () => {
    // given
    // noinspection UnnecessaryLocalVariableJS
    const timestamp = TIMESTAMP_INVALID_STRING;

    // when

@@ -240,12 +241,12 @@ test('parseNode() should succeed parsing node without site and domain', () => {
    // then
    const expectedParsedNode: ParsedNode = {
        mac: to("12:34:56:78:90:AB"),
        mac: "12:34:56:78:90:AB" as MAC,
        importTimestamp: importTimestamp,
        state: OnlineState.ONLINE,
        lastSeen: parseTimestamp(TIMESTAMP_VALID_STRING),
        site: to("<unknown-site>"),
        domain: to("<unknown-domain>"),
        site: "<unknown-site>" as Site,
        domain: "<unknown-domain>" as Domain,
    };
    expect(parseNode(importTimestamp, nodeData)).toEqual(expectedParsedNode);
});

@@ -272,12 +273,12 @@ test('parseNode() should succeed parsing node with site and domain', () => {
    // then
    const expectedParsedNode: ParsedNode = {
        mac: to("12:34:56:78:90:AB"),
        mac: "12:34:56:78:90:AB" as MAC,
        importTimestamp: importTimestamp,
        state: OnlineState.ONLINE,
        lastSeen: parseTimestamp(TIMESTAMP_VALID_STRING),
        site: to("test-site"),
        domain: to("test-domain")
        site: "test-site" as Site,
        domain: "test-domain" as Domain,
    };
    expect(parseNode(importTimestamp, nodeData)).toEqual(expectedParsedNode);
});

@@ -461,12 +462,12 @@ test('parseNodesJson() should parse valid nodes', () => {
    // then
    const expectedParsedNode: ParsedNode = {
        mac: to("12:34:56:78:90:AB"),
        mac: "12:34:56:78:90:AB" as MAC,
        importTimestamp: parseTimestamp(TIMESTAMP_VALID_STRING),
        state: OnlineState.ONLINE,
        lastSeen: parseTimestamp(TIMESTAMP_VALID_STRING),
        site: to("test-site"),
        domain: to("test-domain"),
        site: "test-site" as Site,
        domain: "test-domain" as Domain,
    };

    expect(result.importTimestamp.isValid()).toBe(true);

@@ -3,7 +3,7 @@ import moment, {Moment, unitOfTime} from "moment";
import request from "request";

import {config} from "../config";
import {db, Statement} from "../db/database";
import {db, RunResult} from "../db/database";
import * as DatabaseUtil from "../utils/databaseUtil";
import ErrorTypes from "../utils/errorTypes";
import Logger from "../logger";

@@ -12,14 +12,15 @@ import * as MailService from "../services/mailService";
import * as NodeService from "../services/nodeService";
import * as Resources from "../utils/resources";
import {RestParams} from "../utils/resources";
import {normalizeMac} from "../utils/strings";
import {normalizeMac, parseInteger} from "../utils/strings";
import {monitoringDisableUrl} from "../utils/urlBuilder";
import CONSTRAINTS from "../validation/constraints";
import {forConstraint} from "../validation/validator";
import {
    Domain,
    equal,
    Hostname,
    isMonitoringSortField,
    isOnlineState,
    MAC,
    MailType,
    MonitoringSortField,

@@ -29,10 +30,25 @@ import {
    NodeStateData,
    OnlineState,
    Site,
    to,
    UnixTimestampSeconds
} from "../types";

type NodeStateRow = {
    id: number,
    created_at: UnixTimestampSeconds,
    domain: Domain | null,
    hostname: Hostname | null,
    import_timestamp: UnixTimestampSeconds,
    last_seen: UnixTimestampSeconds,
    last_status_mail_sent: string | null,
    last_status_mail_type: string | null,
    mac: MAC,
    modified_at: UnixTimestampSeconds,
    monitoring_state: string | null,
    site: Site | null,
    state: string,
};

const MONITORING_STATE_MACS_CHUNK_SIZE = 100;
const NEVER_ONLINE_NODES_DELETION_CHUNK_SIZE = 20;
const MONITORING_MAILS_DB_BATCH_SIZE = 50;

@@ -193,7 +209,7 @@ export function parseNode(importTimestamp: Moment, nodeData: any): ParsedNode {
            'Node ' + nodeId + ': Invalid MAC: ' + nodeData.nodeinfo.network.mac
        );
    }
    const mac = normalizeMac(nodeData.nodeinfo.network.mac);
    const mac = normalizeMac(nodeData.nodeinfo.network.mac) as MAC;

    if (!_.isPlainObject(nodeData.flags)) {
        throw new Error(

@@ -214,23 +230,23 @@ export function parseNode(importTimestamp: Moment, nodeData: any): ParsedNode {
        );
    }

    let site = null;
    let site = "<unknown-site>" as Site; // FIXME: Handle this
    if (_.isPlainObject(nodeData.nodeinfo.system) && _.isString(nodeData.nodeinfo.system.site_code)) {
        site = nodeData.nodeinfo.system.site_code;
        site = nodeData.nodeinfo.system.site_code as Site;
    }

    let domain = null;
    let domain = "<unknown-domain>" as Domain; // FIXME: Handle this
    if (_.isPlainObject(nodeData.nodeinfo.system) && _.isString(nodeData.nodeinfo.system.domain_code)) {
        domain = nodeData.nodeinfo.system.domain_code;
        domain = nodeData.nodeinfo.system.domain_code as Domain;
    }

    return {
        mac: to(mac),
        mac,
        importTimestamp: importTimestamp,
        state: isOnline ? OnlineState.ONLINE : OnlineState.OFFLINE,
        lastSeen: lastSeen,
        site: to(site || '<unknown-site>'), // FIXME: Handle this
        domain: to(domain || '<unknown-domain>') // FIXME: Handle this
        site,
        domain,
    };
}

@@ -279,7 +295,7 @@ export function parseNodesJson(body: string): NodesParsingResult {
    return result;
}

async function updateSkippedNode(id: NodeId, node?: Node): Promise<Statement> {
async function updateSkippedNode(id: NodeId, node?: Node): Promise<RunResult> {
    return await db.run(
        'UPDATE node_state ' +
        'SET hostname = ?, monitoring_state = ?, modified_at = ?' +

@@ -352,8 +368,7 @@ async function sendMonitoringMailsBatched(
                {
                    node: node,
                    lastSeen: nodeState.last_seen,
                    disableUrl: monitoringDisableUrl(monitoringToken)

                    disableUrl: monitoringDisableUrl(monitoringToken),
                }
            );

@@ -378,7 +393,7 @@ async function sendMonitoringMailsBatched(
async function sendOnlineAgainMails(startTime: Moment): Promise<void> {
    await sendMonitoringMailsBatched(
        'online again',
        'monitoring-online-again',
        MailType.MONITORING_ONLINE_AGAIN,
        async (): Promise<any[]> => await db.all(
            'SELECT * FROM node_state ' +
            'WHERE modified_at < ? AND state = ? AND last_status_mail_type IN (' +

@@ -395,10 +410,11 @@ async function sendOnlineAgainMails(startTime: Moment): Promise<void> {
    );
}

async function sendOfflineMails(startTime: Moment, mailNumber: number): Promise<void> {
async function sendOfflineMails(startTime: Moment, mailType: MailType): Promise<void> {
    const mailNumber = parseInteger(mailType.split("-")[2]);
    await sendMonitoringMailsBatched(
        'offline ' + mailNumber,
        'monitoring-offline-' + mailNumber,
        mailType,
        async (): Promise<any[]> => {
            const previousType =
                mailNumber === 1 ? 'monitoring-online-again' : ('monitoring-offline-' + (mailNumber - 1));

@@ -556,12 +572,12 @@ export async function getAll(restParams: RestParams): Promise<{ total: number, m
    const where = Resources.whereCondition(restParams, filterFields);

    const row = await db.get(
    const row = await db.get<{ total: number }>(
        'SELECT count(*) AS total FROM node_state WHERE ' + where.query,
        _.concat([], where.params),
    );

    const total = row.total;
    const total = row?.total || 0;

    const filter = Resources.filterClause(
        restParams,

@@ -578,7 +594,7 @@ export async function getAll(restParams: RestParams): Promise<{ total: number, m
    return {monitoringStates, total};
}

export async function getByMacs(macs: MAC[]): Promise<Record<string, NodeStateData>> {
export async function getByMacs(macs: MAC[]): Promise<Record<MAC, NodeStateData>> {
    if (_.isEmpty(macs)) {
        return {};
    }

@@ -588,13 +604,22 @@ export async function getByMacs(macs: MAC[]): Promise<Record<string, NodeStateDa
    for (const subMacs of _.chunk(macs, MONITORING_STATE_MACS_CHUNK_SIZE)) {
        const inCondition = DatabaseUtil.inCondition('mac', subMacs);

        const rows = await db.all(
        const rows = await db.all<NodeStateRow>(
            'SELECT * FROM node_state WHERE ' + inCondition.query,
            _.concat([], inCondition.params),
        );

        for (const row of rows) {
            nodeStateByMac[row.mac] = row;
            const onlineState = row.state;
            if (!isOnlineState(onlineState)) {
                throw new Error(`Invalid online state in database: "${onlineState}"`);
            }

            nodeStateByMac[row.mac] = {
                site: row.site || "<unknown-site>" as Site, // FIXME: Handle this
                domain: row.domain || "<unknown-domain>" as Domain, // FIXME: Handle this
                state: onlineState,
            };
        }
    }

@@ -603,7 +628,7 @@ export async function getByMacs(macs: MAC[]): Promise<Record<string, NodeStateDa
export async function confirm(token: MonitoringToken): Promise<Node> {
    const {node, nodeSecrets} = await NodeService.getNodeDataWithSecretsByMonitoringToken(token);
    if (!node.monitoring || !nodeSecrets.monitoringToken || !equal(nodeSecrets.monitoringToken, token)) {
    if (!node.monitoring || !nodeSecrets.monitoringToken || nodeSecrets.monitoringToken !== token) {
        throw {data: 'Invalid token.', type: ErrorTypes.badRequest};
    }

@@ -619,7 +644,7 @@ export async function confirm(token: MonitoringToken): Promise<Node> {
export async function disable(token: MonitoringToken): Promise<Node> {
    const {node, nodeSecrets} = await NodeService.getNodeDataWithSecretsByMonitoringToken(token);
    if (!node.monitoring || !nodeSecrets.monitoringToken || !equal(nodeSecrets.monitoringToken, token)) {
    if (!node.monitoring || !nodeSecrets.monitoringToken || nodeSecrets.monitoringToken !== token) {
        throw {data: 'Invalid token.', type: ErrorTypes.badRequest};
    }

@@ -654,14 +679,18 @@ export async function sendMonitoringMails(): Promise<void> {
            .error('Error sending "online again" mails.', error);
    }

    for (let mailNumber = 1; mailNumber <= 3; mailNumber++) {
    for (const mailType of [
        MailType.MONITORING_OFFLINE_1,
        MailType.MONITORING_OFFLINE_2,
        MailType.MONITORING_OFFLINE_3,
    ]) {
        try {
            await sendOfflineMails(startTime, mailNumber);
            await sendOfflineMails(startTime, mailType);
        } catch (error) {
            // only logging an continuing with next type
            Logger
                .tag('monitoring', 'mail-sending')
                .error('Error sending "offline ' + mailNumber + '" mails.', error);
                .error('Error sending "' + mailType + '" mails.', error);
        }
    }
}

@@ -767,7 +796,7 @@ async function deleteNeverOnlineNodesBefore(deleteBefore: UnixTimestampSeconds):
}

async function deleteNodesOfflineSinceBefore(deleteBefore: UnixTimestampSeconds): Promise<void> {
    const rows = await db.all(
    const rows = await db.all<NodeStateRow>(
        'SELECT * FROM node_state WHERE state = ? AND last_seen < ?',
        [
            'OFFLINE',

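getByMacs now returns Record<MAC, NodeStateData> instead of Record<string, NodeStateData>: at runtime the keys are still plain strings, but the compiler only accepts keys that have already been typed as MAC. A standalone sketch of that lookup shape, with NodeStateData reduced to the fields visible in this diff:

type MAC = string & { readonly __tag: unique symbol };
type NodeStateData = { site: string, domain: string, state: string };

// Same shape as the map built inside getByMacs().
const nodeStateByMac: Record<MAC, NodeStateData> = {};

const mac = "12:34:56:78:90:AB" as MAC; // cast happens once, at the boundary
nodeStateByMac[mac] = { site: "test-site", domain: "test-domain", state: "ONLINE" };
const state = nodeStateByMac[mac].state;
console.log(state);
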
@@ -11,14 +11,18 @@ import * as MailService from "../services/mailService";
import {normalizeString} from "../utils/strings";
import {monitoringConfirmUrl, monitoringDisableUrl} from "../utils/urlBuilder";
import {
    Coordinates,
    EmailAddress,
    FastdKey,
    Hostname,
    MAC,
    MailType,
    MonitoringState,
    MonitoringToken,
    Nickname,
    Node,
    NodeSecrets,
    NodeStatistics,
    to,
    Token,
    toUnixTimestampSeconds,
    unhandledEnumField,

@@ -60,18 +64,17 @@ enum LINE_PREFIX {

const filenameParts = ['hostname', 'mac', 'key', 'token', 'monitoringToken'];

function generateToken<Type extends { readonly __tag: symbol, value: any } =
    { readonly __tag: unique symbol, value: never }>(): Type {
    return to<Type>(crypto.randomBytes(8).toString('hex'));
function generateToken<Type extends string & { readonly __tag: symbol } = never>(): Type {
    return crypto.randomBytes(8).toString('hex') as Type;
}

function toNodeFilesPattern(filter: NodeFilter): string {
    const fields: (string | undefined)[] = [
        filter.hostname,
        filter.mac?.value,
        filter.key?.value,
        filter.token?.value,
        filter.monitoringToken?.value,
        filter.mac,
        filter.key,
        filter.token,
        filter.monitoringToken,
    ];

    const pattern = fields.map((value) => value || '*').join('@');

@@ -124,7 +127,7 @@ function isDuplicate(filter: NodeFilter, token: Token | null): boolean {
        return true;
    }

    return parseNodeFilename(files[0]).token !== token.value;
    return parseNodeFilename(files[0]).token !== token;
}

function checkNoDuplicates(token: Token | null, node: Node, nodeSecrets: NodeSecrets): void {

@@ -169,9 +172,9 @@ function getNodeValue(prefix: LINE_PREFIX, node: Node, nodeSecrets: NodeSecrets)
        case LINE_PREFIX.COORDS:
            return node.coords || "";
        case LINE_PREFIX.MAC:
            return node.mac.value;
            return node.mac;
        case LINE_PREFIX.TOKEN:
            return node.token.value;
            return node.token;
        case LINE_PREFIX.MONITORING:
            if (node.monitoring && node.monitoringConfirmed) {
                return "aktiv";

@@ -180,7 +183,7 @@ function getNodeValue(prefix: LINE_PREFIX, node: Node, nodeSecrets: NodeSecrets)
            }
            return "";
        case LINE_PREFIX.MONITORING_TOKEN:
            return nodeSecrets.monitoringToken?.value || "";
            return nodeSecrets.monitoringToken || "";
        default:
            return unhandledEnumField(prefix);
    }

@@ -255,13 +258,13 @@ async function deleteNodeFile(token: Token): Promise<void> {
}

class NodeBuilder {
    public token: Token = to(""); // FIXME: Either make token optional in Node or handle this!
    public nickname: string = "";
    public email: string = "";
    public hostname: string = ""; // FIXME: Either make hostname optional in Node or handle this!
    public coords?: string;
    public token: Token = "" as Token; // FIXME: Either make token optional in Node or handle this!
    public nickname: Nickname = "" as Nickname;
    public email: EmailAddress = "" as EmailAddress;
    public hostname: Hostname = "" as Hostname; // FIXME: Either make hostname optional in Node or handle this!
    public coords?: Coordinates;
    public key?: FastdKey;
    public mac: MAC = to(""); // FIXME: Either make mac optional in Node or handle this!
    public mac: MAC = "" as MAC; // FIXME: Either make mac optional in Node or handle this!
    public monitoring: boolean = false;
    public monitoringConfirmed: boolean = false;
    public monitoringState: MonitoringState = MonitoringState.DISABLED;

@@ -291,22 +294,22 @@ class NodeBuilder {
function setNodeValue(prefix: LINE_PREFIX, node: NodeBuilder, nodeSecrets: NodeSecrets, value: string) {
    switch (prefix) {
        case LINE_PREFIX.HOSTNAME:
            node.hostname = value;
            node.hostname = value as Hostname;
            break;
        case LINE_PREFIX.NICKNAME:
            node.nickname = value;
            node.nickname = value as Nickname;
            break;
        case LINE_PREFIX.EMAIL:
            node.email = value;
            node.email = value as EmailAddress;
            break;
        case LINE_PREFIX.COORDS:
            node.coords = value;
            node.coords = value as Coordinates;
            break;
        case LINE_PREFIX.MAC:
            node.mac = to(value);
            node.mac = value as MAC;
            break;
        case LINE_PREFIX.TOKEN:
            node.token = to(value);
            node.token = value as Token;
            break;
        case LINE_PREFIX.MONITORING:
            const active = value === 'aktiv';

@@ -317,7 +320,7 @@ function setNodeValue(prefix: LINE_PREFIX, node: NodeBuilder, nodeSecrets: NodeS
                active ? MonitoringState.ACTIVE : (pending ? MonitoringState.PENDING : MonitoringState.DISABLED);
            break;
        case LINE_PREFIX.MONITORING_TOKEN:
            nodeSecrets.monitoringToken = to<MonitoringToken>(value);
            nodeSecrets.monitoringToken = value as MonitoringToken;
            break;
        default:
            return unhandledEnumField(prefix);

@@ -340,7 +343,7 @@ async function parseNodeFile(file: string): Promise<{ node: Node, nodeSecrets: N
    for (const line of lines) {
        if (line.substring(0, 5) === 'key "') {
            node.key = to<FastdKey>(normalizeString(line.split('"')[1]));
            node.key = normalizeString(line.split('"')[1]) as FastdKey;
        } else {
            for (const prefix of Object.values(LINE_PREFIX)) {
                if (line.substring(0, prefix.length) === prefix) {

@@ -393,7 +396,7 @@ async function sendMonitoringConfirmationMail(node: Node, nodeSecrets: NodeSecre
    await MailService.enqueue(
        config.server.email.from,
        node.nickname + ' <' + node.email + '>',
        'monitoring-confirmation',
        MailType.MONITORING_CONFIRMATION,
        {
            node: node,
            confirmUrl: confirmUrl,

@@ -1,32 +1,17 @@
import {ArrayField, Field, RawJsonField} from "sparkson"
import {ClientConfig, to} from "./shared";
import {ClientConfig, JSONObject, Url} from "./shared";

// TODO: Replace string types by more specific types like URL, Password, etc.

export type Username = {
    value: string;
    readonly __tag: unique symbol
};
export type CleartextPassword = {
    value: string;
    readonly __tag: unique symbol
};
export type PasswordHash = {
    value: string;
    readonly __tag: unique symbol
};
export type Username = string & { readonly __tag: unique symbol };
export type CleartextPassword = string & { readonly __tag: unique symbol };
export type PasswordHash = string & { readonly __tag: unique symbol };

export class UsersConfig {
    public username: Username;
    public passwordHash: PasswordHash;

    constructor(
        @Field("user") username: string,
        @Field("passwordHash") passwordHash: string,
    ) {
        this.username = to(username);
        this.passwordHash = to(passwordHash);
    }
        @Field("user") public username: Username,
        @Field("passwordHash") public passwordHash: PasswordHash,
    ) {}
}

export class LoggingConfig {

@@ -49,19 +34,19 @@ export class EmailConfig {
        @Field("from") public from: string,

        // For details see: https://nodemailer.com/2-0-0-beta/setup-smtp/
        @RawJsonField("smtp") public smtp: any, // TODO: Better types!
        @RawJsonField("smtp") public smtp: JSONObject,
    ) {}
}

export class ServerMapConfig {
    constructor(
        @ArrayField("nodesJsonUrl", String) public nodesJsonUrl: string[],
        @ArrayField("nodesJsonUrl", String) public nodesJsonUrl: Url[],
    ) {}
}

export class ServerConfig {
    constructor(
        @Field("baseUrl") public baseUrl: string,
        @Field("baseUrl") public baseUrl: Url,
        @Field("port") public port: number,

        @Field("databaseFile") public databaseFile: string,

@@ -1,4 +1,4 @@
import {Domain, MonitoringToken, OnlineState, Site} from "./shared";
import {Domain, EmailAddress, JSONObject, MonitoringToken, OnlineState, Site, toIsEnum} from "./shared";

export * from "./config";
export * from "./logger";

@@ -15,15 +15,24 @@ export type NodeSecrets = {
    monitoringToken?: MonitoringToken,
};

export type MailId = string;
export type MailData = any;
export type MailType = string;
export type MailId = number & { readonly __tag: unique symbol };
export type MailData = JSONObject;

export enum MailType {
    MONITORING_OFFLINE_1 = "monitoring-offline-1",
    MONITORING_OFFLINE_2 = "monitoring-offline-2",
    MONITORING_OFFLINE_3 = "monitoring-offline-3",
    MONITORING_ONLINE_AGAIN = "monitoring-online-again",
    MONITORING_CONFIRMATION = "monitoring-confirmation",
}

export const isMailType = toIsEnum(MailType);

export interface Mail {
    id: MailId,
    email: MailType,
    sender: string,
    recipient: string,
    sender: EmailAddress,
    recipient: EmailAddress,
    data: MailData,
    failures: number,
}

@ -1,8 +1,60 @@
|
|||
import {ArrayField, Field, RawJsonField} from "sparkson";
|
||||
import exp from "constants";
|
||||
|
||||
// Types shared with the client.
|
||||
export type TypeGuard<T> = (arg: unknown) => arg is T;
|
||||
|
||||
export function parseJSON(str: string): JSONValue {
|
||||
const json = JSON.parse(str);
|
||||
if (!isJSONValue(json)) {
|
||||
throw new Error("Invalid JSON returned. Should never happen.");
|
||||
}
|
||||
return json;
|
||||
}
|
||||
|
||||
export type JSONValue =
|
||||
| null
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| JSONObject
|
||||
| JSONArray;
|
||||
|
||||
export function isJSONValue(arg: unknown): arg is JSONValue {
|
||||
return (
|
||||
arg === null ||
|
||||
isString(arg) ||
|
||||
isNumber(arg) ||
|
||||
isBoolean(arg) ||
|
||||
isJSONObject(arg) ||
|
||||
isJSONArray(arg)
|
||||
);
|
||||
}
|
||||
|
||||
export interface JSONObject {
|
||||
[x: string]: JSONValue;
|
||||
}
|
||||
|
||||
export function isJSONObject(arg: unknown): arg is JSONObject {
|
||||
if (!isObject(arg)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const obj = arg as object;
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
if (!isString(key) || !isJSONValue(value)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
export interface JSONArray extends Array<JSONValue> {
|
||||
}
|
||||
|
||||
export const isJSONArray = toIsArray(isJSONValue);
|
||||
|
||||
export type EnumValue<E> = E[keyof E];
|
||||
export type EnumTypeGuard<E> = TypeGuard<EnumValue<E>>;
|
||||
|
||||
|
@ -10,33 +62,10 @@ export function unhandledEnumField(field: never): never {
|
|||
throw new Error(`Unhandled enum field: ${field}`);
|
||||
}
|
||||
|
||||
export function to<Type extends { readonly __tag: symbol, value: any } = { readonly __tag: unique symbol, value: never }>(value: Type['value']): Type {
|
||||
return value as any as Type;
|
||||
}
|
||||
|
||||
export function lift2<Result, Type extends { readonly __tag: symbol, value: any }>(callback: (a: Type["value"], b: Type["value"]) => Result): (newtype1: Type, newtype2: Type) => Result {
|
||||
return (a, b) => callback(a.value, b.value);
|
||||
}
|
||||
|
||||
export function equal<Result, Type extends { readonly __tag: symbol, value: any }>(a: Type, b: Type): boolean {
|
||||
return lift2((a, b) => a === b)(a, b);
|
||||
}
|
||||
|
||||
export function isObject(arg: unknown): arg is object {
|
||||
return arg !== null && typeof arg === "object";
|
||||
}
|
||||
|
||||
export function toIsNewtype<Type extends { readonly __tag: symbol, value: Value } = { readonly __tag: unique symbol, value: never }, Value = any>(isValue: TypeGuard<Value>): TypeGuard<Type> {
|
||||
// TODO: Add validation pattern.
|
||||
return (arg: unknown): arg is Type => {
|
||||
if (!isObject(arg)) {
|
||||
return false;
|
||||
}
|
||||
const newtype = arg as Type;
|
||||
return isValue(newtype.value);
|
||||
}
|
||||
}
|
||||
|
||||
export function isArray<T>(arg: unknown, isT: TypeGuard<T>): arg is Array<T> {
|
||||
if (!Array.isArray(arg)) {
|
||||
return false;
|
||||
|
@ -77,11 +106,15 @@ export function isOptional<T>(arg: unknown, isT: TypeGuard<T>): arg is (T | unde
|
|||
return arg === undefined || isT(arg);
|
||||
}
|
||||
|
||||
export type Version = string;
|
||||
export type Url = string & { readonly __tag: unique symbol };
|
||||
export const isUrl = isString;
|
||||
|
||||
// Should be good enough for now.
|
||||
export type Version = string & { readonly __tag: unique symbol };
|
||||
export const isVersion = isString;
|
||||
|
||||
export type EmailAddress = string & { readonly __tag: unique symbol };
|
||||
export const isEmailAddress = isString;
|
||||
|
||||
export type NodeStatistics = {
|
||||
registered: number;
|
||||
withVPN: number;
|
||||
|
@ -119,10 +152,11 @@ export class CommunityConfig {
|
|||
constructor(
|
||||
@Field("name") public name: string,
|
||||
@Field("domain") public domain: string,
|
||||
@Field("contactEmail") public contactEmail: string,
|
||||
@ArrayField("sites", String) public sites: string[],
|
||||
@ArrayField("domains", String) public domains: string[],
|
||||
) {}
|
||||
@Field("contactEmail") public contactEmail: EmailAddress,
|
||||
@ArrayField("sites", String) public sites: Site[],
|
||||
@ArrayField("domains", String) public domains: Domain[],
|
||||
) {
|
||||
}
|
||||
}
|
||||
|
||||
export function isCommunityConfig(arg: unknown): arg is CommunityConfig {
|
||||
|
@ -133,17 +167,18 @@ export function isCommunityConfig(arg: unknown): arg is CommunityConfig {
|
|||
return (
|
||||
isString(cfg.name) &&
|
||||
isString(cfg.domain) &&
|
||||
isString(cfg.contactEmail) &&
|
||||
isArray(cfg.sites, isString) &&
|
||||
isArray(cfg.domains, isString)
|
||||
isEmailAddress(cfg.contactEmail) &&
|
||||
isArray(cfg.sites, isSite) &&
|
||||
isArray(cfg.domains, isDomain)
|
||||
);
|
||||
}
|
||||
|
||||
export class LegalConfig {
|
||||
constructor(
|
||||
@Field("privacyUrl", true) public privacyUrl?: string,
|
||||
@Field("imprintUrl", true) public imprintUrl?: string,
|
||||
) {}
|
||||
@Field("privacyUrl", true) public privacyUrl?: Url,
|
||||
@Field("imprintUrl", true) public imprintUrl?: Url,
|
||||
) {
|
||||
}
|
||||
}
|
||||
|
||||
export function isLegalConfig(arg: unknown): arg is LegalConfig {
|
||||
|
@ -152,15 +187,16 @@ export function isLegalConfig(arg: unknown): arg is LegalConfig {
|
|||
}
|
||||
const cfg = arg as LegalConfig;
|
||||
return (
|
||||
isOptional(cfg.privacyUrl, isString) &&
|
||||
isOptional(cfg.imprintUrl, isString)
|
||||
isOptional(cfg.privacyUrl, isUrl) &&
|
||||
isOptional(cfg.imprintUrl, isUrl)
|
||||
);
|
||||
}
|
||||
|
||||
export class ClientMapConfig {
|
||||
constructor(
|
||||
@Field("mapUrl") public mapUrl: string,
|
||||
) {}
|
||||
@Field("mapUrl") public mapUrl: Url,
|
||||
) {
|
||||
}
|
||||
}
|
||||
|
||||
export function isClientMapConfig(arg: unknown): arg is ClientMapConfig {
|
||||
|
@ -168,13 +204,14 @@ export function isClientMapConfig(arg: unknown): arg is ClientMapConfig {
|
|||
return false;
|
||||
}
|
||||
const cfg = arg as ClientMapConfig;
|
||||
return isString(cfg.mapUrl);
|
||||
return isUrl(cfg.mapUrl);
|
||||
}
|
||||
|
||||
export class MonitoringConfig {
|
||||
constructor(
|
||||
@Field("enabled") public enabled: boolean,
|
||||
) {}
|
||||
) {
|
||||
}
|
||||
}
|
||||
|
||||
export function isMonitoringConfig(arg: unknown): arg is MonitoringConfig {
|
||||
|
@ -185,43 +222,45 @@ export function isMonitoringConfig(arg: unknown): arg is MonitoringConfig {
|
|||
return isBoolean(cfg.enabled);
|
||||
}
|
||||
|
||||
export class Coords {
|
||||
export class CoordinatesConfig {
|
||||
constructor(
|
||||
@Field("lat") public lat: number,
|
||||
@Field("lng") public lng: number,
|
||||
) {}
|
||||
) {
|
||||
}
|
||||
}
|
||||
|
||||
export function isCoords(arg: unknown): arg is Coords {
|
||||
export function isCoordinatesConfig(arg: unknown): arg is CoordinatesConfig {
|
||||
if (!isObject(arg)) {
|
||||
return false;
|
||||
}
|
||||
const coords = arg as Coords;
|
||||
const coords = arg as CoordinatesConfig;
|
||||
return (
|
||||
isNumber(coords.lat) &&
|
||||
isNumber(coords.lng)
|
||||
);
|
||||
}
|
||||
|
||||
export class CoordsSelectorConfig {
|
||||
export class CoordinatesSelectorConfig {
|
||||
constructor(
|
||||
@Field("lat") public lat: number,
|
||||
@Field("lng") public lng: number,
|
||||
@Field("defaultZoom") public defaultZoom: number,
|
||||
@RawJsonField("layers") public layers: any, // TODO: Better types!
|
||||
) {}
|
||||
@RawJsonField("layers") public layers: JSONObject,
|
||||
) {
|
||||
}
|
||||
}
|
||||
|
||||
export function isCoordsSelectorConfig(arg: unknown): arg is CoordsSelectorConfig {
|
||||
export function isCoordinatesSelectorConfig(arg: unknown): arg is CoordinatesSelectorConfig {
|
||||
if (!isObject(arg)) {
|
||||
return false;
|
||||
}
|
||||
const cfg = arg as CoordsSelectorConfig;
|
||||
const cfg = arg as CoordinatesSelectorConfig;
|
||||
return (
|
||||
isNumber(cfg.lat) &&
|
||||
isNumber(cfg.lng) &&
|
||||
isNumber(cfg.defaultZoom) &&
|
||||
isObject(cfg.layers) // TODO: Better types!
|
||||
isJSONObject(cfg.layers)
|
||||
);
|
||||
}
|
||||
|
@@ -229,8 +268,9 @@ export class OtherCommunityInfoConfig {
    constructor(
        @Field("showInfo") public showInfo: boolean,
        @Field("showBorderForDebugging") public showBorderForDebugging: boolean,
        @ArrayField("localCommunityPolygon", Coords) public localCommunityPolygon: Coords[],
    ) {}
        @ArrayField("localCommunityPolygon", CoordinatesConfig) public localCommunityPolygon: CoordinatesConfig[],
    ) {
    }
}

export function isOtherCommunityInfoConfig(arg: unknown): arg is OtherCommunityInfoConfig {

@@ -241,7 +281,7 @@ export function isOtherCommunityInfoConfig(arg: unknown): arg is OtherCommunityI
    return (
        isBoolean(cfg.showInfo) &&
        isBoolean(cfg.showBorderForDebugging) &&
        isArray(cfg.localCommunityPolygon, isCoords)
        isArray(cfg.localCommunityPolygon, isCoordinatesConfig)
    );
}

@@ -251,7 +291,7 @@ export class ClientConfig {
        @Field("legal") public legal: LegalConfig,
        @Field("map") public map: ClientMapConfig,
        @Field("monitoring") public monitoring: MonitoringConfig,
        @Field("coordsSelector") public coordsSelector: CoordsSelectorConfig,
        @Field("coordsSelector") public coordsSelector: CoordinatesSelectorConfig,
        @Field("otherCommunityInfo") public otherCommunityInfo: OtherCommunityInfoConfig,
        @Field("rootPath", true, undefined, "/") public rootPath: string,
    ) {

@@ -268,42 +308,33 @@ export function isClientConfig(arg: unknown): arg is ClientConfig {
        isLegalConfig(cfg.legal) &&
        isClientMapConfig(cfg.map) &&
        isMonitoringConfig(cfg.monitoring) &&
        isCoordsSelectorConfig(cfg.coordsSelector) &&
        isCoordinatesSelectorConfig(cfg.coordsSelector) &&
        isOtherCommunityInfoConfig(cfg.otherCommunityInfo) &&
        isString(cfg.rootPath)
    );
}

// TODO: Token type.
export type Token = {
    value: string;
    readonly __tag: unique symbol
};
export const isToken = toIsNewtype<Token>(isString);
export type Token = string & { readonly __tag: unique symbol };
export const isToken = isString;

export type FastdKey = {
    value: string;
    readonly __tag: unique symbol
};
export const isFastdKey = toIsNewtype<FastdKey>(isString);
export type FastdKey = string & { readonly __tag: unique symbol };
export const isFastdKey = isString;

export type MAC = {
    value: string;
    readonly __tag: unique symbol
};
export const isMAC = toIsNewtype<MAC>(isString);
export type MAC = string & { readonly __tag: unique symbol };
export const isMAC = isString;

export type UnixTimestampSeconds = number & { readonly __tag: unique symbol };
export const isUnixTimestampSeconds = isNumber;

export type UnixTimestampMilliseconds = number & { readonly __tag: unique symbol };
export const isUnixTimestampMilliseconds = isNumber;
export function toUnixTimestampSeconds(ms: UnixTimestampMilliseconds): UnixTimestampSeconds {
    return Math.floor(ms / 1000) as UnixTimestampSeconds;
}
export type MonitoringToken = {
    value: string;
    readonly __tag: unique symbol
};
export type MonitoringToken = string & { readonly __tag: unique symbol };

export enum MonitoringState {
    ACTIVE = "active",
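Across this hunk the wrapper-object newtypes ({ value: string } plus a tag) become branded primitives, and the removed toIsNewtype guards collapse to plain isString/isNumber. If the guards should keep narrowing to the branded types rather than to bare string, a generic helper in the spirit of the removed toIsNewtype could be retained; a minimal sketch under that assumption:

    // Sketch only: wrap a primitive guard so it narrows to a branded newtype.
    function toIsNewtype<Newtype extends Base, Base>(
        isBase: (arg: unknown) => arg is Base
    ): (arg: unknown) => arg is Newtype {
        return (arg: unknown): arg is Newtype => isBase(arg);
    }

    // Mirrors the isString guard used in this module.
    const isString = (arg: unknown): arg is string => typeof arg === "string";

    // Usage with the branded Token from this diff; the other guards would follow suit.
    const isToken = toIsNewtype<Token, string>(isString);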
@@ -313,25 +344,31 @@ export enum MonitoringState {

export const isMonitoringState = toIsEnum(MonitoringState);

export type NodeId = {
    value: string;
    readonly __tag: unique symbol
};
export type NodeId = string & { readonly __tag: unique symbol };

export type Hostname = string & { readonly __tag: unique symbol };
export const isHostname = isString;

export type Nickname = string & { readonly __tag: unique symbol };
export const isNickname = isString;

export type Coordinates = string & { readonly __tag: unique symbol };
export const isCoordinates = isString;

// TODO: More Newtypes
export interface Node {
export type Node = {
    token: Token;
    nickname: string;
    email: string;
    hostname: string;
    coords?: string; // TODO: Use object with longitude and latitude.
    nickname: Nickname;
    email: EmailAddress;
    hostname: Hostname;
    coords?: Coordinates;
    key?: FastdKey;
    mac: MAC;
    monitoring: boolean;
    monitoringConfirmed: boolean;
    monitoringState: MonitoringState;
    modifiedAt: UnixTimestampSeconds;
}
};

export function isNode(arg: unknown): arg is Node {
    if (!isObject(arg)) {

@@ -340,16 +377,16 @@ export function isNode(arg: unknown): arg is Node {
    const node = arg as Node;
    return (
        isToken(node.token) &&
        isString(node.nickname) &&
        isString(node.email) &&
        isString(node.hostname) &&
        isOptional(node.coords, isString) &&
        isNickname(node.nickname) &&
        isEmailAddress(node.email) &&
        isHostname(node.hostname) &&
        isOptional(node.coords, isCoordinates) &&
        isOptional(node.key, isFastdKey) &&
        isMAC(node.mac) &&
        isBoolean(node.monitoring) &&
        isBoolean(node.monitoringConfirmed) &&
        isMonitoringState(node.monitoringState) &&
        isNumber(node.modifiedAt)
        isUnixTimestampSeconds(node.modifiedAt)
    );
}

@@ -360,17 +397,11 @@ export enum OnlineState {

export const isOnlineState = toIsEnum(OnlineState);

export type Site = {
    value: string;
    readonly __tag: unique symbol
};
export const isSite = toIsNewtype<Site>(isString);
export type Site = string & { readonly __tag: unique symbol };
export const isSite = isString;

export type Domain = {
    value: string;
    readonly __tag: unique symbol
};
export const isDomain = toIsNewtype<Domain>(isString);
export type Domain = string & { readonly __tag: unique symbol };
export const isDomain = isString;

export interface EnhancedNode extends Node {
    site?: Site,
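With every field of Node branded, isNode narrows a completely untyped payload down to the Node shape in one step. A minimal usage sketch; the handler name and its error strategy are assumptions:

    // Sketch only: validate untyped input before treating it as a Node.
    function handleNodePayload(payload: unknown): void {
        if (!isNode(payload)) {
            throw new Error("Invalid node payload");
        }
        // From here on payload is a Node, so the branded fields are available.
        console.info(`Updating node ${payload.hostname} (${payload.mac})`);
    }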
@@ -17,7 +17,11 @@ export function normalizeMac (mac: string): string {
    return macParts.join(':');
}

export function parseInteger (str: string): number | undefined {
export function parseInteger (str: string): number {
    const parsed = _.parseInt(str, 10);
    return parsed.toString() === str ? parsed : undefined;
    if (parsed.toString() === str) {
        return parsed;
    } else {
        throw new SyntaxError(`String does not represent a valid integer: "${str}"`);
    }
}
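parseInteger now throws a SyntaxError instead of returning undefined, so callers move from undefined checks to try/catch. A sketch of the adjusted calling convention; the helper and its fallback value are assumptions:

    // Sketch only: one way a caller could adapt to the throwing parseInteger.
    function parseIntegerOrDefault(raw: string, fallback: number): number {
        try {
            return parseInteger(raw);
        } catch {
            // A SyntaxError signals a malformed value; fall back instead of crashing.
            return fallback;
        }
    }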
@@ -1,11 +1,9 @@
import _ from "lodash"
import {config} from "../config"
import {MonitoringToken} from "../types"
import {MonitoringToken, Url} from "../types"

// TODO: Typed URLs

function formUrl(route: string, queryParams?: { [key: string]: string }): string {
    let url = config.server.baseUrl;
function formUrl(route: string, queryParams?: { [key: string]: string }): Url {
    let url = config.server.baseUrl as string;
    if (route || queryParams) {
        url += '/#/';
    }

@@ -24,17 +22,17 @@ function formUrl(route: string, queryParams?: { [key: string]: string }): string
            '&'
        );
    }
    return url;
    return url as Url;
}

export function editNodeUrl(): string {
export function editNodeUrl(): Url {
    return formUrl('update');
}

export function monitoringConfirmUrl(monitoringToken: MonitoringToken): string {
    return formUrl('monitoring/confirm', {token: monitoringToken.value});
export function monitoringConfirmUrl(monitoringToken: MonitoringToken): Url {
    return formUrl('monitoring/confirm', {token: monitoringToken});
}

export function monitoringDisableUrl(monitoringToken: MonitoringToken): string {
    return formUrl('monitoring/disable', {token: monitoringToken.value});
export function monitoringDisableUrl(monitoringToken: MonitoringToken): Url {
    return formUrl('monitoring/disable', {token: monitoringToken});
}
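Because MonitoringToken is now a branded string, it can be passed straight into the query parameters, and the builders hand back a branded Url. A usage sketch; the literal token and the cast are assumptions, a real token would come from persistent storage:

    // Sketch only: brand a plain string once at the boundary, then reuse it.
    const token = "0123456789abcdef" as MonitoringToken;
    const confirmUrl: Url = monitoringConfirmUrl(token);
    const disableUrl: Url = monitoringDisableUrl(token);

    // A branded Url is still a string at runtime, so it drops straight into templates.
    console.info(`confirm: ${confirmUrl}, disable: ${disableUrl}`);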