Sqlite upgrade and type refactorings

baldo 2022-07-18 17:49:42 +02:00
parent 01691a0c20
commit 28c8429edd
20 changed files with 873 additions and 663 deletions

View file

@@ -12,7 +12,6 @@
 * Split into seperate packages for server and frontend.
 * Make admin panel part of new frontend package.
 * Get rid of grunt.
-* Bluebird for promises?
 * Use generated type guards.

 ## Mid term

View file

@@ -52,7 +52,8 @@
     "request": "^2.88.2",
     "serve-static": "^1.14.1",
     "sparkson": "^1.3.6",
-    "sqlite": "^3.0.6"
+    "sqlite": "^4.1.1",
+    "sqlite3": "^5.0.9"
   },
   "devDependencies": {
     "@types/async": "^3.2.15",

View file

@@ -9,7 +9,7 @@ import {promises as fs} from "graceful-fs";
 import {config} from "./config";
 import type {CleartextPassword, PasswordHash, Username} from "./types";
-import {isString, lift2, to} from "./types";
+import {isString} from "./types";
 import Logger from "./logger";

 export const app: Express = express();
@@ -17,14 +17,14 @@ export const app: Express = express();
 /**
  * Used to have some password comparison in case the user does not exist to avoid timing attacks.
  */
-const INVALID_PASSWORD_HASH: PasswordHash = to("$2b$05$JebmV1q/ySuxa89GoJYlc.6SEnj1OZYBOfTf.TYAehcC5HLeJiWPi");
+const INVALID_PASSWORD_HASH: PasswordHash = "$2b$05$JebmV1q/ySuxa89GoJYlc.6SEnj1OZYBOfTf.TYAehcC5HLeJiWPi" as PasswordHash;

 /**
  * Trying to implement a timing safe string compare.
  *
  * TODO: Write tests for timing.
  */
-function timingSafeEqual(a: string, b: string): boolean {
+function timingSafeEqual<T extends string>(a: T, b: T): boolean {
     const lenA = a.length;
     const lenB = b.length;
@@ -32,7 +32,7 @@ function timingSafeEqual(a: string, b: string): boolean {
     let different = Math.abs(lenA - lenB);

     // Make sure b is always the same length as a. Use slice to try avoiding optimizations.
-    b = different === 0 ? b.slice() : a.slice();
+    b = (different === 0 ? b.slice() : a.slice()) as T;

     for (let i = 0; i < lenA; i += 1) {
         different += Math.abs(a.charCodeAt(i) - b.charCodeAt(i));
@@ -50,15 +50,15 @@ async function isValidLogin(username: Username, password: CleartextPassword): Pr
     // Iterate over all users every time to reduce risk of timing attacks.
     for (const userConfig of config.server.internal.users) {
-        if (lift2(timingSafeEqual)(username, userConfig.username)) {
+        if (timingSafeEqual(username, userConfig.username)) {
             passwordHash = userConfig.passwordHash;
         }
     }

     // Always compare some password even if the user does not exist to reduce risk of timing attacks.
     const isValidPassword = await bcrypt.compare(
-        password.value,
-        passwordHash?.value || INVALID_PASSWORD_HASH.value
+        password,
+        passwordHash || INVALID_PASSWORD_HASH
     );

     // Make sure password is only considered valid is user exists and therefor passwordHash is not undefined.
@@ -74,7 +74,7 @@ export function init(): void {
             realm: 'Knotenformular - Intern'
         },
         function (username: string, password: string, callback: BasicAuthCheckerCallback): void {
-            isValidLogin(to(username), to(password))
+            isValidLogin(username as Username, password as CleartextPassword)
                 .then(result => callback(result))
                 .catch(err => {
                     Logger.tag('login').error(err);
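
The wrapper newtypes of the form { value, __tag } are replaced by branded string types throughout this commit: values stay plain strings at runtime and only differ at the type level, so helpers like to() and lift2() are no longer needed. A minimal sketch of the pattern (isAdmin below is purely illustrative, not part of the commit):

// A branded ("newtype") string: structurally a string, nominally distinct for the compiler.
type Username = string & { readonly __tag: unique symbol };

// Construction is a plain cast; no wrapper object is allocated.
const username = "admin" as Username;

// Because the value is just a string, generic string helpers such as
// timingSafeEqual<T extends string>(a: T, b: T) above accept it directly.
function isAdmin(name: Username): boolean {
    return name === ("admin" as Username);
}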

View file

@@ -3,17 +3,17 @@ import commandLineUsage from "command-line-usage"
 import fs from "graceful-fs"
 import url from "url"
 import {parse} from "sparkson"
-import {Config, Version} from "./types"
+import {Config, Url, Version} from "./types"

 // @ts-ignore
 export let config: Config = {};
-export let version: Version = "unknown";
+export let version: Version = "unknown" as Version;

 export function parseCommandLine(): void {
     const commandLineDefs = [
-        { name: 'help', alias: 'h', type: Boolean, description: 'Show this help' },
-        { name: 'config', alias: 'c', type: String, description: 'Location of config.json' },
-        { name: 'version', alias: 'v', type: Boolean, description: 'Show ffffng version' }
+        {name: 'help', alias: 'h', type: Boolean, description: 'Show this help'},
+        {name: 'config', alias: 'c', type: String, description: 'Location of config.json'},
+        {name: 'version', alias: 'v', type: Boolean, description: 'Show ffffng version'}
     ];

     let commandLineOptions;
@@ -34,7 +34,7 @@ export function parseCommandLine(): void {
         version = JSON.parse(fs.readFileSync(packageJsonFile, 'utf8')).version;
     }

-    function usage () {
+    function usage() {
         console.log(commandLineUsage([
             {
                 header: 'ffffng - ' + version + ' - Freifunk node management form',
@@ -70,8 +70,10 @@ export function parseCommandLine(): void {
     config = parse(Config, configJSON);

-    function stripTrailingSlash(url: string): string {
-        return url.endsWith("/") ? url.substr(0, url.length - 1) : url;
+    function stripTrailingSlash(url: Url): Url {
+        return url.endsWith("/")
+            ? url.substr(0, url.length - 1) as Url
+            : url;
     }

     config.server.baseUrl = stripTrailingSlash(config.server.baseUrl);

View file

@@ -1,93 +1,48 @@
-import {Database, Statement} from "sqlite";
-
-export async function init(): Promise<void> {}
-
-export class MockStatement implements Statement {
-    constructor() {}
-
-    readonly changes: number = 0;
-    readonly lastID: number = 0;
-    readonly sql: string = "";
-
-    async all(): Promise<any[]>;
-    async all(...params: any[]): Promise<any[]>;
-    async all<T>(): Promise<T[]>;
-    async all<T>(...params: any[]): Promise<T[]>;
-    all(...params: any[]): any {
-    }
-
-    async bind(): Promise<Statement>;
-    async bind(...params: any[]): Promise<Statement>;
-    async bind(...params: any[]): Promise<Statement> {
-        return mockStatement();
-    }
-
-    async each(callback?: (err: Error, row: any) => void): Promise<number>;
-    async each(...params: any[]): Promise<number>;
-    async each(...callback: (((err: Error, row: any) => void) | any)[]): Promise<number> {
-        return 0;
-    }
-
-    async finalize(): Promise<void> {}
-
-    get(): Promise<any>;
-    get(...params: any[]): Promise<any>;
-    get<T>(): Promise<T>;
-    get<T>(...params: any[]): Promise<T>;
-    get(...params: any[]): any {
-    }
-
-    async reset(): Promise<Statement> {
-        return mockStatement();
-    }
-
-    async run(): Promise<Statement>;
-    async run(...params: any[]): Promise<Statement>;
-    async run(...params: any[]): Promise<Statement> {
-        return mockStatement();
-    }
-}
-
-function mockStatement(): Statement {
-    return new MockStatement();
-}
-
-export class MockDatabase implements Database {
-    constructor() {}
-
-    async close(): Promise<void> {}
-
-    async run(...args: any): Promise<Statement> {
-        return mockStatement();
-    }
-
-    async get(...args: any): Promise<any> {}
-
-    async all(...args: any): Promise<any[]> {
-        return [];
-    }
-
-    async exec(...args: any): Promise<Database> {
-        return this;
-    }
-
-    async each(...args: any): Promise<number> {
-        return 0;
-    }
-
-    async prepare(...args: any): Promise<Statement> {
-        return mockStatement();
-    }
-
-    configure(...args: any): void {}
-
-    async migrate(...args: any): Promise<Database> {
-        return this;
-    }
-
-    on(...args: any): void {}
-}
-
-export const db: MockDatabase = new MockDatabase();
-
-export {Database, Statement}
+import {RunResult, SqlType, Statement, TypedDatabase} from "../database";
+import * as sqlite3 from "sqlite3";
+
+export async function init(): Promise<void> {
+}
+
+export class MockDatabase implements TypedDatabase {
+    constructor() {
+    }
+
+    async on(event: string, listener: any): Promise<void> {
+    }
+
+    async run(sql: SqlType, ...params: any[]): Promise<RunResult> {
+        return {
+            stmt: new Statement(new sqlite3.Statement()),
+        };
+    }
+
+    async get<T = any>(sql: SqlType, ...params: any[]): Promise<T | undefined> {
+        return undefined;
+    }
+
+    async each<T = any>(sql: SqlType, callback: (err: any, row: T) => void): Promise<number>;
+    async each<T = any>(sql: SqlType, param1: any, callback: (err: any, row: T) => void): Promise<number>;
+    async each<T = any>(sql: SqlType, param1: any, param2: any, callback: (err: any, row: T) => void): Promise<number>;
+    async each<T = any>(sql: SqlType, param1: any, param2: any, param3: any, callback: (err: any, row: T) => void): Promise<number>;
+    async each<T = any>(sql: SqlType, ...params: any[]): Promise<number>;
+    async each(sql: SqlType, ...callback: (any)[]): Promise<number> {
+        return 0;
+    }
+
+    async all<T>(sql: SqlType, ...params: any[]): Promise<T[]> {
+        return [];
+    }
+
+    async exec(sql: SqlType, ...params: any[]): Promise<void> {
+    }
+
+    async prepare(sql: SqlType, ...params: any[]): Promise<Statement> {
+        return new Statement(new sqlite3.Statement());
+    }
+}
+
+export const db: MockDatabase = new MockDatabase();
+
+export {TypedDatabase, Statement}

View file

@@ -4,12 +4,125 @@ import glob from "glob";
 import path from "path";
 import {config} from "../config";
 import Logger from "../logger";
-import sqlite, {Database, Statement} from "sqlite";
+import {Database, ISqlite, open, Statement} from "sqlite";
+import * as sqlite3 from "sqlite3";

 const pglob = util.promisify(glob);
 const pReadFile = util.promisify(fs.readFile);

-async function applyPatch(db: sqlite.Database, file: string): Promise<void> {
+export type RunResult = ISqlite.RunResult;
+export type SqlType = ISqlite.SqlType;
+
+export interface TypedDatabase {
+    /**
+     * @see Database.on
+     */
+    on(event: string, listener: any): Promise<void>;
+
+    /**
+     * @see Database.run
+     */
+    run(sql: SqlType, ...params: any[]): Promise<RunResult>;
+
+    /**
+     * @see Database.get
+     */
+    get<T>(sql: SqlType, ...params: any[]): Promise<T | undefined>;
+
+    /**
+     * @see Database.each
+     */
+    each<T>(sql: SqlType, callback: (err: any, row: T) => void): Promise<number>;
+    each<T>(sql: SqlType, param1: any, callback: (err: any, row: T) => void): Promise<number>;
+    each<T>(sql: SqlType, param1: any, param2: any, callback: (err: any, row: T) => void): Promise<number>;
+    each<T>(sql: SqlType, param1: any, param2: any, param3: any, callback: (err: any, row: T) => void): Promise<number>;
+    each<T>(sql: SqlType, ...params: any[]): Promise<number>;
+
+    /**
+     * @see Database.all
+     */
+    all<T = never>(sql: SqlType, ...params: any[]): Promise<T[]>;
+
+    /**
+     * @see Database.exec
+     */
+    exec(sql: SqlType, ...params: any[]): Promise<void>;
+
+    /**
+     * @see Database.prepare
+     */
+    prepare(sql: SqlType, ...params: any[]): Promise<Statement>;
+}
+
+/**
+ * Typesafe database wrapper.
+ *
+ * @see Database
+ */
+class DatabasePromiseWrapper implements TypedDatabase {
+    private db: Promise<Database>;
+
+    constructor() {
+        this.db = new Promise<Database>((resolve, reject) => {
+            open({
+                filename: config.server.databaseFile,
+                driver: sqlite3.Database,
+            })
+                .then(resolve)
+                .catch(reject);
+        });
+        this.db.catch(err => {
+            Logger.tag('database', 'init').error('Error initializing database: ', err);
+            process.exit(1);
+        });
+    }
+
+    async on(event: string, listener: any): Promise<void> {
+        const db = await this.db;
+        db.on(event, listener);
+    }
+
+    async run(sql: SqlType, ...params: any[]): Promise<RunResult> {
+        const db = await this.db;
+        return db.run(sql, ...params);
+    }
+
+    async get<T>(sql: SqlType, ...params: any[]): Promise<T | undefined> {
+        const db = await this.db;
+        return await db.get<T>(sql, ...params);
+    }
+
+    async each<T>(sql: SqlType, callback: (err: any, row: T) => void): Promise<number>;
+    async each<T>(sql: SqlType, param1: any, callback: (err: any, row: T) => void): Promise<number>;
+    async each<T>(sql: SqlType, param1: any, param2: any, callback: (err: any, row: T) => void): Promise<number>;
+    async each<T>(sql: SqlType, param1: any, param2: any, param3: any, callback: (err: any, row: T) => void): Promise<number>;
+    async each<T>(sql: SqlType, ...params: any[]): Promise<number> {
+        const db = await this.db;
+        // @ts-ignore
+        return await db.each.apply(db, arguments);
+    }
+
+    async all<T>(sql: SqlType, ...params: any[]): Promise<T[]> {
+        const db = await this.db;
+        return (await db.all<T[]>(sql, ...params));
+    }
+
+    async exec(sql: SqlType, ...params: any[]): Promise<void> {
+        const db = await this.db;
+        return await db.exec(sql, ...params);
+    }
+
+    async prepare(sql: SqlType, ...params: any[]): Promise<Statement> {
+        const db = await this.db;
+        return await db.prepare(sql, ...params);
+    }
+}
+
+async function applyPatch(db: TypedDatabase, file: string): Promise<void> {
     Logger.tag('database', 'migration').info('Checking if patch need to be applied: %s', file);

     const contents = await pReadFile(file);
@@ -23,22 +136,22 @@ async function applyPatch(db: sqlite.Database, file: string): Promise<void> {
     }

     const sql = 'BEGIN TRANSACTION;\n' +
         contents.toString() + '\n' +
         'INSERT INTO schema_version (version) VALUES (\'' + version + '\');\n' +
         'END TRANSACTION;';

     await db.exec(sql);

     Logger.tag('database', 'migration').info('Patch successfully applied: %s', file);
 }

-async function applyMigrations(db: sqlite.Database): Promise<void> {
+async function applyMigrations(db: TypedDatabase): Promise<void> {
     Logger.tag('database', 'migration').info('Migrating database...');

     const sql = 'BEGIN TRANSACTION; CREATE TABLE IF NOT EXISTS schema_version (\n' +
         ' version VARCHAR(255) PRIMARY KEY ASC,\n' +
         ' applied_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL\n' +
         '); END TRANSACTION;';

     await db.exec(sql);
@@ -48,106 +161,18 @@ async function applyMigrations(db: sqlite.Database): Promise<void> {
     }
 }

-const dbPromise = new Promise<Database>((resolve, reject) => {
-    sqlite.open(config.server.databaseFile)
-        .then(resolve)
-        .catch(reject);
-});
+export const db: TypedDatabase = new DatabasePromiseWrapper();

 export async function init(): Promise<void> {
     Logger.tag('database').info('Setting up database: %s', config.server.databaseFile);
+    await db.on('profile', (sql: string, time: number) => Logger.tag('database').profile('[%sms]\t%s', time, sql));

-    let db: Database;
-    try {
-        db = await dbPromise;
-    }
-    catch (error) {
-        Logger.tag('database').error('Error initialzing database:', error);
-        throw error;
-    }
-
-    db.on('profile', (sql, time) => Logger.tag('database').profile('[%sms]\t%s', time, sql));
-
     try {
         await applyMigrations(db);
-    }
-    catch (error) {
+    } catch (error) {
         Logger.tag('database').error('Error migrating database:', error);
         throw error;
     }
 }

-/**
- * Wrapper around a Promise<Database> providing the same interface as the Database itself.
- */
-class DatabasePromiseWrapper implements Database {
-    constructor(private db: Promise<Database>) {
-        db.catch(err => {
-            Logger.tag('database', 'init').error('Error initializing database: ', err);
-            process.exit(1);
-        });
-    }
-
-    async close() {
-        const db = await this.db;
-        // @ts-ignore
-        return await db.close.apply(db, arguments);
-    }
-
-    async run() {
-        const db = await this.db;
-        // @ts-ignore
-        return await db.run.apply(db, arguments);
-    }
-
-    async get() {
-        const db = await this.db;
-        // @ts-ignore
-        return await db.get.apply(db, arguments);
-    }
-
-    async all() {
-        const db = await this.db;
-        // @ts-ignore
-        return await db.all.apply(db, arguments);
-    }
-
-    async exec() {
-        const db = await this.db;
-        // @ts-ignore
-        return await db.exec.apply(db, arguments);
-    }
-
-    async each() {
-        const db = await this.db;
-        // @ts-ignore
-        return await db.each.apply(db, arguments);
-    }
-
-    async prepare() {
-        const db = await this.db;
-        // @ts-ignore
-        return await db.prepare.apply(db, arguments);
-    }
-
-    async configure() {
-        const db = await this.db;
-        // @ts-ignore
-        return await db.configure.apply(db, arguments);
-    }
-
-    async migrate() {
-        const db = await this.db;
-        // @ts-ignore
-        return await db.migrate.apply(db, arguments);
-    }
-
-    async on() {
-        const db = await this.db;
-        // @ts-ignore
-        return await db.on.apply(db, arguments);
-    }
-}
-
-export const db: Database = new DatabasePromiseWrapper(dbPromise);
-
-export {Database, Statement};
+export {Statement};
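
Starting with v4 the sqlite package is only a promise wrapper and no longer ships a driver, which is why sqlite3 is added to package.json and handed to open() as driver above. The TypedDatabase wrapper then lets callers state the expected row shape per query: all<T>() yields a typed array and get<T>() resolves to T | undefined. A small usage sketch against the schema_version table created by the migrations (listSchemaVersions and hasVersion are illustrative, not part of the commit):

import { db } from "../db/database";

type SchemaVersionRow = { version: string };

async function listSchemaVersions(): Promise<string[]> {
    // all<T>() types each row; an empty result is simply [].
    const rows = await db.all<SchemaVersionRow>("SELECT version FROM schema_version");
    return rows.map(row => row.version);
}

async function hasVersion(version: string): Promise<boolean> {
    // get<T>() resolves to T | undefined, so a missing row has to be handled explicitly.
    const row = await db.get<SchemaVersionRow>(
        "SELECT version FROM schema_version WHERE version = ?",
        version
    );
    return row !== undefined;
}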

View file

@@ -2,7 +2,7 @@ import CONSTRAINTS from "../validation/constraints";
 import ErrorTypes from "../utils/errorTypes";
 import * as MailService from "../services/mailService";
 import * as Resources from "../utils/resources";
-import {normalizeString} from "../utils/strings";
+import {normalizeString, parseInteger} from "../utils/strings";
 import {forConstraint} from "../validation/validator";
 import {Request, Response} from "express";
 import {Mail, MailId} from "../types";
@@ -16,7 +16,7 @@ async function withValidMailId(req: Request): Promise<MailId> {
         throw {data: 'Invalid mail id.', type: ErrorTypes.badRequest};
     }

-    return id;
+    return parseInteger(id) as MailId;
 }

 async function doGet(req: Request): Promise<Mail> {

View file

@@ -7,7 +7,7 @@ import * as Resources from "../utils/resources";
 import {normalizeString} from "../utils/strings";
 import {forConstraint} from "../validation/validator";
 import {Request, Response} from "express";
-import {MonitoringToken, to} from "../types";
+import {MonitoringToken} from "../types";

 const isValidToken = forConstraint(CONSTRAINTS.token, false);
@@ -39,7 +39,7 @@ export function confirm(req: Request, res: Response): void {
     if (!isValidToken(token)) {
         return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
     }
-    const validatedToken: MonitoringToken = to(token);
+    const validatedToken: MonitoringToken = token as MonitoringToken;

     MonitoringService.confirm(validatedToken)
         .then(node => Resources.success(res, {
@@ -59,7 +59,7 @@ export function disable(req: Request, res: Response): void {
     if (!isValidToken(token)) {
         return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
     }
-    const validatedToken: MonitoringToken = to(token);
+    const validatedToken: MonitoringToken = token as MonitoringToken;

     MonitoringService.disable(validatedToken)
         .then(node => Resources.success(res, {

View file

@@ -8,9 +8,8 @@ import * as NodeService from "../services/nodeService";
 import {normalizeMac, normalizeString} from "../utils/strings";
 import {forConstraint, forConstraints} from "../validation/validator";
 import * as Resources from "../utils/resources";
-import {Entity} from "../utils/resources";
 import {Request, Response} from "express";
-import {EnhancedNode, isNodeSortField, MAC, Node, to, Token} from "../types";
+import {EnhancedNode, isNodeSortField, MAC, Node, Token} from "../types";

 const nodeFields = ['hostname', 'key', 'email', 'nickname', 'mac', 'coords', 'monitoring'];
@@ -49,7 +48,7 @@ export function update (req: Request, res: Response): void {
     if (!isValidToken(token)) {
         return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
     }
-    const validatedToken: Token = to(token);
+    const validatedToken: Token = token as Token;

     const node = getNormalizedNodeData(data);
     if (!isValidNode(node)) {
@@ -68,7 +67,7 @@ export function remove(req: Request, res: Response): void {
     if (!isValidToken(token)) {
         return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
     }
-    const validatedToken: Token = to(token);
+    const validatedToken: Token = token as Token;

     NodeService.deleteNode(validatedToken)
         .then(() => Resources.success(res, {}))
@@ -80,7 +79,7 @@ export function get(req: Request, res: Response): void {
     if (!isValidToken(token)) {
         return Resources.error(res, {data: 'Invalid token.', type: ErrorTypes.badRequest});
     }
-    const validatedToken: Token = to(token);
+    const validatedToken: Token = token as Token;

     NodeService.getNodeDataByToken(validatedToken)
         .then(node => Resources.success(res, node))
@@ -101,7 +100,7 @@ async function doGetAll(req: Request): Promise<{ total: number; pageNodes: any }
     const nodeStateByMac = await MonitoringService.getByMacs(macs);

     const enhancedNodes: EnhancedNode[] = _.map(realNodes, (node: Node): EnhancedNode => {
-        const nodeState = nodeStateByMac[node.mac.value];
+        const nodeState = nodeStateByMac[node.mac];
         if (nodeState) {
             return deepExtend({}, node, {
                 site: nodeState.site,

View file

@@ -9,7 +9,29 @@ import Logger from "../logger";
 import * as MailTemplateService from "./mailTemplateService";
 import * as Resources from "../utils/resources";
 import {RestParams} from "../utils/resources";
-import {isMailSortField, Mail, MailData, MailId, MailSortField, MailType} from "../types";
+import {
+    EmailAddress, isJSONObject,
+    isMailSortField, isMailType, JSONObject,
+    Mail,
+    MailData,
+    MailId,
+    MailSortField,
+    MailType,
+    parseJSON,
+    UnixTimestampSeconds
+} from "../types";
+import ErrorTypes from "../utils/errorTypes";
+
+type EmaiQueueRow = {
+    id: MailId,
+    created_at: UnixTimestampSeconds,
+    data: string,
+    email: string,
+    failures: number,
+    modified_at: UnixTimestampSeconds,
+    recipient: EmailAddress,
+    sender: EmailAddress,
+};

 const MAIL_QUEUE_DB_BATCH_SIZE = 50;
@@ -24,7 +46,7 @@ function transporter() {
         {
             transport: 'smtp',
             pool: true
-        }
+        } as JSONObject
     ));
     MailTemplateService.configureTransporter(transporterSingleton);
@@ -57,18 +79,29 @@ async function sendMail(options: Mail): Promise<void> {
 }

 async function findPendingMailsBefore(beforeMoment: Moment, limit: number): Promise<Mail[]> {
-    const rows = await db.all(
+    const rows = await db.all<EmaiQueueRow>(
         'SELECT * FROM email_queue WHERE modified_at < ? AND failures < ? ORDER BY id ASC LIMIT ?',
         [beforeMoment.unix(), 5, limit],
     );

-    return _.map(rows, row => deepExtend(
-        {},
-        row,
-        {
-            data: JSON.parse(row.data)
-        }
-    ));
+    return rows.map(row => {
+        const mailType = row.email;
+        if (!isMailType(mailType)) {
+            throw new Error(`Invalid mailtype in database: ${mailType}`);
+        }
+        const data = parseJSON(row.data);
+        if (!isJSONObject(data)) {
+            throw new Error(`Invalid email data in database: ${typeof data}`);
+        }
+        return {
+            id: row.id,
+            email: mailType,
+            sender: row.sender,
+            recipient: row.recipient,
+            data,
+            failures: row.failures,
+        };
+    });
 }

 async function removePendingMailFromQueue(id: MailId): Promise<void> {
@@ -85,8 +118,7 @@ async function incrementFailureCounterForPendingEmail(id: MailId): Promise<void>
 async function sendPendingMail(pendingMail: Mail): Promise<void> {
     try {
         await sendMail(pendingMail);
-    }
-    catch (error) {
+    } catch (error) {
         // we only log the error and increment the failure counter as we want to continue with pending mails
         Logger.tag('mail', 'queue').error('Error sending pending mail[' + pendingMail.id + ']:', error);
@@ -98,10 +130,14 @@ async function sendPendingMail(pendingMail: Mail): Promise<void> {
 }

 async function doGetMail(id: MailId): Promise<Mail> {
-    return await db.get('SELECT * FROM email_queue WHERE id = ?', [id]);
+    const row = await db.get<Mail>('SELECT * FROM email_queue WHERE id = ?', [id]);
+    if (row === undefined) {
+        throw {data: 'Mail not found.', type: ErrorTypes.notFound};
+    }
+    return row;
 }

-export async function enqueue (sender: string, recipient: string, email: MailType, data: MailData): Promise<void> {
+export async function enqueue(sender: string, recipient: string, email: MailType, data: MailData): Promise<void> {
     if (!_.isPlainObject(data)) {
         throw new Error('Unexpected data: ' + data);
     }
@@ -113,17 +149,17 @@ export async function enqueue (sender: string, recipient: string, email: MailTyp
     );
 }

-export async function getMail (id: MailId): Promise<Mail> {
+export async function getMail(id: MailId): Promise<Mail> {
     return await doGetMail(id);
 }

-export async function getPendingMails (restParams: RestParams): Promise<{mails: Mail[], total: number}> {
-    const row = await db.get(
+export async function getPendingMails(restParams: RestParams): Promise<{ mails: Mail[], total: number }> {
+    const row = await db.get<{ total: number }>(
         'SELECT count(*) AS total FROM email_queue',
         [],
     );

-    const total = row.total;
+    const total = row?.total || 0;

     const filter = Resources.filterClause(
         restParams,
@@ -143,11 +179,11 @@ export async function getPendingMails (restParams: RestParams): Promise<{mails:
     }
 }

-export async function deleteMail (id: MailId): Promise<void> {
+export async function deleteMail(id: MailId): Promise<void> {
     await removePendingMailFromQueue(id);
 }

-export async function resetFailures (id: MailId): Promise<Mail> {
+export async function resetFailures(id: MailId): Promise<Mail> {
     const statement = await db.run(
         'UPDATE email_queue SET failures = 0, modified_at = ? WHERE id = ?',
         [moment().unix(), id],
@@ -160,7 +196,7 @@ export async function resetFailures (id: MailId): Promise<Mail> {
     return await doGetMail(id);
 }

-export async function sendPendingMails (): Promise<void> {
+export async function sendPendingMails(): Promise<void> {
     Logger.tag('mail', 'queue').debug('Start sending pending mails...');

     const startTime = moment();
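
Because db.get() can now resolve to undefined, aggregate queries fall back explicitly instead of reading .total off a possibly missing row. The pattern in isolation (countQueuedMails is illustrative, not part of the commit):

import { db } from "../db/database";

async function countQueuedMails(): Promise<number> {
    const row = await db.get<{ total: number }>("SELECT count(*) AS total FROM email_queue");
    // get<T>() returns T | undefined; treat a missing row as zero.
    return row?.total || 0;
}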

View file

@@ -13,7 +13,13 @@ import {MailData, Mail} from "../types";
 const templateBasePath = __dirname + '/../mailTemplates';
 const snippetsBasePath = templateBasePath + '/snippets';

-const templateFunctions: {[key: string]: (...data: MailData) => string} = {};
+const templateFunctions: {
+    [key: string]:
+        | ((name: string, data: MailData) => string)
+        | ((data: MailData) => string)
+        | ((href: string, text: string) => string)
+        | ((unix: number) => string)
+} = {};

 function renderSnippet(this: any, name: string, data: MailData): string {
     const snippetFile = snippetsBasePath + '/' + name + '.html';

View file

@@ -1,6 +1,6 @@
 import moment from 'moment';
 import {ParsedNode, parseNode, parseNodesJson, parseTimestamp} from "./monitoringService";
-import {MAC, OnlineState, to} from "../types";
+import {Domain, MAC, OnlineState, Site} from "../types";
 import Logger from '../logger';
 import {MockLogger} from "../__mocks__/logger";
@@ -44,6 +44,7 @@ test('parseTimestamp() should fail parsing empty timestamp string', () => {
 test('parseTimestamp() should fail parsing invalid timestamp string', () => {
     // given
+    // noinspection UnnecessaryLocalVariableJS
     const timestamp = TIMESTAMP_INVALID_STRING;

     // when
@@ -240,12 +241,12 @@ test('parseNode() should succeed parsing node without site and domain', () => {
     // then
     const expectedParsedNode: ParsedNode = {
-        mac: to("12:34:56:78:90:AB"),
+        mac: "12:34:56:78:90:AB" as MAC,
         importTimestamp: importTimestamp,
         state: OnlineState.ONLINE,
         lastSeen: parseTimestamp(TIMESTAMP_VALID_STRING),
-        site: to("<unknown-site>"),
-        domain: to("<unknown-domain>"),
+        site: "<unknown-site>" as Site,
+        domain: "<unknown-domain>" as Domain,
     };
     expect(parseNode(importTimestamp, nodeData)).toEqual(expectedParsedNode);
 });
@@ -272,12 +273,12 @@ test('parseNode() should succeed parsing node with site and domain', () => {
     // then
     const expectedParsedNode: ParsedNode = {
-        mac: to("12:34:56:78:90:AB"),
+        mac: "12:34:56:78:90:AB" as MAC,
         importTimestamp: importTimestamp,
         state: OnlineState.ONLINE,
         lastSeen: parseTimestamp(TIMESTAMP_VALID_STRING),
-        site: to("test-site"),
-        domain: to("test-domain")
+        site: "test-site" as Site,
+        domain: "test-domain" as Domain,
     };
     expect(parseNode(importTimestamp, nodeData)).toEqual(expectedParsedNode);
 });
@@ -461,12 +462,12 @@ test('parseNodesJson() should parse valid nodes', () => {
     // then
     const expectedParsedNode: ParsedNode = {
-        mac: to("12:34:56:78:90:AB"),
+        mac: "12:34:56:78:90:AB" as MAC,
         importTimestamp: parseTimestamp(TIMESTAMP_VALID_STRING),
         state: OnlineState.ONLINE,
         lastSeen: parseTimestamp(TIMESTAMP_VALID_STRING),
-        site: to("test-site"),
-        domain: to("test-domain"),
+        site: "test-site" as Site,
+        domain: "test-domain" as Domain,
     };

     expect(result.importTimestamp.isValid()).toBe(true);

View file

@@ -3,7 +3,7 @@ import moment, {Moment, unitOfTime} from "moment";
 import request from "request";

 import {config} from "../config";
-import {db, Statement} from "../db/database";
+import {db, RunResult} from "../db/database";
 import * as DatabaseUtil from "../utils/databaseUtil";
 import ErrorTypes from "../utils/errorTypes";
 import Logger from "../logger";
@@ -12,14 +12,15 @@ import * as MailService from "../services/mailService";
 import * as NodeService from "../services/nodeService";
 import * as Resources from "../utils/resources";
 import {RestParams} from "../utils/resources";
-import {normalizeMac} from "../utils/strings";
+import {normalizeMac, parseInteger} from "../utils/strings";
 import {monitoringDisableUrl} from "../utils/urlBuilder";
 import CONSTRAINTS from "../validation/constraints";
 import {forConstraint} from "../validation/validator";
 import {
     Domain,
-    equal,
+    Hostname,
     isMonitoringSortField,
+    isOnlineState,
     MAC,
     MailType,
     MonitoringSortField,
@@ -29,10 +30,25 @@ import {
     NodeStateData,
     OnlineState,
     Site,
-    to,
     UnixTimestampSeconds
 } from "../types";

+type NodeStateRow = {
+    id: number,
+    created_at: UnixTimestampSeconds,
+    domain: Domain | null,
+    hostname: Hostname | null,
+    import_timestamp: UnixTimestampSeconds,
+    last_seen: UnixTimestampSeconds,
+    last_status_mail_sent: string | null,
+    last_status_mail_type: string | null,
+    mac: MAC,
+    modified_at: UnixTimestampSeconds,
+    monitoring_state: string | null,
+    site: Site | null,
+    state: string,
+};
+
 const MONITORING_STATE_MACS_CHUNK_SIZE = 100;
 const NEVER_ONLINE_NODES_DELETION_CHUNK_SIZE = 20;
 const MONITORING_MAILS_DB_BATCH_SIZE = 50;
@@ -193,7 +209,7 @@ export function parseNode(importTimestamp: Moment, nodeData: any): ParsedNode {
             'Node ' + nodeId + ': Invalid MAC: ' + nodeData.nodeinfo.network.mac
         );
     }
-    const mac = normalizeMac(nodeData.nodeinfo.network.mac);
+    const mac = normalizeMac(nodeData.nodeinfo.network.mac) as MAC;

     if (!_.isPlainObject(nodeData.flags)) {
         throw new Error(
@@ -214,23 +230,23 @@ export function parseNode(importTimestamp: Moment, nodeData: any): ParsedNode {
         );
     }

-    let site = null;
+    let site = "<unknown-site>" as Site; // FIXME: Handle this
     if (_.isPlainObject(nodeData.nodeinfo.system) && _.isString(nodeData.nodeinfo.system.site_code)) {
-        site = nodeData.nodeinfo.system.site_code;
+        site = nodeData.nodeinfo.system.site_code as Site;
     }

-    let domain = null;
+    let domain = "<unknown-domain>" as Domain; // FIXME: Handle this
     if (_.isPlainObject(nodeData.nodeinfo.system) && _.isString(nodeData.nodeinfo.system.domain_code)) {
-        domain = nodeData.nodeinfo.system.domain_code;
+        domain = nodeData.nodeinfo.system.domain_code as Domain;
     }

     return {
-        mac: to(mac),
+        mac,
         importTimestamp: importTimestamp,
         state: isOnline ? OnlineState.ONLINE : OnlineState.OFFLINE,
         lastSeen: lastSeen,
-        site: to(site || '<unknown-site>'), // FIXME: Handle this
-        domain: to(domain || '<unknown-domain>') // FIXME: Handle this
+        site,
+        domain,
     };
 }
@@ -279,7 +295,7 @@ export function parseNodesJson(body: string): NodesParsingResult {
     return result;
 }

-async function updateSkippedNode(id: NodeId, node?: Node): Promise<Statement> {
+async function updateSkippedNode(id: NodeId, node?: Node): Promise<RunResult> {
     return await db.run(
         'UPDATE node_state ' +
         'SET hostname = ?, monitoring_state = ?, modified_at = ?' +
@@ -352,8 +368,7 @@ async function sendMonitoringMailsBatched(
                 {
                     node: node,
                     lastSeen: nodeState.last_seen,
-                    disableUrl: monitoringDisableUrl(monitoringToken)
+                    disableUrl: monitoringDisableUrl(monitoringToken),
                 }
             );
@@ -378,7 +393,7 @@ async function sendMonitoringMailsBatched(
 async function sendOnlineAgainMails(startTime: Moment): Promise<void> {
     await sendMonitoringMailsBatched(
         'online again',
-        'monitoring-online-again',
+        MailType.MONITORING_ONLINE_AGAIN,
         async (): Promise<any[]> => await db.all(
             'SELECT * FROM node_state ' +
             'WHERE modified_at < ? AND state = ? AND last_status_mail_type IN (' +
@@ -395,10 +410,11 @@ async function sendOnlineAgainMails(startTime: Moment): Promise<void> {
     );
 }

-async function sendOfflineMails(startTime: Moment, mailNumber: number): Promise<void> {
+async function sendOfflineMails(startTime: Moment, mailType: MailType): Promise<void> {
+    const mailNumber = parseInteger(mailType.split("-")[2]);
     await sendMonitoringMailsBatched(
         'offline ' + mailNumber,
-        'monitoring-offline-' + mailNumber,
+        mailType,
         async (): Promise<any[]> => {
             const previousType =
                 mailNumber === 1 ? 'monitoring-online-again' : ('monitoring-offline-' + (mailNumber - 1));
@@ -556,12 +572,12 @@ export async function getAll(restParams: RestParams): Promise<{ total: number, m
     const where = Resources.whereCondition(restParams, filterFields);

-    const row = await db.get(
+    const row = await db.get<{ total: number }>(
         'SELECT count(*) AS total FROM node_state WHERE ' + where.query,
         _.concat([], where.params),
     );

-    const total = row.total;
+    const total = row?.total || 0;

     const filter = Resources.filterClause(
         restParams,
@@ -578,7 +594,7 @@ export async function getAll(restParams: RestParams): Promise<{ total: number, m
     return {monitoringStates, total};
 }

-export async function getByMacs(macs: MAC[]): Promise<Record<string, NodeStateData>> {
+export async function getByMacs(macs: MAC[]): Promise<Record<MAC, NodeStateData>> {
     if (_.isEmpty(macs)) {
         return {};
     }
@@ -588,13 +604,22 @@ export async function getByMacs(macs: MAC[]): Promise<Record<string, NodeStateDa
     for (const subMacs of _.chunk(macs, MONITORING_STATE_MACS_CHUNK_SIZE)) {
         const inCondition = DatabaseUtil.inCondition('mac', subMacs);

-        const rows = await db.all(
+        const rows = await db.all<NodeStateRow>(
             'SELECT * FROM node_state WHERE ' + inCondition.query,
             _.concat([], inCondition.params),
         );

         for (const row of rows) {
-            nodeStateByMac[row.mac] = row;
+            const onlineState = row.state;
+            if (!isOnlineState(onlineState)) {
+                throw new Error(`Invalid online state in database: "${onlineState}"`);
+            }
+
+            nodeStateByMac[row.mac] = {
+                site: row.site || "<unknown-site>" as Site, // FIXME: Handle this
+                domain: row.domain || "<unknown-domain>" as Domain, // FIXME: Handle this
+                state: onlineState,
+            };
         }
     }
@@ -603,7 +628,7 @@ export async function getByMacs(macs: MAC[]): Promise<Record<string, NodeStateDa
 export async function confirm(token: MonitoringToken): Promise<Node> {
     const {node, nodeSecrets} = await NodeService.getNodeDataWithSecretsByMonitoringToken(token);
-    if (!node.monitoring || !nodeSecrets.monitoringToken || !equal(nodeSecrets.monitoringToken, token)) {
+    if (!node.monitoring || !nodeSecrets.monitoringToken || nodeSecrets.monitoringToken !== token) {
         throw {data: 'Invalid token.', type: ErrorTypes.badRequest};
     }
@@ -619,7 +644,7 @@ export async function confirm(token: MonitoringToken): Promise<Node> {
 export async function disable(token: MonitoringToken): Promise<Node> {
     const {node, nodeSecrets} = await NodeService.getNodeDataWithSecretsByMonitoringToken(token);
-    if (!node.monitoring || !nodeSecrets.monitoringToken || !equal(nodeSecrets.monitoringToken, token)) {
+    if (!node.monitoring || !nodeSecrets.monitoringToken || nodeSecrets.monitoringToken !== token) {
         throw {data: 'Invalid token.', type: ErrorTypes.badRequest};
     }
@@ -654,14 +679,18 @@ export async function sendMonitoringMails(): Promise<void> {
             .error('Error sending "online again" mails.', error);
     }

-    for (let mailNumber = 1; mailNumber <= 3; mailNumber++) {
+    for (const mailType of [
+        MailType.MONITORING_OFFLINE_1,
+        MailType.MONITORING_OFFLINE_2,
+        MailType.MONITORING_OFFLINE_3,
+    ]) {
         try {
-            await sendOfflineMails(startTime, mailNumber);
+            await sendOfflineMails(startTime, mailType);
         } catch (error) {
             // only logging an continuing with next type
             Logger
                 .tag('monitoring', 'mail-sending')
-                .error('Error sending "offline ' + mailNumber + '" mails.', error);
+                .error('Error sending "' + mailType + '" mails.', error);
         }
     }
 }
@@ -767,7 +796,7 @@ async function deleteNeverOnlineNodesBefore(deleteBefore: UnixTimestampSeconds):
 }

 async function deleteNodesOfflineSinceBefore(deleteBefore: UnixTimestampSeconds): Promise<void> {
-    const rows = await db.all(
+    const rows = await db.all<NodeStateRow>(
         'SELECT * FROM node_state WHERE state = ? AND last_seen < ?',
         [
             'OFFLINE',
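
sendOfflineMails() now receives a MailType enum member instead of a bare number and recovers the mail number from the enum value itself. The idea in isolation (offlineMailNumber is illustrative; the real code uses parseInteger() from ../utils/strings, plain parseInt stands in here):

import { MailType } from "../types";

// "monitoring-offline-2" -> 2; relies on the offline enum values ending in the mail number.
function offlineMailNumber(mailType: MailType): number {
    return parseInt(mailType.split("-")[2], 10);
}

offlineMailNumber(MailType.MONITORING_OFFLINE_3); // 3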

View file

@@ -11,14 +11,18 @@ import * as MailService from "../services/mailService";
 import {normalizeString} from "../utils/strings";
 import {monitoringConfirmUrl, monitoringDisableUrl} from "../utils/urlBuilder";
 import {
+    Coordinates,
+    EmailAddress,
     FastdKey,
+    Hostname,
     MAC,
+    MailType,
     MonitoringState,
     MonitoringToken,
+    Nickname,
     Node,
     NodeSecrets,
     NodeStatistics,
-    to,
     Token,
     toUnixTimestampSeconds,
     unhandledEnumField,
@@ -60,18 +64,17 @@ enum LINE_PREFIX {
 const filenameParts = ['hostname', 'mac', 'key', 'token', 'monitoringToken'];

-function generateToken<Type extends { readonly __tag: symbol, value: any } =
-    { readonly __tag: unique symbol, value: never }>(): Type {
-    return to<Type>(crypto.randomBytes(8).toString('hex'));
+function generateToken<Type extends string & { readonly __tag: symbol } = never>(): Type {
+    return crypto.randomBytes(8).toString('hex') as Type;
 }

 function toNodeFilesPattern(filter: NodeFilter): string {
     const fields: (string | undefined)[] = [
         filter.hostname,
-        filter.mac?.value,
-        filter.key?.value,
-        filter.token?.value,
-        filter.monitoringToken?.value,
+        filter.mac,
+        filter.key,
+        filter.token,
+        filter.monitoringToken,
     ];

     const pattern = fields.map((value) => value || '*').join('@');
@@ -124,7 +127,7 @@ function isDuplicate(filter: NodeFilter, token: Token | null): boolean {
         return true;
     }

-    return parseNodeFilename(files[0]).token !== token.value;
+    return parseNodeFilename(files[0]).token !== token;
 }

 function checkNoDuplicates(token: Token | null, node: Node, nodeSecrets: NodeSecrets): void {
@@ -169,9 +172,9 @@ function getNodeValue(prefix: LINE_PREFIX, node: Node, nodeSecrets: NodeSecrets)
         case LINE_PREFIX.COORDS:
             return node.coords || "";
         case LINE_PREFIX.MAC:
-            return node.mac.value;
+            return node.mac;
         case LINE_PREFIX.TOKEN:
-            return node.token.value;
+            return node.token;
         case LINE_PREFIX.MONITORING:
             if (node.monitoring && node.monitoringConfirmed) {
                 return "aktiv";
@@ -180,7 +183,7 @@ function getNodeValue(prefix: LINE_PREFIX, node: Node, nodeSecrets: NodeSecrets)
             }
             return "";
         case LINE_PREFIX.MONITORING_TOKEN:
-            return nodeSecrets.monitoringToken?.value || "";
+            return nodeSecrets.monitoringToken || "";
         default:
             return unhandledEnumField(prefix);
     }
@@ -255,13 +258,13 @@ async function deleteNodeFile(token: Token): Promise<void> {
 }

 class NodeBuilder {
-    public token: Token = to(""); // FIXME: Either make token optional in Node or handle this!
-    public nickname: string = "";
-    public email: string = "";
-    public hostname: string = ""; // FIXME: Either make hostname optional in Node or handle this!
-    public coords?: string;
+    public token: Token = "" as Token; // FIXME: Either make token optional in Node or handle this!
+    public nickname: Nickname = "" as Nickname;
+    public email: EmailAddress = "" as EmailAddress;
+    public hostname: Hostname = "" as Hostname; // FIXME: Either make hostname optional in Node or handle this!
+    public coords?: Coordinates;
     public key?: FastdKey;
-    public mac: MAC = to(""); // FIXME: Either make mac optional in Node or handle this!
+    public mac: MAC = "" as MAC; // FIXME: Either make mac optional in Node or handle this!
     public monitoring: boolean = false;
     public monitoringConfirmed: boolean = false;
     public monitoringState: MonitoringState = MonitoringState.DISABLED;
@@ -291,22 +294,22 @@ class NodeBuilder {
 function setNodeValue(prefix: LINE_PREFIX, node: NodeBuilder, nodeSecrets: NodeSecrets, value: string) {
     switch (prefix) {
         case LINE_PREFIX.HOSTNAME:
-            node.hostname = value;
+            node.hostname = value as Hostname;
             break;
         case LINE_PREFIX.NICKNAME:
-            node.nickname = value;
+            node.nickname = value as Nickname;
             break;
         case LINE_PREFIX.EMAIL:
-            node.email = value;
+            node.email = value as EmailAddress;
             break;
         case LINE_PREFIX.COORDS:
-            node.coords = value;
+            node.coords = value as Coordinates;
            break;
         case LINE_PREFIX.MAC:
-            node.mac = to(value);
+            node.mac = value as MAC;
             break;
         case LINE_PREFIX.TOKEN:
-            node.token = to(value);
+            node.token = value as Token;
             break;
         case LINE_PREFIX.MONITORING:
             const active = value === 'aktiv';
@@ -317,7 +320,7 @@ function setNodeValue(prefix: LINE_PREFIX, node: NodeBuilder, nodeSecrets: NodeS
                 active ? MonitoringState.ACTIVE : (pending ? MonitoringState.PENDING : MonitoringState.DISABLED);
             break;
         case LINE_PREFIX.MONITORING_TOKEN:
-            nodeSecrets.monitoringToken = to<MonitoringToken>(value);
+            nodeSecrets.monitoringToken = value as MonitoringToken;
             break;
         default:
             return unhandledEnumField(prefix);
@@ -340,7 +343,7 @@ async function parseNodeFile(file: string): Promise<{ node: Node, nodeSecrets: N
     for (const line of lines) {
         if (line.substring(0, 5) === 'key "') {
-            node.key = to<FastdKey>(normalizeString(line.split('"')[1]));
+            node.key = normalizeString(line.split('"')[1]) as FastdKey;
         } else {
             for (const prefix of Object.values(LINE_PREFIX)) {
                 if (line.substring(0, prefix.length) === prefix) {
@@ -393,7 +396,7 @@ async function sendMonitoringConfirmationMail(node: Node, nodeSecrets: NodeSecre
     await MailService.enqueue(
         config.server.email.from,
         node.nickname + ' <' + node.email + '>',
-        'monitoring-confirmation',
+        MailType.MONITORING_CONFIRMATION,
         {
             node: node,
             confirmUrl: confirmUrl,
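
generateToken() now hands out the branded token types directly instead of wrapping the random hex string. A usage sketch (Token and MonitoringToken are re-declared locally so the snippet stands alone; in the codebase they come from ../types):

import * as crypto from "crypto";

type Token = string & { readonly __tag: unique symbol };
type MonitoringToken = string & { readonly __tag: unique symbol };

function generateToken<Type extends string & { readonly __tag: symbol } = never>(): Type {
    return crypto.randomBytes(8).toString("hex") as Type;
}

// The caller decides which branded type the random hex string gets;
// the default of never forces an explicit type argument.
const token = generateToken<Token>();
const monitoringToken = generateToken<MonitoringToken>();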

View file

@@ -1,32 +1,17 @@
 import {ArrayField, Field, RawJsonField} from "sparkson"
-import {ClientConfig, to} from "./shared";
+import {ClientConfig, JSONObject, Url} from "./shared";

 // TODO: Replace string types by more specific types like URL, Password, etc.

-export type Username = {
-    value: string;
-    readonly __tag: unique symbol
-};
-
-export type CleartextPassword = {
-    value: string;
-    readonly __tag: unique symbol
-};
-
-export type PasswordHash = {
-    value: string;
-    readonly __tag: unique symbol
-};
+export type Username = string & { readonly __tag: unique symbol };
+export type CleartextPassword = string & { readonly __tag: unique symbol };
+export type PasswordHash = string & { readonly __tag: unique symbol };

 export class UsersConfig {
-    public username: Username;
-    public passwordHash: PasswordHash;
-
     constructor(
-        @Field("user") username: string,
-        @Field("passwordHash") passwordHash: string,
-    ) {
-        this.username = to(username);
-        this.passwordHash = to(passwordHash);
-    }
+        @Field("user") public username: Username,
+        @Field("passwordHash") public passwordHash: PasswordHash,
+    ) {}
 }

 export class LoggingConfig {
@@ -49,19 +34,19 @@ export class EmailConfig {
         @Field("from") public from: string,

         // For details see: https://nodemailer.com/2-0-0-beta/setup-smtp/
-        @RawJsonField("smtp") public smtp: any, // TODO: Better types!
+        @RawJsonField("smtp") public smtp: JSONObject,
     ) {}
 }

 export class ServerMapConfig {
     constructor(
-        @ArrayField("nodesJsonUrl", String) public nodesJsonUrl: string[],
+        @ArrayField("nodesJsonUrl", String) public nodesJsonUrl: Url[],
     ) {}
 }

 export class ServerConfig {
     constructor(
-        @Field("baseUrl") public baseUrl: string,
+        @Field("baseUrl") public baseUrl: Url,
         @Field("port") public port: number,
         @Field("databaseFile") public databaseFile: string,

View file

@@ -1,4 +1,4 @@
-import {Domain, MonitoringToken, OnlineState, Site} from "./shared";
+import {Domain, EmailAddress, JSONObject, MonitoringToken, OnlineState, Site, toIsEnum} from "./shared";

 export * from "./config";
 export * from "./logger";
@@ -15,15 +15,24 @@ export type NodeSecrets = {
     monitoringToken?: MonitoringToken,
 };

-export type MailId = string;
-export type MailData = any;
-export type MailType = string;
+export type MailId = number & { readonly __tag: unique symbol };
+export type MailData = JSONObject;
+
+export enum MailType {
+    MONITORING_OFFLINE_1 = "monitoring-offline-1",
+    MONITORING_OFFLINE_2 = "monitoring-offline-2",
+    MONITORING_OFFLINE_3 = "monitoring-offline-3",
+    MONITORING_ONLINE_AGAIN = "monitoring-online-again",
+    MONITORING_CONFIRMATION = "monitoring-confirmation",
+}
+
+export const isMailType = toIsEnum(MailType);

 export interface Mail {
     id: MailId,
     email: MailType,
-    sender: string,
-    recipient: string,
+    sender: EmailAddress,
+    recipient: EmailAddress,
     data: MailData,
     failures: number,
 }
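
MailType is now an enum with a derived runtime guard, so strings read from the database or from requests can be narrowed safely. A short sketch of how the guard is meant to be used (toMailType is illustrative; the services in this commit inline the same check):

import { isMailType, MailType } from "../types";

function toMailType(value: string): MailType {
    if (!isMailType(value)) {
        throw new Error(`Unknown mail type: ${value}`);
    }
    // The toIsEnum()-derived guard has narrowed value to MailType here.
    return value;
}

toMailType("monitoring-online-again"); // MailType.MONITORING_ONLINE_AGAIN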

View file

@@ -1,8 +1,60 @@
 import {ArrayField, Field, RawJsonField} from "sparkson";
+import exp from "constants";
 // Types shared with the client.
 export type TypeGuard<T> = (arg: unknown) => arg is T;
+export function parseJSON(str: string): JSONValue {
+    const json = JSON.parse(str);
+    if (!isJSONValue(json)) {
+        throw new Error("Invalid JSON returned. Should never happen.");
+    }
+    return json;
+}
+export type JSONValue =
+    | null
+    | string
+    | number
+    | boolean
+    | JSONObject
+    | JSONArray;
+export function isJSONValue(arg: unknown): arg is JSONValue {
+    return (
+        arg === null ||
+        isString(arg) ||
+        isNumber(arg) ||
+        isBoolean(arg) ||
+        isJSONObject(arg) ||
+        isJSONArray(arg)
+    );
+}
+export interface JSONObject {
+    [x: string]: JSONValue;
+}
+export function isJSONObject(arg: unknown): arg is JSONObject {
+    if (!isObject(arg)) {
+        return false;
+    }
+    const obj = arg as object;
+    for (const [key, value] of Object.entries(obj)) {
+        if (!isString(key) || !isJSONValue(value)) {
+            return false;
+        }
+    }
+    return true;
+}
+export interface JSONArray extends Array<JSONValue> {
+}
+export const isJSONArray = toIsArray(isJSONValue);
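
These helpers give typed access to raw JSON instead of `any`. A rough usage sketch (the JSON string is just an example):

const value = parseJSON('{"failures": 2, "recipient": "node-owner@example.com"}');
if (isJSONObject(value)) {
    // Inside this branch, value is a JSONObject and each property is a JSONValue.
    console.log(value["failures"]);
}
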
 export type EnumValue<E> = E[keyof E];
 export type EnumTypeGuard<E> = TypeGuard<EnumValue<E>>;
@@ -10,33 +62,10 @@ export function unhandledEnumField(field: never): never {
     throw new Error(`Unhandled enum field: ${field}`);
 }
-export function to<Type extends { readonly __tag: symbol, value: any } = { readonly __tag: unique symbol, value: never }>(value: Type['value']): Type {
-    return value as any as Type;
-}
-export function lift2<Result, Type extends { readonly __tag: symbol, value: any }>(callback: (a: Type["value"], b: Type["value"]) => Result): (newtype1: Type, newtype2: Type) => Result {
-    return (a, b) => callback(a.value, b.value);
-}
-export function equal<Result, Type extends { readonly __tag: symbol, value: any }>(a: Type, b: Type): boolean {
-    return lift2((a, b) => a === b)(a, b);
-}
 export function isObject(arg: unknown): arg is object {
     return arg !== null && typeof arg === "object";
 }
-export function toIsNewtype<Type extends { readonly __tag: symbol, value: Value } = { readonly __tag: unique symbol, value: never }, Value = any>(isValue: TypeGuard<Value>): TypeGuard<Type> {
-    // TODO: Add validation pattern.
-    return (arg: unknown): arg is Type => {
-        if (!isObject(arg)) {
-            return false;
-        }
-        const newtype = arg as Type;
-        return isValue(newtype.value);
-    }
-}
 export function isArray<T>(arg: unknown, isT: TypeGuard<T>): arg is Array<T> {
     if (!Array.isArray(arg)) {
         return false;
@@ -77,11 +106,15 @@ export function isOptional<T>(arg: unknown, isT: TypeGuard<T>): arg is (T | unde
     return arg === undefined || isT(arg);
 }
-export type Version = string;
-// Should be good enough for now.
+export type Url = string & { readonly __tag: unique symbol };
+export const isUrl = isString;
+export type Version = string & { readonly __tag: unique symbol };
 export const isVersion = isString;
+export type EmailAddress = string & { readonly __tag: unique symbol };
+export const isEmailAddress = isString;
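
Url, Version and EmailAddress all use the same branded-string pattern: the brand exists only at compile time, the runtime value is a plain string (hence the guards are simply isString), and values are introduced with a cast. A sketch of the idea (the address is made up):

const contact = "kontakt@example.com" as EmailAddress;

function notify(recipient: EmailAddress): void {
    // ...
}

notify(contact);            // OK
// notify("some string");   // rejected: string is not assignable to EmailAddress
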
 export type NodeStatistics = {
     registered: number;
     withVPN: number;
@@ -119,10 +152,11 @@ export class CommunityConfig {
     constructor(
         @Field("name") public name: string,
         @Field("domain") public domain: string,
-        @Field("contactEmail") public contactEmail: string,
-        @ArrayField("sites", String) public sites: string[],
-        @ArrayField("domains", String) public domains: string[],
-    ) {}
+        @Field("contactEmail") public contactEmail: EmailAddress,
+        @ArrayField("sites", String) public sites: Site[],
+        @ArrayField("domains", String) public domains: Domain[],
+    ) {
+    }
 }
 export function isCommunityConfig(arg: unknown): arg is CommunityConfig {
@@ -133,17 +167,18 @@ export function isCommunityConfig(arg: unknown): arg is CommunityConfig {
     return (
         isString(cfg.name) &&
         isString(cfg.domain) &&
-        isString(cfg.contactEmail) &&
-        isArray(cfg.sites, isString) &&
-        isArray(cfg.domains, isString)
+        isEmailAddress(cfg.contactEmail) &&
+        isArray(cfg.sites, isSite) &&
+        isArray(cfg.domains, isDomain)
     );
 }
 export class LegalConfig {
     constructor(
-        @Field("privacyUrl", true) public privacyUrl?: string,
-        @Field("imprintUrl", true) public imprintUrl?: string,
-    ) {}
+        @Field("privacyUrl", true) public privacyUrl?: Url,
+        @Field("imprintUrl", true) public imprintUrl?: Url,
+    ) {
+    }
 }
 export function isLegalConfig(arg: unknown): arg is LegalConfig {
@@ -152,15 +187,16 @@ export function isLegalConfig(arg: unknown): arg is LegalConfig {
     }
     const cfg = arg as LegalConfig;
     return (
-        isOptional(cfg.privacyUrl, isString) &&
-        isOptional(cfg.imprintUrl, isString)
+        isOptional(cfg.privacyUrl, isUrl) &&
+        isOptional(cfg.imprintUrl, isUrl)
     );
 }
 export class ClientMapConfig {
     constructor(
-        @Field("mapUrl") public mapUrl: string,
-    ) {}
+        @Field("mapUrl") public mapUrl: Url,
+    ) {
+    }
 }
 export function isClientMapConfig(arg: unknown): arg is ClientMapConfig {
@@ -168,13 +204,14 @@ export function isClientMapConfig(arg: unknown): arg is ClientMapConfig {
         return false;
     }
     const cfg = arg as ClientMapConfig;
-    return isString(cfg.mapUrl);
+    return isUrl(cfg.mapUrl);
 }
 export class MonitoringConfig {
     constructor(
         @Field("enabled") public enabled: boolean,
-    ) {}
+    ) {
+    }
 }
 export function isMonitoringConfig(arg: unknown): arg is MonitoringConfig {
@@ -185,43 +222,45 @@ export function isMonitoringConfig(arg: unknown): arg is MonitoringConfig {
     return isBoolean(cfg.enabled);
 }
-export class Coords {
+export class CoordinatesConfig {
     constructor(
         @Field("lat") public lat: number,
         @Field("lng") public lng: number,
-    ) {}
+    ) {
+    }
 }
-export function isCoords(arg: unknown): arg is Coords {
+export function isCoordinatesConfig(arg: unknown): arg is CoordinatesConfig {
     if (!isObject(arg)) {
         return false;
     }
-    const coords = arg as Coords;
+    const coords = arg as CoordinatesConfig;
     return (
         isNumber(coords.lat) &&
         isNumber(coords.lng)
     );
 }
-export class CoordsSelectorConfig {
+export class CoordinatesSelectorConfig {
     constructor(
         @Field("lat") public lat: number,
         @Field("lng") public lng: number,
         @Field("defaultZoom") public defaultZoom: number,
-        @RawJsonField("layers") public layers: any, // TODO: Better types!
-    ) {}
+        @RawJsonField("layers") public layers: JSONObject,
+    ) {
+    }
 }
-export function isCoordsSelectorConfig(arg: unknown): arg is CoordsSelectorConfig {
+export function isCoordinatesSelectorConfig(arg: unknown): arg is CoordinatesSelectorConfig {
     if (!isObject(arg)) {
         return false;
     }
-    const cfg = arg as CoordsSelectorConfig;
+    const cfg = arg as CoordinatesSelectorConfig;
     return (
         isNumber(cfg.lat) &&
         isNumber(cfg.lng) &&
         isNumber(cfg.defaultZoom) &&
-        isObject(cfg.layers) // TODO: Better types!
+        isJSONObject(cfg.layers)
     );
 }
@@ -229,8 +268,9 @@ export class OtherCommunityInfoConfig {
     constructor(
         @Field("showInfo") public showInfo: boolean,
         @Field("showBorderForDebugging") public showBorderForDebugging: boolean,
-        @ArrayField("localCommunityPolygon", Coords) public localCommunityPolygon: Coords[],
-    ) {}
+        @ArrayField("localCommunityPolygon", CoordinatesConfig) public localCommunityPolygon: CoordinatesConfig[],
+    ) {
+    }
 }
 export function isOtherCommunityInfoConfig(arg: unknown): arg is OtherCommunityInfoConfig {
@@ -241,7 +281,7 @@ export function isOtherCommunityInfoConfig(arg: unknown): arg is OtherCommunityI
     return (
         isBoolean(cfg.showInfo) &&
         isBoolean(cfg.showBorderForDebugging) &&
-        isArray(cfg.localCommunityPolygon, isCoords)
+        isArray(cfg.localCommunityPolygon, isCoordinatesConfig)
     );
 }
@@ -251,7 +291,7 @@ export class ClientConfig {
         @Field("legal") public legal: LegalConfig,
         @Field("map") public map: ClientMapConfig,
         @Field("monitoring") public monitoring: MonitoringConfig,
-        @Field("coordsSelector") public coordsSelector: CoordsSelectorConfig,
+        @Field("coordsSelector") public coordsSelector: CoordinatesSelectorConfig,
         @Field("otherCommunityInfo") public otherCommunityInfo: OtherCommunityInfoConfig,
         @Field("rootPath", true, undefined, "/") public rootPath: string,
     ) {
@@ -268,42 +308,33 @@ export function isClientConfig(arg: unknown): arg is ClientConfig {
         isLegalConfig(cfg.legal) &&
         isClientMapConfig(cfg.map) &&
         isMonitoringConfig(cfg.monitoring) &&
-        isCoordsSelectorConfig(cfg.coordsSelector) &&
+        isCoordinatesSelectorConfig(cfg.coordsSelector) &&
         isOtherCommunityInfoConfig(cfg.otherCommunityInfo) &&
         isString(cfg.rootPath)
     );
 }
 // TODO: Token type.
-export type Token = {
-    value: string;
-    readonly __tag: unique symbol
-};
-export const isToken = toIsNewtype<Token>(isString);
+export type Token = string & { readonly __tag: unique symbol };
+export const isToken = isString;
-export type FastdKey = {
-    value: string;
-    readonly __tag: unique symbol
-};
-export const isFastdKey = toIsNewtype<FastdKey>(isString);
+export type FastdKey = string & { readonly __tag: unique symbol };
+export const isFastdKey = isString;
-export type MAC = {
-    value: string;
-    readonly __tag: unique symbol
-};
-export const isMAC = toIsNewtype<MAC>(isString);
+export type MAC = string & { readonly __tag: unique symbol };
+export const isMAC = isString;
 export type UnixTimestampSeconds = number & { readonly __tag: unique symbol };
+export const isUnixTimestampSeconds = isNumber;
 export type UnixTimestampMilliseconds = number & { readonly __tag: unique symbol };
+export const isUnixTimestampMilliseconds = isNumber;
 export function toUnixTimestampSeconds(ms: UnixTimestampMilliseconds): UnixTimestampSeconds {
     return Math.floor(ms) as UnixTimestampSeconds;
 }
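
The timestamp brands follow the same pattern for numbers, so values such as Date.now() have to be branded before they can be passed to toUnixTimestampSeconds. A minimal sketch (illustration only):

const nowMs = Date.now() as UnixTimestampMilliseconds;
const now: UnixTimestampSeconds = toUnixTimestampSeconds(nowMs);
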
-export type MonitoringToken = {
-    value: string;
-    readonly __tag: unique symbol
-};
+export type MonitoringToken = string & { readonly __tag: unique symbol };
 export enum MonitoringState {
     ACTIVE = "active",
@@ -313,25 +344,31 @@ export enum MonitoringState {
 export const isMonitoringState = toIsEnum(MonitoringState);
-export type NodeId = {
-    value: string;
-    readonly __tag: unique symbol
-};
+export type NodeId = string & { readonly __tag: unique symbol };
+export type Hostname = string & { readonly __tag: unique symbol };
+export const isHostname = isString;
+export type Nickname = string & { readonly __tag: unique symbol };
+export const isNickname = isString;
+export type Coordinates = string & { readonly __tag: unique symbol };
+export const isCoordinates = isString;
 // TODO: More Newtypes
-export interface Node {
+export type Node = {
     token: Token;
-    nickname: string;
-    email: string;
-    hostname: string;
-    coords?: string; // TODO: Use object with longitude and latitude.
+    nickname: Nickname;
+    email: EmailAddress;
+    hostname: Hostname;
+    coords?: Coordinates;
     key?: FastdKey;
     mac: MAC;
     monitoring: boolean;
     monitoringConfirmed: boolean;
     monitoringState: MonitoringState;
     modifiedAt: UnixTimestampSeconds;
-}
+};
 export function isNode(arg: unknown): arg is Node {
     if (!isObject(arg)) {
@@ -340,16 +377,16 @@ export function isNode(arg: unknown): arg is Node {
     const node = arg as Node;
     return (
         isToken(node.token) &&
-        isString(node.nickname) &&
-        isString(node.email) &&
-        isString(node.hostname) &&
-        isOptional(node.coords, isString) &&
+        isNickname(node.nickname) &&
+        isEmailAddress(node.email) &&
+        isHostname(node.hostname) &&
+        isOptional(node.coords, isCoordinates) &&
         isOptional(node.key, isFastdKey) &&
         isMAC(node.mac) &&
         isBoolean(node.monitoring) &&
         isBoolean(node.monitoringConfirmed) &&
         isMonitoringState(node.monitoringState) &&
-        isNumber(node.modifiedAt)
+        isUnixTimestampSeconds(node.modifiedAt)
     );
 }
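
isNode can now reject untyped data (for example a row coming back from SQLite) before it is used as a Node. A hedged sketch with a hypothetical loadRow helper:

// loadRow is a made-up stand-in for any source of untyped data.
declare function loadRow(): unknown;

const row = loadRow();
if (!isNode(row)) {
    throw new Error("Row is not a valid node.");
}
// From here on, row is typed as Node.
console.log(row.hostname, row.monitoringState);
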
@@ -360,17 +397,11 @@ export enum OnlineState {
 export const isOnlineState = toIsEnum(OnlineState);
-export type Site = {
-    value: string;
-    readonly __tag: unique symbol
-};
-export const isSite = toIsNewtype<Site>(isString);
+export type Site = string & { readonly __tag: unique symbol };
+export const isSite = isString;
-export type Domain = {
-    value: string;
-    readonly __tag: unique symbol
-};
-export const isDomain = toIsNewtype<Domain>(isString);
+export type Domain = string & { readonly __tag: unique symbol };
+export const isDomain = isString;
 export interface EnhancedNode extends Node {
     site?: Site,

View file

@@ -17,7 +17,11 @@ export function normalizeMac (mac: string): string {
     return macParts.join(':');
 }
-export function parseInteger (str: string): number | undefined {
+export function parseInteger (str: string): number {
     const parsed = _.parseInt(str, 10);
-    return parsed.toString() === str ? parsed : undefined;
+    if (parsed.toString() === str) {
+        return parsed;
+    } else {
+        throw new SyntaxError(`String does not represent a valid integer: "${str}"`);
+    }
 }
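
parseInteger no longer signals failure via undefined; it throws a SyntaxError instead, so callers either let the error propagate or catch it. A short sketch of the new calling convention (the fallback value is made up):

let port: number;
try {
    port = parseInteger("8080");
} catch (error) {
    // Reached for inputs like "8080x" or "".
    port = 8080; // fall back to some default
}
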

View file

@@ -1,11 +1,9 @@
 import _ from "lodash"
 import {config} from "../config"
-import {MonitoringToken} from "../types"
+import {MonitoringToken, Url} from "../types"
-// TODO: Typed URLs
-function formUrl(route: string, queryParams?: { [key: string]: string }): string {
-    let url = config.server.baseUrl;
+function formUrl(route: string, queryParams?: { [key: string]: string }): Url {
+    let url = config.server.baseUrl as string;
     if (route || queryParams) {
         url += '/#/';
     }
@@ -24,17 +22,17 @@ function formUrl(route: string, queryParams?: { [key: string]: string }): string
             '&'
         );
     }
-    return url;
+    return url as Url;
 }
-export function editNodeUrl(): string {
+export function editNodeUrl(): Url {
     return formUrl('update');
 }
-export function monitoringConfirmUrl(monitoringToken: MonitoringToken): string {
-    return formUrl('monitoring/confirm', {token: monitoringToken.value});
+export function monitoringConfirmUrl(monitoringToken: MonitoringToken): Url {
+    return formUrl('monitoring/confirm', {token: monitoringToken});
 }
-export function monitoringDisableUrl(monitoringToken: MonitoringToken): string {
-    return formUrl('monitoring/disable', {token: monitoringToken.value});
+export function monitoringDisableUrl(monitoringToken: MonitoringToken): Url {
+    return formUrl('monitoring/disable', {token: monitoringToken});
 }
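
Because MonitoringToken is now a branded string, the token is embedded directly (no more .value), and the builders return a branded Url. A usage sketch with a made-up token value:

const token = "0123456789abcdef" as MonitoringToken;
const confirmUrl: Url = monitoringConfirmUrl(token);
// Roughly "<baseUrl>/#/monitoring/confirm?token=0123456789abcdef",
// depending on config.server.baseUrl.
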

yarn.lock (583)

File diff suppressed because it is too large.