Migrate to version 2 of nodes.json and start adding tests
parent 8e7b02e56d · commit fb87695b3e · 23 changed files with 7352 additions and 1228 deletions

@@ -153,7 +153,7 @@ Then adjust `config.json` as desired. There are the following configuration…
}
```

* **`server.map.nodesJsonUrl`** URL of the meshviewer's `nodes.json` (can be a URL or a list of URLs), e.g.: `["http://musterstadt.freifunk.net/nodes.json"]`
* **`server.map.nodesJsonUrl`** URL of the hopglass `nodes.json` (the format must be version 2), e.g.: `["http://musterstadt.freifunk.net/nodes.json"]`

* **`client.community.name`** Name of the Freifunk community, e.g.: `"Freifunk Musterstadt"`
* **`client.community.domain`** Domain of the Freifunk community, e.g.: `"musterstadt.freifunk.net"`
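For reference, here is a minimal sketch of the version-2 `nodes.json` payload that the new parser in this commit accepts. The field names are taken from the parsing tests added below (`parseNodesJson()` / `parseNode()` in `monitoringService.test.ts`); the concrete values are placeholders, not real data.

```typescript
// Minimal version-2 nodes.json payload, as exercised by the tests in
// server/services/monitoringService.test.ts (placeholder values).
const nodesJsonV2 = {
    version: 2,
    timestamp: "2020-01-02T12:34:56.000Z",
    nodes: [
        {
            nodeinfo: {
                node_id: "1234567890ab",
                network: {mac: "12:34:56:78:90:ab"},
                system: {site_code: "test-site", domain_code: "test-domain"}
            },
            flags: {online: true},
            lastseen: "2020-01-02T12:34:56.000Z"
        }
    ]
};
```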
@@ -3,6 +3,7 @@

## TODO

* Test email rendering!
* Get rid of unhandled promise rejection warnings in tests.

## Short term
jest.server.config.js — new file (10 lines)
@@ -0,0 +1,10 @@
module.exports = {
    preset: 'ts-jest',
    testEnvironment: 'node',
    roots: ['<rootDir>/server'],
    globals: {
        'ts-jest': {
            tsConfig: '<rootDir>/server/tsconfig.json'
        }
    }
};
package-lock.json — generated, 7060 lines changed (file diff suppressed because it is too large)
package.json — 10 lines changed
@@ -1,6 +1,6 @@
{
  "name": "ffffng",
  "version": "0.13.4",
  "version": "0.13.5-SNAPSHOT",
  "description": "Node management form for Freifunk",
  "license": "MIT",
  "author": {
@@ -19,9 +19,12 @@
    "ffffng": "server/main.js"
  },
  "scripts": {
    "test": "npm run server:test",
    "build": "npm run server:build && grunt build",
    "clean": "rm -rf server-build/ && grunt clean",
    "dist": "npm run clean && npm run build && npm run test",
    "client:serve": "grunt serve",
    "server:test": "jest --config=jest.server.config.js",
    "server:build": "tsc -b server && ln -sfv ../server/db/patches ./server-build/db/ && ln -sfv ../server/templates ./server-build/ && ln -sfv ../server/mailTemplates ./server-build/",
    "server:run": "npm run server:build && node server-build/main.js"
  },
@@ -61,6 +64,7 @@
    "@types/glob": "^7.1.1",
    "@types/graceful-fs": "^4.1.3",
    "@types/html-to-text": "^1.4.31",
    "@types/jest": "^26.0.3",
    "@types/lodash": "^4.14.149",
    "@types/node": "^13.11.0",
    "@types/node-cron": "^2.0.3",
@@ -95,10 +99,12 @@
    "grunt-usemin": "^3.1.1",
    "grunt-wiredep": "^3.0.1",
    "imagemin-gifsicle": "^6.0.1",
    "jest": "^26.1.0",
    "jshint-stylish": "^2.2.1",
    "load-grunt-tasks": "^5.1.0",
    "time-grunt": "^2.0.0",
    "typescript": "^3.8.3"
    "ts-jest": "^26.1.1",
    "typescript": "^3.9.5"
  },
  "engines": {
    "node": ">=10.0.0"
@@ -41,8 +41,7 @@ fi
echo

if confirm "Continue publishing?"; then
    npm run clean
    npm run build
    npm run dist

    cd dist
    npm publish
server/__mocks__/logger.test.ts — new file (167 lines)
@@ -0,0 +1,167 @@
import {MockLogger} from "./logger";

test("should reset single message", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("test").debug("message");
    logger.reset();

    // then
    expect(logger.getMessages("debug", "test")).toEqual([]);
});

test("should reset multiple messages", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("test").debug("message 1");
    logger.tag("test").debug("message 2");
    logger.reset();

    // then
    expect(logger.getMessages("debug", "test")).toEqual([]);
});

test("should reset multiple nested messages", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("foo", "bar").debug("message 1");
    logger.tag("foo", "bar").debug("message 2");
    logger.tag("foo").debug("message 3");
    logger.tag("baz").debug("message 4");
    logger.tag("baz").debug("message 5");
    logger.reset();

    // then
    expect(logger.getMessages("debug", "foo", "bar")).toEqual([]);
    expect(logger.getMessages("debug", "foo")).toEqual([]);
    expect(logger.getMessages("debug", "baz")).toEqual([]);
});

test("should not get messages without logging", () => {
    // given
    const logger = new MockLogger();

    // then
    expect(logger.getMessages("debug")).toEqual([]);
    expect(logger.getMessages("debug", "foo")).toEqual([]);
    expect(logger.getMessages("debug", "foo", "bar")).toEqual([]);
});

test("should not get messages for no tag", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("test").debug("message");

    // then
    expect(logger.getMessages("debug")).toEqual([]);
});

test("should not get messages for wrong single tag", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("foo").debug("message");

    // then
    expect(logger.getMessages("debug", "bar")).toEqual([]);
});

test("should not get messages for wrong tags", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("foo", "bar").debug("message");

    // then
    expect(logger.getMessages("debug", "baz")).toEqual([]);
    expect(logger.getMessages("debug", "foo", "baz")).toEqual([]);
});

test("should not get messages for wrong level", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("foo", "bar").debug("message");

    // then
    expect(logger.getMessages("info", "foo", "bar")).toEqual([]);
});

test("should get messages for no tag", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag().debug("message");

    // then
    expect(logger.getMessages("debug")).toEqual([["message"]]);
});

test("should get messages for single tag", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("test").debug("message");

    // then
    expect(logger.getMessages("debug", "test")).toEqual([["message"]]);
});

test("should get messages for multiple tags", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("foo", "bar").debug("message");

    // then
    expect(logger.getMessages("debug", "foo", "bar")).toEqual([["message"]]);
});

test("should get messages for correct tags", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("foo", "bar").debug("message 1");
    logger.tag("foo", "baz").debug("message 2");

    // then
    expect(logger.getMessages("debug", "foo", "bar")).toEqual([["message 1"]]);
});

test("should get multiple messages", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("foo", "bar").debug("message 1");
    logger.tag("foo", "bar").debug("message 2");

    // then
    expect(logger.getMessages("debug", "foo", "bar")).toEqual([["message 1"], ["message 2"]]);
});

test("should get complex message", () => {
    // given
    const logger = new MockLogger();

    // when
    logger.tag("foo", "bar").debug("message", 1, false, {});

    // then
    expect(logger.getMessages("debug", "foo", "bar")).toEqual([["message", 1, false, {}]]);
});
server/__mocks__/logger.ts — new file (73 lines)
@@ -0,0 +1,73 @@
import {Logger, TaggedLogger} from "../types";

type LogLevel = 'debug' | 'info' | 'warn' | 'error' | 'profile';

export type MockLogMessages = any[][];
type TaggedLogMessages = {
    tags: {[key: string]: TaggedLogMessages},
    logs: {[key: string]: MockLogMessages}
}

export class MockLogger implements Logger {
    private taggedLogMessages: TaggedLogMessages = MockLogger.emptyTaggedLogMessages();

    constructor() {}

    private static emptyTaggedLogMessages(): TaggedLogMessages {
        return {
            tags: {},
            logs: {}
        };
    }

    reset(): void {
        this.taggedLogMessages = MockLogger.emptyTaggedLogMessages();
    }

    getMessages(level: LogLevel, ...tags: string[]): MockLogMessages {
        let taggedLogMessages = this.taggedLogMessages;
        for (const tag of tags) {
            if (!taggedLogMessages.tags[tag]) {
                return [];
            }

            taggedLogMessages = taggedLogMessages.tags[tag];
        }

        return taggedLogMessages.logs[level] || [];
    }

    init(): void {}

    private doLog(taggedLogMessages: TaggedLogMessages, level: LogLevel, tags: string[], args: any[]): void {
        if (tags.length > 0) {
            const tag = tags[0];
            const remainingTags = tags.slice(1);
            const subTaggedLogsMessages: TaggedLogMessages =
                taggedLogMessages.tags[tag] || MockLogger.emptyTaggedLogMessages();
            this.doLog(subTaggedLogsMessages, level, remainingTags, args);
            taggedLogMessages.tags[tag] = subTaggedLogsMessages;

        } else {
            const logMessages: MockLogMessages = taggedLogMessages.logs[level] || [];
            logMessages.push(args);
            taggedLogMessages.logs[level] = logMessages;
        }
    }

    private log(level: LogLevel, tags: string[], args: any[]): void {
        this.doLog(this.taggedLogMessages, level, tags, args);
    }

    tag(...tags: string[]): TaggedLogger {
        return {
            debug: (...args: any[]): void => this.log('debug', tags, args),
            info: (...args: any[]): void => this.log('info', tags, args),
            warn: (...args: any[]): void => this.log('warn', tags, args),
            error: (...args: any[]): void => this.log('error', tags, args),
            profile: (...args: any[]): void => this.log('profile', tags, args),
        }
    }
}

export default new MockLogger();
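A short sketch of how a test picks up this mock; it mirrors the pattern used in `server/services/monitoringService.test.ts` below. The import paths assume a test file under `server/services/`, and the tag and message are just examples:

```typescript
import Logger from "../logger";
import {MockLogger} from "../__mocks__/logger";

// Jest substitutes the implementation from __mocks__/logger for ../logger.
jest.mock("../logger");
const mockedLogger = Logger as MockLogger;

beforeEach(() => {
    mockedLogger.reset();
});

test("collects tagged log messages", () => {
    mockedLogger.tag("example").debug("hello");
    expect(mockedLogger.getMessages("debug", "example")).toEqual([["hello"]]);
});
```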
@@ -7,60 +7,60 @@ import {promises as fs} from "graceful-fs"

import {config} from "./config";

const app: Express = express();
export const app: Express = express();

const router = express.Router();
export function init(): void {
    const router = express.Router();

    // urls beneath /internal are protected
    const internalAuth = auth.basic(
        {
            realm: 'Knotenformular - Intern'
        },
        function (username: string, password: string, callback: BasicAuthCheckerCallback): void {
            callback(
                config.server.internal.active &&
                username === config.server.internal.user &&
                password === config.server.internal.password
            );
    // urls beneath /internal are protected
    const internalAuth = auth.basic(
        {
            realm: 'Knotenformular - Intern'
        },
        function (username: string, password: string, callback: BasicAuthCheckerCallback): void {
            callback(
                config.server.internal.active &&
                username === config.server.internal.user &&
                password === config.server.internal.password
            );
        }
    );
    router.use('/internal', auth.connect(internalAuth));

    router.use(bodyParser.json());
    router.use(bodyParser.urlencoded({ extended: true }));

    const adminDir = __dirname + '/../admin';
    const clientDir = __dirname + '/../client';
    const templateDir = __dirname + '/templates';

    const jsTemplateFiles = [
        '/config.js'
    ];

    function usePromise(f: (req: Request, res: Response) => Promise<void>): void {
        router.use((req: Request, res: Response, next: NextFunction): void => {
            f(req, res).then(next).catch(next)
        });
    }
    );
    router.use('/internal', auth.connect(internalAuth));

    router.use(bodyParser.json());
    router.use(bodyParser.urlencoded({ extended: true }));
    router.use(compress());

    const adminDir = __dirname + '/../admin';
    const clientDir = __dirname + '/../client';
    const templateDir = __dirname + '/templates';
    async function serveTemplate (mimeType: string, req: Request, res: Response): Promise<void> {
        const body = await fs.readFile(templateDir + '/' + req.path + '.template', 'utf8');

    const jsTemplateFiles = [
        '/config.js'
    ];
        res.writeHead(200, { 'Content-Type': mimeType });
        res.end(_.template(body)({ config: config.client }));
    }

    function usePromise(f: (req: Request, res: Response) => Promise<void>): void {
        router.use((req: Request, res: Response, next: NextFunction): void => {
            f(req, res).then(next).catch(next)
    usePromise(async (req: Request, res: Response): Promise<void> => {
        if (jsTemplateFiles.indexOf(req.path) >= 0) {
            await serveTemplate('application/javascript', req, res);
        }
    });

    router.use('/internal/admin', express.static(adminDir + '/'));
    router.use('/', express.static(clientDir + '/'));

    app.use(config.server.rootPath, router);
}

router.use(compress());

async function serveTemplate (mimeType: string, req: Request, res: Response): Promise<void> {
    const body = await fs.readFile(templateDir + '/' + req.path + '.template', 'utf8');

    res.writeHead(200, { 'Content-Type': mimeType });
    res.end(_.template(body)({ config: config.client }));
}

usePromise(async (req: Request, res: Response): Promise<void> => {
    if (jsTemplateFiles.indexOf(req.path) >= 0) {
        await serveTemplate('application/javascript', req, res);
    }
});

router.use('/internal/admin', express.static(adminDir + '/'));
router.use('/', express.static(clientDir + '/'));

app.use(config.server.rootPath, router);

export default app;
server/config.ts — 143 lines changed
@@ -2,131 +2,14 @@ import commandLineArgs from "command-line-args"
import commandLineUsage from "command-line-usage"
import fs from "graceful-fs"
import url from "url"
import {ArrayField, Field, parse, RawJsonField} from "sparkson"
import {parse} from "sparkson"
import {Config, Version} from "./types"

// TODO: Replace string types by more specific types like URL, Password, etc.
// @ts-ignore
export let config: Config = {};
export let version: Version = "unknown";

export class LoggingConfig {
    constructor(
        @Field("directory") public directory: string,
        @Field("debug") public debug: boolean,
        @Field("profile") public profile: boolean,
        @Field("logRequests") public logRequests: boolean,
    ) {}
}

export class InternalConfig {
    constructor(
        @Field("active") public active: boolean,
        @Field("user") public user: string,
        @Field("password") public password: string,
    ) {}
}

export class EmailConfig {
    constructor(
        @Field("from") public from: string,

        // For details see: https://nodemailer.com/2-0-0-beta/setup-smtp/
        @RawJsonField("smtp") public smtp: any, // TODO: Better types!
    ) {}
}

export class ServerMapConfig {
    constructor(
        @ArrayField("nodesJsonUrl", String) public nodesJsonUrl: string[],
    ) {}
}

export class ServerConfig {
    constructor(
        @Field("baseUrl") public baseUrl: string,
        @Field("port") public port: number,

        @Field("databaseFile") public databaseFile: string,
        @Field("peersPath") public peersPath: string,

        @Field("logging") public logging: LoggingConfig,
        @Field("internal") public internal: InternalConfig,
        @Field("email") public email: EmailConfig,
        @Field("map") public map: ServerMapConfig,

        @Field("rootPath", true, undefined, "/") public rootPath: string,
    ) {}
}

export class CommunityConfig {
    constructor(
        @Field("name") public name: string,
        @Field("domain") public domain: string,
        @Field("contactEmail") public contactEmail: string,
        @ArrayField("sites", String) public sites: string[],
        @ArrayField("domains", String) public domains: string[],
    ) {}
}

export class LegalConfig {
    constructor(
        @Field("privacyUrl", true) public privacyUrl?: string,
        @Field("imprintUrl", true) public imprintUrl?: string,
    ) {}
}

export class ClientMapConfig {
    constructor(
        @Field("mapUrl") public mapUrl: string,
    ) {}
}
export class MonitoringConfig {
    constructor(
        @Field("enabled") public enabled: boolean,
    ) {}
}

export class Coords {
    constructor(
        @Field("lat") public lat: number,
        @Field("lng") public lng: number,
    ) {}
}

export class CoordsSelectorConfig {
    constructor(
        @Field("lat") public lat: number,
        @Field("lng") public lng: number,
        @Field("defaultZoom") public defaultZoom: number,
        @RawJsonField("layers") public layers: any, // TODO: Better types!
    ) {}
}

export class OtherCommunityInfoConfig {
    constructor(
        @Field("showInfo") public showInfo: boolean,
        @Field("showBorderForDebugging") public showBorderForDebugging: boolean,
        @ArrayField("localCommunityPolygon", Coords) public localCommunityPolygon: Coords[],
    ) {}
}

export class ClientConfig {
    constructor(
        @Field("community") public community: CommunityConfig,
        @Field("legal") public legal: LegalConfig,
        @Field("map") public map: ClientMapConfig,
        @Field("monitoring") public monitoring: MonitoringConfig,
        @Field("coordsSelector") public coordsSelector: CoordsSelectorConfig,
        @Field("otherCommunityInfo") public otherCommunityInfo: OtherCommunityInfoConfig,
        @Field("rootPath", true, undefined, "/") public rootPath: string,
    ) {}
}

export class Config {
    constructor(
        @Field("server") public server: ServerConfig,
        @Field("client") public client: ClientConfig
    ) {}
}

function parseCommandLine(): {config: Config, version: string} {
export function parseCommandLine(): void {
    const commandLineDefs = [
        { name: 'help', alias: 'h', type: Boolean, description: 'Show this help' },
        { name: 'config', alias: 'c', type: String, description: 'Location of config.json' },
@@ -143,7 +26,6 @@ function parseCommandLine(): {config: Config, version: string} {
    }

    const packageJsonFile = __dirname + '/../package.json';
    let version = 'unknown';
    if (fs.existsSync(packageJsonFile)) {
        version = JSON.parse(fs.readFileSync(packageJsonFile, 'utf8')).version;
    }
@@ -182,7 +64,7 @@ function parseCommandLine(): {config: Config, version: string} {
        process.exit(1);
    }

    const config: Config = parse(Config, configJSON);
    config = parse(Config, configJSON);

    function stripTrailingSlash(url: string): string {
        return url.endsWith("/") ? url.substr(0, url.length - 1) : url;
@@ -193,15 +75,4 @@ function parseCommandLine(): {config: Config, version: string} {

    config.server.rootPath = url.parse(config.server.baseUrl).pathname || "/";
    config.client.rootPath = config.server.rootPath;

    return {
        config,
        version
    }
}

const {config, version} = parseCommandLine();

export {config};
export {version};
server/db/__mocks__/database.ts — new file (93 lines)
@@ -0,0 +1,93 @@
import {Database, Statement} from "sqlite";

export async function init(): Promise<void> {}

export class MockStatement implements Statement {
    constructor() {}

    readonly changes: number = 0;
    readonly lastID: number = 0;
    readonly sql: string = "";

    async all(): Promise<any[]>;
    async all(...params: any[]): Promise<any[]>;
    async all<T>(): Promise<T[]>;
    async all<T>(...params: any[]): Promise<T[]>;
    all(...params: any[]): any {
    }

    async bind(): Promise<Statement>;
    async bind(...params: any[]): Promise<Statement>;
    async bind(...params: any[]): Promise<Statement> {
        return mockStatement();
    }

    async each(callback?: (err: Error, row: any) => void): Promise<number>;
    async each(...params: any[]): Promise<number>;
    async each(...callback: (((err: Error, row: any) => void) | any)[]): Promise<number> {
        return 0;
    }

    async finalize(): Promise<void> {}

    get(): Promise<any>;
    get(...params: any[]): Promise<any>;
    get<T>(): Promise<T>;
    get<T>(...params: any[]): Promise<T>;
    get(...params: any[]): any {
    }

    async reset(): Promise<Statement> {
        return mockStatement();
    }

    async run(): Promise<Statement>;
    async run(...params: any[]): Promise<Statement>;
    async run(...params: any[]): Promise<Statement> {
        return mockStatement();
    }
}

function mockStatement(): Statement {
    return new MockStatement();
}

export class MockDatabase implements Database {
    constructor() {}

    async close(): Promise<void> {}

    async run(...args: any): Promise<Statement> {
        return mockStatement();
    }

    async get(...args: any): Promise<any> {}

    async all(...args: any): Promise<any[]> {
        return [];
    }

    async exec(...args: any): Promise<Database> {
        return this;
    }

    async each(...args: any): Promise<number> {
        return 0;
    }

    async prepare(...args: any): Promise<Statement> {
        return mockStatement();
    }

    configure(...args: any): void {}

    async migrate(...args: any): Promise<Database> {
        return this;
    }

    on(...args: any): void {}
}

export const db: MockDatabase = new MockDatabase();

export {Database, Statement}
@@ -48,11 +48,14 @@ async function applyMigrations(db: sqlite.Database): Promise<void> {
    }
}

const file = config.server.databaseFile;
const dbPromise = sqlite.open(file);
const dbPromise = new Promise<Database>((resolve, reject) => {
    sqlite.open(config.server.databaseFile)
        .then(resolve)
        .catch(reject);
});

export async function init(): Promise<void> {
    Logger.tag('database').info('Setting up database: %s', file);
    Logger.tag('database').info('Setting up database: %s', config.server.databaseFile);

    let db: Database;
    try {
@@ -78,7 +81,12 @@
 * Wrapper around a Promise<Database> providing the same interface as the Database itself.
 */
class DatabasePromiseWrapper implements Database {
    constructor(private db: Promise<Database>) {}
    constructor(private db: Promise<Database>) {
        db.catch(err => {
            Logger.tag('database', 'init').error('Error initializing database: ', err);
            process.exit(1);
        });
    }

    async close() {
        const db = await this.db;
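Because the exported `db` wraps the pending `Promise<Database>`, callers do not have to await initialization themselves. A minimal, hypothetical usage sketch (the table name and query are made up for illustration):

```typescript
import {db} from "./db/database";

// DatabasePromiseWrapper awaits the underlying Promise<Database> inside each
// call, so the caller never touches dbPromise directly.
// NOTE: "example_table" is a hypothetical table name.
async function countRows(): Promise<number> {
    const row = await db.get("SELECT COUNT(*) AS total FROM example_table");
    return row ? row.total : 0;
}
```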
@@ -5,4 +5,7 @@
const realFs = require('fs');
const gracefulFs = require('graceful-fs');
gracefulFs.gracefulify(realFs);

// Init config by parsing commandline. Afterwards all other imports may happen.
require('./config').parseCommandLine();
})();
@@ -1,58 +0,0 @@
'use strict';

const app = require('./app').default;
const config = require('./config').config;

// Hack to allow proper logging of Error.
Object.defineProperty(Error.prototype, 'message', {
    configurable: true,
    enumerable: true
});
Object.defineProperty(Error.prototype, 'stack', {
    configurable: true,
    enumerable: true
});


const scribe = require('scribe-js')({
    rootPath: config.server.logging.directory,
});

function addLogger(name, color, active) {
    if (active) {
        process.console.addLogger(name, color, {
            logInConsole: false
        });
    } else {
        process.console[name] = function () {
            this._reset(); // forget tags, etc. for this logging event
        };
    }
}

addLogger('debug', 'grey', config.server.logging.debug);
addLogger('profile', 'blue', config.server.logging.profile);

if (config.server.logging.logRequests) {
    app.use(scribe.express.logger());
}
if (config.server.internal.active) {
    const prefix = config.server.rootPath === '/' ? '' : config.server.rootPath;
    app.use(prefix + '/internal/logs', scribe.webPanel());
}

// Hack to allow correct logging of node.js Error objects.
// See: https://github.com/bluejamesbond/Scribe.js/issues/70
Object.defineProperty(Error.prototype, 'toJSON', {
    configurable: true,
    value: function () {
        const alt = {};
        const storeKey = function (key) {
            alt[key] = this[key];
        };
        Object.getOwnPropertyNames(this).forEach(storeKey, this);
        return alt;
    }
});

module.exports = process.console;
server/logger.ts — new file (94 lines)
@@ -0,0 +1,94 @@
import {Logger, TaggedLogger} from "./types";

function procConsole() {
    // @ts-ignore
    return process.console;
}

function init(): void {
    const app = require('./app').app;
    const config = require('./config').config;

    // Hack to allow proper logging of Error.
    Object.defineProperty(Error.prototype, 'message', {
        configurable: true,
        enumerable: true
    });
    Object.defineProperty(Error.prototype, 'stack', {
        configurable: true,
        enumerable: true
    });


    const scribe = require('scribe-js')({
        rootPath: config.server.logging.directory,
    });

    function addLogger(name: string, color: string, active: boolean) {
        if (active) {
            procConsole().addLogger(name, color, {
                logInConsole: false
            });
        } else {
            // @ts-ignore
            procConsole()[name] = function () {
                this._reset(); // forget tags, etc. for this logging event
            };
        }
    }

    addLogger('debug', 'grey', config.server.logging.debug);
    addLogger('profile', 'blue', config.server.logging.profile);

    if (config.server.logging.logRequests) {
        app.use(scribe.express.logger());
    }
    if (config.server.internal.active) {
        const prefix = config.server.rootPath === '/' ? '' : config.server.rootPath;
        app.use(prefix + '/internal/logs', scribe.webPanel());
    }

    // Hack to allow correct logging of node.js Error objects.
    // See: https://github.com/bluejamesbond/Scribe.js/issues/70
    Object.defineProperty(Error.prototype, 'toJSON', {
        configurable: true,
        value: function () {
            const alt: {[key: string]: any} = {};
            const storeKey = (key: string) => {
                alt[key] = this[key];
            };
            Object.getOwnPropertyNames(this).forEach(storeKey, this);
            return alt;
        }
    });

    // @ts-ignore
    for (const key of Object.keys(procConsole())) {
        // @ts-ignore
        module.exports[key] = procConsole()[key];
    }
}

const logger: Logger = {
    init,
    tag(...tags): TaggedLogger {
        return {
            debug(...args: any): void {
                procConsole().tag(...tags).debug(...args);
            },
            info(...args: any): void {
                procConsole().tag(...tags).info(...args);
            },
            warn(...args: any): void {
                procConsole().tag(...tags).warn(...args);
            },
            error(...args: any): void {
                procConsole().tag(...tags).error(...args);
            },
            profile(...args: any): void {
                procConsole().tag(...tags).profile(...args);
            },
        }
    }
};
export default logger;
@@ -1,23 +1,28 @@
import "./init"
import { config } from "./config"
import {config} from "./config"
import Logger from "./logger"
import * as db from "./db/database"
import * as scheduler from "./jobs/scheduler"
import * as router from "./router"
import app from "./app"
import * as app from "./app"

app.init();
Logger.init();
Logger.tag('main', 'startup').info('Server starting up...');

db.init()
    .then(() => {
async function main() {
    Logger.tag('main').info('Initializing...');

    await db.init();
    scheduler.init();

    router.init();

    app.listen(config.server.port, '::');
    })
    .catch(error => {
        console.error('Could not init database: ', error);
        process.exit(1);
    });
    app.app.listen(config.server.port, '::');
}

main()
    .catch(error => {
        console.error('Unhandled runtime error:', error);
        process.exit(1);
    });
@@ -1,6 +1,6 @@
import express from "express"

import app from "./app"
import {app} from "./app"
import {config} from "./config"

import * as VersionResource from "./resources/versionResource"
@@ -1,10 +1,10 @@
import _ from "lodash";
import deepExtend from "deep-extend";
import moment, {Moment} from "moment";
import {createTransport} from "nodemailer";
import {createTransport, Transporter} from "nodemailer";

import {config} from "../config";
import {db, Statement} from "../db/database";
import {db} from "../db/database";
import Logger from "../logger";
import * as MailTemplateService from "./mailTemplateService";
import * as Resources from "../utils/resources";
@@ -13,16 +13,25 @@ import {Mail, MailData, MailId, MailType} from "../types";

const MAIL_QUEUE_DB_BATCH_SIZE = 50;

const transporter = createTransport(deepExtend(
    {},
    config.server.email.smtp,
    {
        transport: 'smtp',
        pool: true
    }
));
// TODO: Extract transporter into own module and initialize during main().
let transporterSingleton: Transporter | null = null;

MailTemplateService.configureTransporter(transporter);
function transporter() {
    if (!transporterSingleton) {
        transporterSingleton = createTransport(deepExtend(
            {},
            config.server.email.smtp,
            {
                transport: 'smtp',
                pool: true
            }
        ));

        MailTemplateService.configureTransporter(transporterSingleton);
    }

    return transporterSingleton;
}

async function sendMail(options: Mail): Promise<void> {
    Logger
@@ -42,7 +51,7 @@
        html: renderedTemplate.body
    };

    await transporter.sendMail(mailOptions);
    await transporter().sendMail(mailOptions);

    Logger.tag('mail', 'queue').info('Mail[%d] has been send.', options.id);
}
server/services/monitoringService.test.ts — new file (471 lines)
@@ -0,0 +1,471 @@
import moment from 'moment';
|
||||
import {ParsedNode, parseNode, parseNodesJson, parseTimestamp} from "./monitoringService";
|
||||
import {NodeState} from "../types";
|
||||
import Logger from '../logger';
|
||||
import {MockLogger} from "../__mocks__/logger";
|
||||
|
||||
const mockedLogger = Logger as MockLogger;
|
||||
|
||||
jest.mock('../logger');
|
||||
jest.mock('../db/database');
|
||||
|
||||
const NODES_JSON_INVALID_VERSION = 1;
|
||||
const NODES_JSON_VALID_VERSION = 2;
|
||||
|
||||
const TIMESTAMP_INVALID_STRING = "2020-01-02T42:99:23.000Z";
|
||||
const TIMESTAMP_VALID_STRING = "2020-01-02T12:34:56.000Z";
|
||||
|
||||
|
||||
beforeEach(() => {
|
||||
mockedLogger.reset();
|
||||
});
|
||||
|
||||
test('parseTimestamp() should fail parsing non-string timestamp', () => {
|
||||
// given
|
||||
const timestamp = {};
|
||||
|
||||
// when
|
||||
const parsedTimestamp = parseTimestamp(timestamp);
|
||||
|
||||
// then
|
||||
expect(parsedTimestamp.isValid()).toBe(false);
|
||||
});
|
||||
|
||||
test('parseTimestamp() should fail parsing empty timestamp string', () => {
|
||||
// given
|
||||
const timestamp = "";
|
||||
|
||||
// when
|
||||
const parsedTimestamp = parseTimestamp(timestamp);
|
||||
|
||||
// then
|
||||
expect(parsedTimestamp.isValid()).toBe(false);
|
||||
});
|
||||
|
||||
test('parseTimestamp() should fail parsing invalid timestamp string', () => {
|
||||
// given
|
||||
const timestamp = TIMESTAMP_INVALID_STRING;
|
||||
|
||||
// when
|
||||
const parsedTimestamp = parseTimestamp(timestamp);
|
||||
|
||||
// then
|
||||
expect(parsedTimestamp.isValid()).toBe(false);
|
||||
});
|
||||
|
||||
test('parseTimestamp() should succeed parsing valid timestamp string', () => {
|
||||
// given
|
||||
const timestamp = TIMESTAMP_VALID_STRING;
|
||||
|
||||
// when
|
||||
const parsedTimestamp = parseTimestamp(timestamp);
|
||||
|
||||
// then
|
||||
expect(parsedTimestamp.isValid()).toBe(true);
|
||||
expect(parsedTimestamp.toISOString()).toEqual(timestamp);
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for undefined node data', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = undefined;
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for empty node data', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for empty node info', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {
|
||||
nodeinfo: {}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for non-string node id', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: 42
|
||||
}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for empty node id', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: ""
|
||||
}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for empty network info', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {}
|
||||
}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for invalid mac', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "xxx"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for missing flags', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for empty flags', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
},
|
||||
flags: {}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for missing last seen timestamp', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
}
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for invalid last seen timestamp', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
},
|
||||
lastseen: 42
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should succeed parsing node without site and domain', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
},
|
||||
lastseen: TIMESTAMP_VALID_STRING
|
||||
};
|
||||
|
||||
// then
|
||||
const expectedParsedNode: ParsedNode = {
|
||||
mac: "12:34:56:78:90:AB",
|
||||
importTimestamp: importTimestamp,
|
||||
state: NodeState.ONLINE,
|
||||
lastSeen: parseTimestamp(TIMESTAMP_VALID_STRING),
|
||||
site: '<unknown-site>',
|
||||
domain: '<unknown-domain>'
|
||||
};
|
||||
expect(parseNode(importTimestamp, nodeData)).toEqual(expectedParsedNode);
|
||||
});
|
||||
|
||||
test('parseNode() should succeed parsing node with site and domain', () => {
|
||||
// given
|
||||
const importTimestamp = moment();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
},
|
||||
system: {
|
||||
site_code: "test-site",
|
||||
domain_code: "test-domain"
|
||||
}
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
},
|
||||
lastseen: TIMESTAMP_VALID_STRING,
|
||||
};
|
||||
|
||||
// then
|
||||
const expectedParsedNode: ParsedNode = {
|
||||
mac: "12:34:56:78:90:AB",
|
||||
importTimestamp: importTimestamp,
|
||||
state: NodeState.ONLINE,
|
||||
lastSeen: parseTimestamp(TIMESTAMP_VALID_STRING),
|
||||
site: 'test-site',
|
||||
domain: 'test-domain'
|
||||
};
|
||||
expect(parseNode(importTimestamp, nodeData)).toEqual(expectedParsedNode);
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing empty string', () => {
|
||||
// given
|
||||
const json = "";
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing malformed JSON', () => {
|
||||
// given
|
||||
const json = '{"version": 2]';
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing JSON null', () => {
|
||||
// given
|
||||
const json = JSON.stringify(null);
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing JSON string', () => {
|
||||
// given
|
||||
const json = JSON.stringify("foo");
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing JSON number', () => {
|
||||
// given
|
||||
const json = JSON.stringify(42);
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing empty JSON object', () => {
|
||||
// given
|
||||
const json = JSON.stringify({});
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing for mismatching version', () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_INVALID_VERSION
|
||||
});
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing for missing timestamp', () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
nodes: []
|
||||
});
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing for invalid timestamp', () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
timestamp: TIMESTAMP_INVALID_STRING,
|
||||
nodes: []
|
||||
});
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should fail parsing for nodes object instead of array', () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
timestamp: TIMESTAMP_VALID_STRING,
|
||||
nodes: {}
|
||||
});
|
||||
|
||||
// then
|
||||
expect(() => parseNodesJson(json)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNodesJson() should succeed parsing no nodes', () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
timestamp: TIMESTAMP_VALID_STRING,
|
||||
nodes: []
|
||||
});
|
||||
|
||||
// when
|
||||
const result = parseNodesJson(json);
|
||||
|
||||
// then
|
||||
expect(result.importTimestamp.isValid()).toBe(true);
|
||||
expect(result.nodes).toEqual([]);
|
||||
});
|
||||
|
||||
test('parseNodesJson() should skip parsing invalid nodes', () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
timestamp: TIMESTAMP_VALID_STRING,
|
||||
nodes: [
|
||||
{},
|
||||
{
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
},
|
||||
system: {
|
||||
site_code: "test-site",
|
||||
domain_code: "test-domain"
|
||||
}
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
},
|
||||
lastseen: TIMESTAMP_INVALID_STRING,
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
// when
|
||||
const result = parseNodesJson(json);
|
||||
|
||||
// then
|
||||
expect(result.importTimestamp.isValid()).toBe(true);
|
||||
expect(result.nodes).toEqual([]);
|
||||
expect(mockedLogger.getMessages('error', 'monitoring', 'parsing-nodes-json').length).toEqual(2);
|
||||
});
|
||||
|
||||
test('parseNodesJson() should parse valid nodes', () => {
|
||||
// given
|
||||
const json = JSON.stringify({
|
||||
version: NODES_JSON_VALID_VERSION,
|
||||
timestamp: TIMESTAMP_VALID_STRING,
|
||||
nodes: [
|
||||
{}, // keep an invalid one for good measure
|
||||
{
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
},
|
||||
system: {
|
||||
site_code: "test-site",
|
||||
domain_code: "test-domain"
|
||||
}
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
},
|
||||
lastseen: TIMESTAMP_VALID_STRING,
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
// when
|
||||
const result = parseNodesJson(json);
|
||||
|
||||
// then
|
||||
const expectedParsedNode: ParsedNode = {
|
||||
mac: "12:34:56:78:90:AB",
|
||||
importTimestamp: parseTimestamp(TIMESTAMP_VALID_STRING),
|
||||
state: NodeState.ONLINE,
|
||||
lastSeen: parseTimestamp(TIMESTAMP_VALID_STRING),
|
||||
site: 'test-site',
|
||||
domain: 'test-domain'
|
||||
};
|
||||
|
||||
expect(result.importTimestamp.isValid()).toBe(true);
|
||||
expect(result.nodes).toEqual([expectedParsedNode]);
|
||||
expect(mockedLogger.getMessages('error', 'monitoring', 'parsing-nodes-json').length).toEqual(1);
|
||||
});
@@ -33,7 +33,7 @@ const DELETE_OFFLINE_NODES_AFTER_DURATION: {amount: number, unit: unitOfTime.Dur…
    unit: 'days'
};

type ParsedNode = {
export type ParsedNode = {
    mac: string,
    importTimestamp: Moment,
    state: NodeState,
@@ -42,7 +42,7 @@ type ParsedNode = {
    domain: string,
};

type NodesParsingResult = {
export type NodesParsingResult = {
    importTimestamp: Moment,
    nodes: ParsedNode[],
}
@@ -131,25 +131,34 @@ async function storeNodeInformation(nodeData: ParsedNode, node: Node): Promise<v…

const isValidMac = forConstraint(CONSTRAINTS.node.mac, false);

function parseTimestamp(timestamp: any): Moment {
export function parseTimestamp(timestamp: any): Moment {
    if (!_.isString(timestamp)) {
        return moment.invalid();
    }
    return moment.utc(timestamp);
}

function parseNode(importTimestamp: Moment, nodeData: any, nodeId: NodeId): ParsedNode {
// TODO: Use sparkson for JSON parsing.
export function parseNode(importTimestamp: Moment, nodeData: any): ParsedNode {
    if (!_.isPlainObject(nodeData)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected node type: ' + (typeof nodeData)
            'Unexpected node type: ' + (typeof nodeData)
        );
    }

    if (!_.isPlainObject(nodeData.nodeinfo)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected nodeinfo type: ' + (typeof nodeData.nodeinfo)
            'Unexpected nodeinfo type: ' + (typeof nodeData.nodeinfo)
        );
    }

    const nodeId = nodeData.nodeinfo.node_id;
    if (!nodeId || !_.isString(nodeId)) {
        throw new Error(
            `Invalid node id of type "${typeof nodeId}": ${nodeId}`
        );
    }

    if (!_.isPlainObject(nodeData.nodeinfo.network)) {
        throw new Error(
            'Node ' + nodeId + ': Unexpected nodeinfo.network type: ' + (typeof nodeData.nodeinfo.network)
@@ -197,52 +206,51 @@
        importTimestamp: importTimestamp,
        state: isOnline ? NodeState.ONLINE : NodeState.OFFLINE,
        lastSeen: lastSeen,
        site: site,
        domain: domain
        site: site || '<unknown-site>',
        domain: domain || '<unknown-domain>'
    };
}

function parseNodesJson (body: string): NodesParsingResult {
// TODO: Use sparkson for JSON parsing.
export function parseNodesJson (body: string): NodesParsingResult {
    Logger.tag('monitoring', 'information-retrieval').debug('Parsing nodes.json...');

    const data: {[key: string]: any} = {};

    const json = JSON.parse(body);

    if (json.version !== 1) {
        throw new Error('Unexpected nodes.json version: ' + json.version);
    if (!_.isPlainObject(json)) {
        throw new Error(`Expecting a JSON object as the nodes.json root, but got: ${typeof json}`);
    }
    data.importTimestamp = parseTimestamp(json.timestamp);

    if (!data.importTimestamp.isValid()) {
    const expectedVersion = 2;
    if (json.version !== expectedVersion) {
        throw new Error(`Unexpected nodes.json version "${json.version}". Expected: "${expectedVersion}"`);
    }

    const result: NodesParsingResult = {
        importTimestamp: parseTimestamp(json.timestamp),
        nodes: []
    };

    if (!result.importTimestamp.isValid()) {
        throw new Error('Invalid timestamp: ' + json.timestamp);
    }

    if (!_.isPlainObject(json.nodes)) {
        throw new Error('Invalid nodes object type: ' + (typeof json.nodes));
    if (!_.isArray(json.nodes)) {
        throw new Error('Invalid nodes array type: ' + (typeof json.nodes));
    }

    data.nodes = _.filter(
        _.values(
            _.map(
                json.nodes,
                function (nodeData, nodeId) {
                    try {
                        return parseNode(data.importTimestamp, nodeData, nodeId);
                    }
                    catch (error) {
                        Logger.tag('monitoring', 'information-retrieval').error(error);
                        return null;
                    }
                }
            )
        ),
        function (node) {
            return node !== null;
    for (const nodeData of json.nodes) {
        try {
            const parsedNode = parseNode(result.importTimestamp, nodeData);
            Logger.tag('monitoring', 'parsing-nodes-json').debug(`Parsing node successful: ${parsedNode.mac}`);
            result.nodes.push(parsedNode);
        }
    );
        catch (error) {
            Logger.tag('monitoring', 'parsing-nodes-json').error("Could not parse node.", error, nodeData);
        }
    }

    return data as NodesParsingResult;
    return result;
}

async function updateSkippedNode(id: NodeId, node?: Node): Promise<Statement> {
server/types/config.ts — new file (125 lines)
@@ -0,0 +1,125 @@
import {ArrayField, Field, RawJsonField} from "sparkson"

export type Version = string;

// TODO: Replace string types by more specific types like URL, Password, etc.

export class LoggingConfig {
    constructor(
        @Field("directory") public directory: string,
        @Field("debug") public debug: boolean,
        @Field("profile") public profile: boolean,
        @Field("logRequests") public logRequests: boolean,
    ) {}
}

export class InternalConfig {
    constructor(
        @Field("active") public active: boolean,
        @Field("user") public user: string,
        @Field("password") public password: string,
    ) {}
}

export class EmailConfig {
    constructor(
        @Field("from") public from: string,

        // For details see: https://nodemailer.com/2-0-0-beta/setup-smtp/
        @RawJsonField("smtp") public smtp: any, // TODO: Better types!
    ) {}
}

export class ServerMapConfig {
    constructor(
        @ArrayField("nodesJsonUrl", String) public nodesJsonUrl: string[],
    ) {}
}

export class ServerConfig {
    constructor(
        @Field("baseUrl") public baseUrl: string,
        @Field("port") public port: number,

        @Field("databaseFile") public databaseFile: string,
        @Field("peersPath") public peersPath: string,

        @Field("logging") public logging: LoggingConfig,
        @Field("internal") public internal: InternalConfig,
        @Field("email") public email: EmailConfig,
        @Field("map") public map: ServerMapConfig,

        @Field("rootPath", true, undefined, "/") public rootPath: string,
    ) {}
}

export class CommunityConfig {
    constructor(
        @Field("name") public name: string,
        @Field("domain") public domain: string,
        @Field("contactEmail") public contactEmail: string,
        @ArrayField("sites", String) public sites: string[],
        @ArrayField("domains", String) public domains: string[],
    ) {}
}

export class LegalConfig {
    constructor(
        @Field("privacyUrl", true) public privacyUrl?: string,
        @Field("imprintUrl", true) public imprintUrl?: string,
    ) {}
}

export class ClientMapConfig {
    constructor(
        @Field("mapUrl") public mapUrl: string,
    ) {}
}
export class MonitoringConfig {
    constructor(
        @Field("enabled") public enabled: boolean,
    ) {}
}

export class Coords {
    constructor(
        @Field("lat") public lat: number,
        @Field("lng") public lng: number,
    ) {}
}

export class CoordsSelectorConfig {
    constructor(
        @Field("lat") public lat: number,
        @Field("lng") public lng: number,
        @Field("defaultZoom") public defaultZoom: number,
        @RawJsonField("layers") public layers: any, // TODO: Better types!
    ) {}
}

export class OtherCommunityInfoConfig {
    constructor(
        @Field("showInfo") public showInfo: boolean,
        @Field("showBorderForDebugging") public showBorderForDebugging: boolean,
        @ArrayField("localCommunityPolygon", Coords) public localCommunityPolygon: Coords[],
    ) {}
}

export class ClientConfig {
    constructor(
        @Field("community") public community: CommunityConfig,
        @Field("legal") public legal: LegalConfig,
        @Field("map") public map: ClientMapConfig,
        @Field("monitoring") public monitoring: MonitoringConfig,
        @Field("coordsSelector") public coordsSelector: CoordsSelectorConfig,
        @Field("otherCommunityInfo") public otherCommunityInfo: OtherCommunityInfoConfig,
        @Field("rootPath", true, undefined, "/") public rootPath: string,
    ) {}
}

export class Config {
    constructor(
        @Field("server") public server: ServerConfig,
        @Field("client") public client: ClientConfig
    ) {}
}
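These classes define the shape of `config.json` as parsed via sparkson. A small, hypothetical excerpt of a matching configuration, limited to the map and community fields documented in the README section above (all values are placeholders):

```typescript
// Hypothetical config.json excerpt matching ServerMapConfig and CommunityConfig.
const exampleConfig = {
    server: {
        map: {
            nodesJsonUrl: ["http://musterstadt.freifunk.net/nodes.json"]
        }
    },
    client: {
        community: {
            name: "Freifunk Musterstadt",
            domain: "musterstadt.freifunk.net"
        }
    }
};
```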
@@ -1,3 +1,6 @@
export * from "./config";
export * from "./logger";

// TODO: Token type.
export type Token = string;
export type FastdKey = string;
server/types/logger.ts — new file (12 lines)
@@ -0,0 +1,12 @@
export interface TaggedLogger {
    debug(...args: any[]): void;
    info(...args: any[]): void;
    warn(...args: any[]): void;
    error(...args: any[]): void;
    profile(...args: any[]): void;
}

export interface Logger {
    init(): void;
    tag(...tags: string[]): TaggedLogger;
}