ESLint: Auto reformat and fixing some warnings / errors.
parent 5237db38e0
commit 91690509d3
50 changed files with 2141 additions and 1493 deletions
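The diff below is consistent with Prettier-style autoformatting (double quotes instead of single quotes, trailing commas, long signatures wrapped over several lines) applied through ESLint's --fix. As a rough, hedged sketch only — the repository's actual lint configuration is not part of this commit page — a setup producing this kind of output could look like:

// .eslintrc.js — hypothetical sketch, not the project's real configuration
module.exports = {
    parser: "@typescript-eslint/parser",
    plugins: ["@typescript-eslint"],
    extends: [
        "eslint:recommended",
        "plugin:@typescript-eslint/recommended",
        // Runs Prettier as an ESLint rule, so `eslint --fix` also reformats code.
        "plugin:prettier/recommended",
    ],
};

// Typical invocation (paths and flags are an assumption, not taken from this commit):
// npx eslint --ext .ts --fix .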
@@ -1,10 +1,10 @@
import _ from "lodash";
import moment, {Moment} from "moment";
import {db} from "../db/database";
import moment, { Moment } from "moment";
import { db } from "../db/database";
import Logger from "../logger";
import * as MailTemplateService from "./mailTemplateService";
import * as Resources from "../utils/resources";
import {RestParams} from "../utils/resources";
import { RestParams } from "../utils/resources";
import {
EmailAddress,
isJSONObject,
@@ -16,32 +16,31 @@ import {
MailSortField,
MailType,
parseJSON,
UnixTimestampSeconds
UnixTimestampSeconds,
} from "../types";
import ErrorTypes from "../utils/errorTypes";
import {send} from "../mail";
import { send } from "../mail";

type EmaiQueueRow = {
id: MailId,
created_at: UnixTimestampSeconds,
data: string,
email: string,
failures: number,
modified_at: UnixTimestampSeconds,
recipient: EmailAddress,
sender: EmailAddress,
id: MailId;
created_at: UnixTimestampSeconds;
data: string;
email: string;
failures: number;
modified_at: UnixTimestampSeconds;
recipient: EmailAddress;
sender: EmailAddress;
};

const MAIL_QUEUE_DB_BATCH_SIZE = 50;

async function sendMail(options: Mail): Promise<void> {
Logger
.tag('mail', 'queue')
.info(
'Sending pending mail[%d] of type %s. ' +
'Had %d failures before.',
options.id, options.email, options.failures
);
Logger.tag("mail", "queue").info(
"Sending pending mail[%d] of type %s. " + "Had %d failures before.",
options.id,
options.email,
options.failures
);

const renderedTemplate = await MailTemplateService.render(options);
@@ -49,21 +48,24 @@ async function sendMail(options: Mail): Promise<void> {
from: options.sender,
to: options.recipient,
subject: renderedTemplate.subject,
html: renderedTemplate.body
html: renderedTemplate.body,
};

await send(mailOptions);

Logger.tag('mail', 'queue').info('Mail[%d] has been send.', options.id);
Logger.tag("mail", "queue").info("Mail[%d] has been send.", options.id);
}

async function findPendingMailsBefore(beforeMoment: Moment, limit: number): Promise<Mail[]> {
async function findPendingMailsBefore(
beforeMoment: Moment,
limit: number
): Promise<Mail[]> {
const rows = await db.all<EmaiQueueRow>(
'SELECT * FROM email_queue WHERE modified_at < ? AND failures < ? ORDER BY id ASC LIMIT ?',
[beforeMoment.unix(), 5, limit],
"SELECT * FROM email_queue WHERE modified_at < ? AND failures < ? ORDER BY id ASC LIMIT ?",
[beforeMoment.unix(), 5, limit]
);

return rows.map(row => {
return rows.map((row) => {
const mailType = row.email;
if (!isMailType(mailType)) {
throw new Error(`Invalid mailtype in database: ${mailType}`);
@@ -84,13 +86,15 @@ async function findPendingMailsBefore(beforeMoment: Moment, limit: number): Prom
}

async function removePendingMailFromQueue(id: MailId): Promise<void> {
await db.run('DELETE FROM email_queue WHERE id = ?', [id]);
await db.run("DELETE FROM email_queue WHERE id = ?", [id]);
}

async function incrementFailureCounterForPendingEmail(id: MailId): Promise<void> {
async function incrementFailureCounterForPendingEmail(
id: MailId
): Promise<void> {
await db.run(
'UPDATE email_queue SET failures = failures + 1, modified_at = ? WHERE id = ?',
[moment().unix(), id],
"UPDATE email_queue SET failures = failures + 1, modified_at = ? WHERE id = ?",
[moment().unix(), id]
);
}
@@ -99,7 +103,10 @@ async function sendPendingMail(pendingMail: Mail): Promise<void> {
await sendMail(pendingMail);
} catch (error) {
// we only log the error and increment the failure counter as we want to continue with pending mails
Logger.tag('mail', 'queue').error('Error sending pending mail[' + pendingMail.id + ']:', error);
Logger.tag("mail", "queue").error(
"Error sending pending mail[" + pendingMail.id + "]:",
error
);

await incrementFailureCounterForPendingEmail(pendingMail.id);
return;
@@ -109,22 +116,29 @@ async function sendPendingMail(pendingMail: Mail): Promise<void> {
}

async function doGetMail(id: MailId): Promise<Mail> {
const row = await db.get<Mail>('SELECT * FROM email_queue WHERE id = ?', [id]);
const row = await db.get<Mail>("SELECT * FROM email_queue WHERE id = ?", [
id,
]);
if (row === undefined) {
throw {data: 'Mail not found.', type: ErrorTypes.notFound};
throw { data: "Mail not found.", type: ErrorTypes.notFound };
}
return row;
}

export async function enqueue(sender: string, recipient: string, email: MailType, data: MailData): Promise<void> {
export async function enqueue(
sender: string,
recipient: string,
email: MailType,
data: MailData
): Promise<void> {
if (!_.isPlainObject(data)) {
throw new Error('Unexpected data: ' + data);
throw new Error("Unexpected data: " + data);
}
await db.run(
'INSERT INTO email_queue ' +
'(failures, sender, recipient, email, data) ' +
'VALUES (?, ?, ?, ?, ?)',
[0, sender, recipient, email, JSON.stringify(data)],
"INSERT INTO email_queue " +
"(failures, sender, recipient, email, data) " +
"VALUES (?, ?, ?, ?, ?)",
[0, sender, recipient, email, JSON.stringify(data)]
);
}
@@ -132,10 +146,12 @@ export async function getMail(id: MailId): Promise<Mail> {
return await doGetMail(id);
}

export async function getPendingMails(restParams: RestParams): Promise<{ mails: Mail[], total: number }> {
export async function getPendingMails(
restParams: RestParams
): Promise<{ mails: Mail[]; total: number }> {
const row = await db.get<{ total: number }>(
'SELECT count(*) AS total FROM email_queue',
[],
"SELECT count(*) AS total FROM email_queue",
[]
);

const total = row?.total || 0;
@@ -144,18 +160,18 @@ export async function getPendingMails(restParams: RestParams): Promise<{ mails:
restParams,
MailSortField.ID,
isMailSortField,
['id', 'failures', 'sender', 'recipient', 'email']
["id", "failures", "sender", "recipient", "email"]
);

const mails = await db.all(
'SELECT * FROM email_queue WHERE ' + filter.query,
filter.params,
"SELECT * FROM email_queue WHERE " + filter.query,
filter.params
);

return {
mails,
total
}
total,
};
}

export async function deleteMail(id: MailId): Promise<void> {
@@ -164,29 +180,32 @@ export async function deleteMail(id: MailId): Promise<void> {

export async function resetFailures(id: MailId): Promise<Mail> {
const statement = await db.run(
'UPDATE email_queue SET failures = 0, modified_at = ? WHERE id = ?',
[moment().unix(), id],
"UPDATE email_queue SET failures = 0, modified_at = ? WHERE id = ?",
[moment().unix(), id]
);

if (!statement.changes) {
throw new Error('Error: could not reset failure count for mail: ' + id);
throw new Error("Error: could not reset failure count for mail: " + id);
}

return await doGetMail(id);
}

export async function sendPendingMails(): Promise<void> {
Logger.tag('mail', 'queue').debug('Start sending pending mails...');
Logger.tag("mail", "queue").debug("Start sending pending mails...");

const startTime = moment();

while (true) {
Logger.tag('mail', 'queue').debug('Sending next batch...');
Logger.tag("mail", "queue").debug("Sending next batch...");

const pendingMails = await findPendingMailsBefore(startTime, MAIL_QUEUE_DB_BATCH_SIZE);
const pendingMails = await findPendingMailsBefore(
startTime,
MAIL_QUEUE_DB_BATCH_SIZE
);

if (_.isEmpty(pendingMails)) {
Logger.tag('mail', 'queue').debug('Done sending pending mails.');
Logger.tag("mail", "queue").debug("Done sending pending mails.");
return;
}
@@ -1,38 +1,40 @@
import _ from "lodash";
import deepExtend from "deep-extend";
import {readFileSync, promises as fs} from "graceful-fs";
import { readFileSync, promises as fs } from "graceful-fs";
import moment from "moment";
import {htmlToText} from "nodemailer-html-to-text";
import { htmlToText } from "nodemailer-html-to-text";

import {config} from "../config";
import { config } from "../config";
import Logger from "../logger";
import {editNodeUrl} from "../utils/urlBuilder";
import {Transporter} from "nodemailer";
import {MailData, Mail} from "../types";
import { editNodeUrl } from "../utils/urlBuilder";
import { Transporter } from "nodemailer";
import { MailData, Mail } from "../types";

const templateBasePath = __dirname + '/../mailTemplates';
const snippetsBasePath = templateBasePath + '/snippets';
const templateBasePath = __dirname + "/../mailTemplates";
const snippetsBasePath = templateBasePath + "/snippets";

const templateFunctions: {
[key: string]:
| ((name: string, data: MailData) => string)
| ((data: MailData) => string)
| ((href: string, text: string) => string)
| ((unix: number) => string)
| ((unix: number) => string);
} = {};

function renderSnippet(this: any, name: string, data: MailData): string {
const snippetFile = snippetsBasePath + '/' + name + '.html';
const snippetFile = snippetsBasePath + "/" + name + ".html";

return _.template(readFileSync(snippetFile).toString())(deepExtend(
{},
this, // parent data
data,
templateFunctions
));
return _.template(readFileSync(snippetFile).toString())(
deepExtend(
{},
this, // parent data
data,
templateFunctions
)
);
}

function snippet(name: string): ((this: any, data: MailData) => string) {
function snippet(name: string): (this: any, data: MailData) => string {
return function (this: any, data: MailData): string {
return renderSnippet.bind(this)(name, data);
};
@@ -44,7 +46,7 @@ function renderLink(href: string, text: string): string {
'<a href="<%- href %>#" style="color: #E5287A;"><%- text %></a>'
)({
href: href,
text: text || href
text: text || href,
});
}

@@ -53,17 +55,17 @@ function renderHR(): string {
}

function formatDateTime(unix: number): string {
return moment.unix(unix).locale('de').local().format('DD.MM.YYYY HH:mm');
return moment.unix(unix).locale("de").local().format("DD.MM.YYYY HH:mm");
}

function formatFromNow(unix: number): string {
return moment.unix(unix).locale('de').fromNow();
return moment.unix(unix).locale("de").fromNow();
}

templateFunctions.header = snippet('header');
templateFunctions.footer = snippet('footer');
templateFunctions.header = snippet("header");
templateFunctions.footer = snippet("footer");

templateFunctions.monitoringFooter = snippet('monitoring-footer');
templateFunctions.monitoringFooter = snippet("monitoring-footer");

templateFunctions.snippet = renderSnippet;

@@ -73,24 +75,29 @@ templateFunctions.hr = renderHR;
templateFunctions.formatDateTime = formatDateTime;
templateFunctions.formatFromNow = formatFromNow;

export function configureTransporter (transporter: Transporter): void {
transporter.use('compile', htmlToText({
tables: ['.table']
}));
export function configureTransporter(transporter: Transporter): void {
transporter.use(
"compile",
htmlToText({
tables: [".table"],
})
);
}

export async function render(mailOptions: Mail): Promise<{subject: string, body: string}> {
const templatePathPrefix = templateBasePath + '/' + mailOptions.email;
export async function render(
mailOptions: Mail
): Promise<{ subject: string; body: string }> {
const templatePathPrefix = templateBasePath + "/" + mailOptions.email;

const subject = await fs.readFile(templatePathPrefix + '.subject.txt');
const body = await fs.readFile(templatePathPrefix + '.body.html');
const subject = await fs.readFile(templatePathPrefix + ".subject.txt");
const body = await fs.readFile(templatePathPrefix + ".body.html");

const data = deepExtend(
{},
mailOptions.data,
{
community: config.client.community,
editNodeUrl: editNodeUrl()
editNodeUrl: editNodeUrl(),
},
templateFunctions
);
@@ -98,12 +105,13 @@ export async function render(mailOptions: Mail): Promise<{subject: string, body:
try {
return {
subject: _.template(subject.toString())(data).trim(),
body: _.template(body.toString())(data)
body: _.template(body.toString())(data),
};
} catch (error) {
Logger
.tag('mail', 'template')
.error('Error rendering template for mail[' + mailOptions.id + ']:', error);
Logger.tag("mail", "template").error(
"Error rendering template for mail[" + mailOptions.id + "]:",
error
);
throw error;
}
}
@@ -1,13 +1,13 @@
import {ParsedNode, parseNode, parseNodesJson} from "./monitoringService";
import {Domain, MAC, OnlineState, Site, UnixTimestampSeconds} from "../types";
import Logger from '../logger';
import {MockLogger} from "../__mocks__/logger";
import {now, parseTimestamp} from "../utils/time";
import { ParsedNode, parseNode, parseNodesJson } from "./monitoringService";
import { Domain, MAC, OnlineState, Site, UnixTimestampSeconds } from "../types";
import Logger from "../logger";
import { MockLogger } from "../__mocks__/logger";
import { now, parseTimestamp } from "../utils/time";

const mockedLogger = Logger as MockLogger;

jest.mock('../logger');
jest.mock('../db/database');
jest.mock("../logger");
jest.mock("../db/database");

const NODES_JSON_INVALID_VERSION = 1;
const NODES_JSON_VALID_VERSION = 2;
@@ -25,7 +25,7 @@ beforeEach(() => {
mockedLogger.reset();
});

test('parseNode() should fail parsing node for undefined node data', () => {
test("parseNode() should fail parsing node for undefined node data", () => {
// given
const importTimestamp = now();
const nodeData = undefined;
@@ -34,7 +34,7 @@ test('parseNode() should fail parsing node for undefined node data', () => {
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
});

test('parseNode() should fail parsing node for empty node data', () => {
test("parseNode() should fail parsing node for empty node data", () => {
// given
const importTimestamp = now();
const nodeData = {};
@@ -43,159 +43,159 @@ test('parseNode() should fail parsing node for empty node data', () => {
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
});

test('parseNode() should fail parsing node for empty node info', () => {
test("parseNode() should fail parsing node for empty node info", () => {
// given
const importTimestamp = now();
const nodeData = {
nodeinfo: {}
nodeinfo: {},
};

// then
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
});

test('parseNode() should fail parsing node for non-string node id', () => {
test("parseNode() should fail parsing node for non-string node id", () => {
// given
const importTimestamp = now();
const nodeData = {
nodeinfo: {
node_id: 42
}
};

// then
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
});

test('parseNode() should fail parsing node for empty node id', () => {
// given
const importTimestamp = now();
const nodeData = {
nodeinfo: {
node_id: ""
}
};

// then
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
});

test('parseNode() should fail parsing node for empty network info', () => {
// given
const importTimestamp = now();
const nodeData = {
nodeinfo: {
node_id: "1234567890ab",
network: {}
}
};

// then
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
});

test('parseNode() should fail parsing node for invalid mac', () => {
// given
const importTimestamp = now();
const nodeData = {
nodeinfo: {
node_id: "1234567890ab",
network: {
mac: "xxx"
}
}
};

// then
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
});

test('parseNode() should fail parsing node for missing flags', () => {
// given
const importTimestamp = now();
const nodeData = {
nodeinfo: {
node_id: "1234567890ab",
network: {
mac: "12:34:56:78:90:ab"
}
}
};

// then
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
});

test('parseNode() should fail parsing node for empty flags', () => {
// given
const importTimestamp = now();
const nodeData = {
nodeinfo: {
node_id: "1234567890ab",
network: {
mac: "12:34:56:78:90:ab"
}
node_id: 42,
},
flags: {}
};

// then
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
});

test('parseNode() should fail parsing node for missing last seen timestamp', () => {
test("parseNode() should fail parsing node for empty node id", () => {
// given
const importTimestamp = now();
const nodeData = {
nodeinfo: {
node_id: "",
},
};

// then
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
});
test("parseNode() should fail parsing node for empty network info", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {},
|
||||
},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test("parseNode() should fail parsing node for invalid mac", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
mac: "xxx",
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test("parseNode() should fail parsing node for missing flags", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test("parseNode() should fail parsing node for empty flags", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
},
|
||||
flags: {},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test("parseNode() should fail parsing node for missing last seen timestamp", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
}
|
||||
online: true,
|
||||
},
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should fail parsing node for invalid last seen timestamp', () => {
|
||||
test("parseNode() should fail parsing node for invalid last seen timestamp", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
online: true,
|
||||
},
|
||||
lastseen: 42
|
||||
lastseen: 42,
|
||||
};
|
||||
|
||||
// then
|
||||
expect(() => parseNode(importTimestamp, nodeData)).toThrowError();
|
||||
});
|
||||
|
||||
test('parseNode() should succeed parsing node without site and domain', () => {
|
||||
test("parseNode() should succeed parsing node without site and domain", () => {
|
||||
// given
|
||||
const importTimestamp = now();
|
||||
const nodeData = {
|
||||
nodeinfo: {
|
||||
node_id: "1234567890ab",
|
||||
network: {
|
||||
mac: "12:34:56:78:90:ab"
|
||||
}
|
||||
mac: "12:34:56:78:90:ab",
|
||||
},
|
||||
},
|
||||
flags: {
|
||||
online: true
|
||||
online: true,
|
||||
},
|
||||
lastseen: TIMESTAMP_VALID_STRING
|
||||
lastseen: TIMESTAMP_VALID_STRING,
|
||||
};
|
||||
|
||||
// then
|
||||
|
@@ -210,22 +210,22 @@ test('parseNode() should succeed parsing node without site and domain', () => {
expect(parseNode(importTimestamp, nodeData)).toEqual(expectedParsedNode);
});

test('parseNode() should succeed parsing node with site and domain', () => {
test("parseNode() should succeed parsing node with site and domain", () => {
// given
const importTimestamp = now();
const nodeData = {
nodeinfo: {
node_id: "1234567890ab",
network: {
mac: "12:34:56:78:90:ab"
mac: "12:34:56:78:90:ab",
},
system: {
site_code: "test-site",
domain_code: "test-domain"
}
domain_code: "test-domain",
},
},
flags: {
online: true
online: true,
},
lastseen: TIMESTAMP_VALID_STRING,
};
@@ -242,7 +242,7 @@ test('parseNode() should succeed parsing node with site and domain', () => {
expect(parseNode(importTimestamp, nodeData)).toEqual(expectedParsedNode);
});

test('parseNodesJson() should fail parsing empty string', () => {
test("parseNodesJson() should fail parsing empty string", () => {
// given
const json = "";

@@ -250,7 +250,7 @@ test('parseNodesJson() should fail parsing empty string', () => {
expect(() => parseNodesJson(json)).toThrowError();
});

test('parseNodesJson() should fail parsing malformed JSON', () => {
test("parseNodesJson() should fail parsing malformed JSON", () => {
// given
const json = '{"version": 2]';

@@ -258,7 +258,7 @@ test('parseNodesJson() should fail parsing malformed JSON', () => {
expect(() => parseNodesJson(json)).toThrowError();
});

test('parseNodesJson() should fail parsing JSON null', () => {
test("parseNodesJson() should fail parsing JSON null", () => {
// given
const json = JSON.stringify(null);

@@ -266,7 +266,7 @@ test('parseNodesJson() should fail parsing JSON null', () => {
expect(() => parseNodesJson(json)).toThrowError();
});

test('parseNodesJson() should fail parsing JSON string', () => {
test("parseNodesJson() should fail parsing JSON string", () => {
// given
const json = JSON.stringify("foo");

@@ -274,7 +274,7 @@ test('parseNodesJson() should fail parsing JSON string', () => {
expect(() => parseNodesJson(json)).toThrowError();
});

test('parseNodesJson() should fail parsing JSON number', () => {
test("parseNodesJson() should fail parsing JSON number", () => {
// given
const json = JSON.stringify(42);

@@ -282,7 +282,7 @@ test('parseNodesJson() should fail parsing JSON number', () => {
expect(() => parseNodesJson(json)).toThrowError();
});

test('parseNodesJson() should fail parsing empty JSON object', () => {
test("parseNodesJson() should fail parsing empty JSON object", () => {
// given
const json = JSON.stringify({});
@@ -290,57 +290,57 @@ test('parseNodesJson() should fail parsing empty JSON object', () => {
expect(() => parseNodesJson(json)).toThrowError();
});

test('parseNodesJson() should fail parsing for mismatching version', () => {
test("parseNodesJson() should fail parsing for mismatching version", () => {
// given
const json = JSON.stringify({
version: NODES_JSON_INVALID_VERSION
version: NODES_JSON_INVALID_VERSION,
});

// then
expect(() => parseNodesJson(json)).toThrowError();
});

test('parseNodesJson() should fail parsing for missing timestamp', () => {
test("parseNodesJson() should fail parsing for missing timestamp", () => {
// given
const json = JSON.stringify({
version: NODES_JSON_VALID_VERSION,
nodes: []
nodes: [],
});

// then
expect(() => parseNodesJson(json)).toThrowError();
});

test('parseNodesJson() should fail parsing for invalid timestamp', () => {
test("parseNodesJson() should fail parsing for invalid timestamp", () => {
// given
const json = JSON.stringify({
version: NODES_JSON_VALID_VERSION,
timestamp: TIMESTAMP_INVALID_STRING,
nodes: []
nodes: [],
});

// then
expect(() => parseNodesJson(json)).toThrowError();
});

test('parseNodesJson() should fail parsing for nodes object instead of array', () => {
test("parseNodesJson() should fail parsing for nodes object instead of array", () => {
// given
const json = JSON.stringify({
version: NODES_JSON_VALID_VERSION,
timestamp: TIMESTAMP_VALID_STRING,
nodes: {}
nodes: {},
});

// then
expect(() => parseNodesJson(json)).toThrowError();
});

test('parseNodesJson() should succeed parsing no nodes', () => {
test("parseNodesJson() should succeed parsing no nodes", () => {
// given
const json = JSON.stringify({
version: NODES_JSON_VALID_VERSION,
timestamp: TIMESTAMP_VALID_STRING,
nodes: []
nodes: [],
});

// when
@@ -352,7 +352,7 @@ test('parseNodesJson() should succeed parsing no nodes', () => {
expect(result.totalNodesCount).toEqual(0);
});

test('parseNodesJson() should skip parsing invalid nodes', () => {
test("parseNodesJson() should skip parsing invalid nodes", () => {
// given
const json = JSON.stringify({
version: NODES_JSON_VALID_VERSION,
@@ -363,19 +363,19 @@ test('parseNodesJson() should skip parsing invalid nodes', () => {
nodeinfo: {
node_id: "1234567890ab",
network: {
mac: "12:34:56:78:90:ab"
mac: "12:34:56:78:90:ab",
},
system: {
site_code: "test-site",
domain_code: "test-domain"
}
domain_code: "test-domain",
},
},
flags: {
online: true
online: true,
},
lastseen: TIMESTAMP_INVALID_STRING,
}
]
},
],
});

// when
@@ -385,10 +385,13 @@ test('parseNodesJson() should skip parsing invalid nodes', () => {
expect(result.nodes).toEqual([]);
expect(result.failedNodesCount).toEqual(2);
expect(result.totalNodesCount).toEqual(2);
expect(mockedLogger.getMessages('error', 'monitoring', 'parsing-nodes-json').length).toEqual(2);
expect(
mockedLogger.getMessages("error", "monitoring", "parsing-nodes-json")
.length
).toEqual(2);
});

test('parseNodesJson() should parse valid nodes', () => {
test("parseNodesJson() should parse valid nodes", () => {
// given
const json = JSON.stringify({
version: NODES_JSON_VALID_VERSION,
@@ -399,19 +402,19 @@ test('parseNodesJson() should parse valid nodes', () => {
nodeinfo: {
node_id: "1234567890ab",
network: {
mac: "12:34:56:78:90:ab"
mac: "12:34:56:78:90:ab",
},
system: {
site_code: "test-site",
domain_code: "test-domain"
}
domain_code: "test-domain",
},
},
flags: {
online: true
online: true,
},
lastseen: TIMESTAMP_VALID_STRING,
}
]
},
],
});

// when
@@ -430,5 +433,8 @@ test('parseNodesJson() should parse valid nodes', () => {
expect(result.nodes).toEqual([expectedParsedNode]);
expect(result.failedNodesCount).toEqual(1);
expect(result.totalNodesCount).toEqual(2);
expect(mockedLogger.getMessages('error', 'monitoring', 'parsing-nodes-json').length).toEqual(1);
expect(
mockedLogger.getMessages("error", "monitoring", "parsing-nodes-json")
.length
).toEqual(1);
});
File diff suppressed because it is too large
@@ -1,15 +1,18 @@
import async from "async";
import crypto from "crypto";
import oldFs, {promises as fs} from "graceful-fs";
import oldFs, { promises as fs } from "graceful-fs";
import glob from "glob";

import {config} from "../config";
import { config } from "../config";
import ErrorTypes from "../utils/errorTypes";
import Logger from "../logger";
import logger from "../logger";
import * as MailService from "../services/mailService";
import {normalizeString} from "../shared/utils/strings";
import {monitoringConfirmUrl, monitoringDisableUrl} from "../utils/urlBuilder";
import { normalizeString } from "../shared/utils/strings";
import {
monitoringConfirmUrl,
monitoringDisableUrl,
} from "../utils/urlBuilder";
import {
BaseNode,
Coordinates,
@@ -36,27 +39,27 @@ import {
TypeGuard,
unhandledEnumField,
UnixTimestampMilliseconds,
UnixTimestampSeconds
UnixTimestampSeconds,
} from "../types";
import util from "util";

const pglob = util.promisify(glob);

type NodeFilter = {
hostname?: Hostname,
mac?: MAC,
key?: FastdKey,
token?: Token,
monitoringToken?: MonitoringToken,
}
hostname?: Hostname;
mac?: MAC;
key?: FastdKey;
token?: Token;
monitoringToken?: MonitoringToken;
};

type NodeFilenameParsed = {
hostname?: Hostname,
mac?: MAC,
key?: FastdKey,
token?: Token,
monitoringToken?: MonitoringToken,
}
hostname?: Hostname;
mac?: MAC;
key?: FastdKey;
token?: Token;
monitoringToken?: MonitoringToken;
};

enum LINE_PREFIX {
HOSTNAME = "# Knotenname: ",
@@ -69,9 +72,10 @@ enum LINE_PREFIX {
MONITORING_TOKEN = "# Monitoring-Token: ",
}

function generateToken<Type extends string & { readonly __tag: symbol } = never>(): Type {
return crypto.randomBytes(8).toString('hex') as Type;
function generateToken<
Type extends string & { readonly __tag: symbol } = never
>(): Type {
return crypto.randomBytes(8).toString("hex") as Type;
}

function toNodeFilesPattern(filter: NodeFilter): string {
@@ -83,9 +87,9 @@ function toNodeFilesPattern(filter: NodeFilter): string {
filter.monitoringToken,
];

const pattern = fields.map((value) => value || '*').join('@');
const pattern = fields.map((value) => value || "*").join("@");

return config.server.peersPath + '/' + pattern.toLowerCase();
return config.server.peersPath + "/" + pattern.toLowerCase();
}

function findNodeFiles(filter: NodeFilter): Promise<string[]> {
@@ -97,24 +101,25 @@ function findNodeFilesSync(filter: NodeFilter) {
}

async function findFilesInPeersPath(): Promise<string[]> {
const files = await pglob(config.server.peersPath + '/*');
const files = await pglob(config.server.peersPath + "/*");

return await async.filter(files, (file, fileCallback) => {
if (file[0] === '.') {
if (file[0] === ".") {
return fileCallback(null, false);
}

fs.lstat(file)
.then(stats => fileCallback(null, stats.isFile()))
.then((stats) => fileCallback(null, stats.isFile()))
.catch(fileCallback);
});
}

function parseNodeFilename(filename: string): NodeFilenameParsed {
const parts = filename.split('@', 5);
const parts = filename.split("@", 5);

function get<T>(isT: TypeGuard<T>, index: number): T | undefined {
const value = index >= 0 && index < parts.length ? parts[index] : undefined;
const value =
index >= 0 && index < parts.length ? parts[index] : undefined;
return isT(value) ? value : undefined;
}
@@ -140,35 +145,65 @@ function isDuplicate(filter: NodeFilter, token?: Token): boolean {
return parseNodeFilename(files[0]).token !== token;
}

function checkNoDuplicates(token: Token | undefined, node: BaseNode, nodeSecrets: NodeSecrets): void {
if (isDuplicate({hostname: node.hostname}, token)) {
throw {data: {msg: 'Already exists.', field: 'hostname'}, type: ErrorTypes.conflict};
function checkNoDuplicates(
token: Token | undefined,
node: BaseNode,
nodeSecrets: NodeSecrets
): void {
if (isDuplicate({ hostname: node.hostname }, token)) {
throw {
data: { msg: "Already exists.", field: "hostname" },
type: ErrorTypes.conflict,
};
}

if (node.key) {
if (isDuplicate({key: node.key}, token)) {
throw {data: {msg: 'Already exists.', field: 'key'}, type: ErrorTypes.conflict};
if (isDuplicate({ key: node.key }, token)) {
throw {
data: { msg: "Already exists.", field: "key" },
type: ErrorTypes.conflict,
};
}
}

if (isDuplicate({mac: node.mac}, token)) {
throw {data: {msg: 'Already exists.', field: 'mac'}, type: ErrorTypes.conflict};
if (isDuplicate({ mac: node.mac }, token)) {
throw {
data: { msg: "Already exists.", field: "mac" },
type: ErrorTypes.conflict,
};
}

if (nodeSecrets.monitoringToken && isDuplicate({monitoringToken: nodeSecrets.monitoringToken}, token)) {
throw {data: {msg: 'Already exists.', field: 'monitoringToken'}, type: ErrorTypes.conflict};
if (
nodeSecrets.monitoringToken &&
isDuplicate({ monitoringToken: nodeSecrets.monitoringToken }, token)
) {
throw {
data: { msg: "Already exists.", field: "monitoringToken" },
type: ErrorTypes.conflict,
};
}
}

function toNodeFilename(token: Token, node: BaseNode, nodeSecrets: NodeSecrets): string {
return config.server.peersPath + '/' +
function toNodeFilename(
token: Token,
node: BaseNode,
nodeSecrets: NodeSecrets
): string {
return (
config.server.peersPath +
"/" +
(
(node.hostname || '') + '@' +
(node.mac || '') + '@' +
(node.key || '') + '@' +
(token || '') + '@' +
(nodeSecrets.monitoringToken || '')
).toLowerCase();
(node.hostname || "") +
"@" +
(node.mac || "") +
"@" +
(node.key || "") +
"@" +
(token || "") +
"@" +
(nodeSecrets.monitoringToken || "")
).toLowerCase()
);
}

function getNodeValue(
@@ -194,7 +229,10 @@ function getNodeValue(
case LINE_PREFIX.MONITORING:
if (node.monitoring && monitoringState === MonitoringState.ACTIVE) {
return "aktiv";
} else if (node.monitoring && monitoringState === MonitoringState.PENDING) {
} else if (
node.monitoring &&
monitoringState === MonitoringState.PENDING
) {
return "pending";
}
return "";
@@ -210,13 +248,19 @@ async function writeNodeFile(
token: Token,
node: CreateOrUpdateNode,
monitoringState: MonitoringState,
nodeSecrets: NodeSecrets,
nodeSecrets: NodeSecrets
): Promise<StoredNode> {
const filename = toNodeFilename(token, node, nodeSecrets);
let data = '';
let data = "";

for (const prefix of Object.values(LINE_PREFIX)) {
data += `${prefix}${getNodeValue(prefix, token, node, monitoringState, nodeSecrets)}\n`;
data += `${prefix}${getNodeValue(
prefix,
token,
node,
monitoringState,
nodeSecrets
)}\n`;
}

if (node.key) {
@@ -225,9 +269,9 @@ async function writeNodeFile(

// since node.js is single threaded we don't need a lock when working with synchronous operations
if (isUpdate) {
const files = findNodeFilesSync({token: token});
const files = findNodeFilesSync({ token: token });
if (files.length !== 1) {
throw {data: 'Node not found.', type: ErrorTypes.notFound};
throw { data: "Node not found.", type: ErrorTypes.notFound };
}

checkNoDuplicates(token, node, nodeSecrets);
@@ -236,41 +280,65 @@ async function writeNodeFile(
try {
oldFs.unlinkSync(file);
} catch (error) {
Logger.tag('node', 'save').error('Could not delete old node file: ' + file, error);
throw {data: 'Could not remove old node data.', type: ErrorTypes.internalError};
Logger.tag("node", "save").error(
"Could not delete old node file: " + file,
error
);
throw {
data: "Could not remove old node data.",
type: ErrorTypes.internalError,
};
}
} else {
checkNoDuplicates(undefined, node, nodeSecrets);
}

try {
oldFs.writeFileSync(filename, data, 'utf8');
const {node: storedNode} = await parseNodeFile(filename);
oldFs.writeFileSync(filename, data, "utf8");
const { node: storedNode } = await parseNodeFile(filename);
return storedNode;
} catch (error) {
Logger.tag('node', 'save').error('Could not write node file: ' + filename, error);
throw {data: 'Could not write node data.', type: ErrorTypes.internalError};
Logger.tag("node", "save").error(
"Could not write node file: " + filename,
error
);
throw {
data: "Could not write node data.",
type: ErrorTypes.internalError,
};
}
}

async function deleteNodeFile(token: Token): Promise<void> {
let files;
try {
files = await findNodeFiles({token: token});
files = await findNodeFiles({ token: token });
} catch (error) {
Logger.tag('node', 'delete').error('Could not find node file: ' + files, error);
throw {data: 'Could not delete node.', type: ErrorTypes.internalError};
Logger.tag("node", "delete").error(
"Could not find node file: " + files,
error
);
throw {
data: "Could not delete node.",
type: ErrorTypes.internalError,
};
}

if (files.length !== 1) {
throw {data: 'Node not found.', type: ErrorTypes.notFound};
throw { data: "Node not found.", type: ErrorTypes.notFound };
}

try {
oldFs.unlinkSync(files[0]);
} catch (error) {
Logger.tag('node', 'delete').error('Could not delete node file: ' + files, error);
throw {data: 'Could not delete node.', type: ErrorTypes.internalError};
Logger.tag("node", "delete").error(
"Could not delete node file: " + files,
error
);
throw {
data: "Could not delete node.",
type: ErrorTypes.internalError,
};
}
}
@@ -284,10 +352,7 @@ class StoredNodeBuilder {
public mac: MAC = "" as MAC; // FIXME: Either make mac optional in Node or handle this!
public monitoringState: MonitoringState = MonitoringState.DISABLED;

constructor(
public readonly modifiedAt: UnixTimestampSeconds,
) {
}
constructor(public readonly modifiedAt: UnixTimestampSeconds) {}

public build(): StoredNode {
const node = {
@@ -304,14 +369,22 @@ class StoredNodeBuilder {

if (!isStoredNode(node)) {
logger.tag("NodeService").error("Not a valid StoredNode:", node);
throw {data: "Could not build StoredNode.", type: ErrorTypes.internalError};
throw {
data: "Could not build StoredNode.",
type: ErrorTypes.internalError,
};
}

return node;
}
}

function setNodeValue(prefix: LINE_PREFIX, node: StoredNodeBuilder, nodeSecrets: NodeSecrets, value: string) {
function setNodeValue(
prefix: LINE_PREFIX,
node: StoredNodeBuilder,
nodeSecrets: NodeSecrets,
value: string
) {
switch (prefix) {
case LINE_PREFIX.HOSTNAME:
node.hostname = value as Hostname;
@@ -332,10 +405,13 @@ function setNodeValue(prefix: LINE_PREFIX, node: StoredNodeBuilder, nodeSecrets:
node.token = value as Token;
break;
case LINE_PREFIX.MONITORING:
const active = value === 'aktiv';
const pending = value === 'pending';
node.monitoringState =
active ? MonitoringState.ACTIVE : (pending ? MonitoringState.PENDING : MonitoringState.DISABLED);
const active = value === "aktiv";
const pending = value === "pending";
node.monitoringState = active
? MonitoringState.ACTIVE
: pending
? MonitoringState.PENDING
: MonitoringState.DISABLED;
break;
case LINE_PREFIX.MONITORING_TOKEN:
nodeSecrets.monitoringToken = value as MonitoringToken;
@@ -346,11 +422,14 @@ function setNodeValue(prefix: LINE_PREFIX, node: StoredNodeBuilder, nodeSecrets:
}

async function getModifiedAt(file: string): Promise<UnixTimestampSeconds> {
const modifiedAtMs = (await fs.lstat(file)).mtimeMs as UnixTimestampMilliseconds;
const modifiedAtMs = (await fs.lstat(file))
.mtimeMs as UnixTimestampMilliseconds;
return toUnixTimestampSeconds(modifiedAtMs);
}

async function parseNodeFile(file: string): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets }> {
async function parseNodeFile(
file: string
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets }> {
const contents = await fs.readFile(file);
const modifiedAt = await getModifiedAt(file);

@@ -365,7 +444,9 @@ async function parseNodeFile(file: string): Promise<{ node: StoredNode, nodeSecr
} else {
for (const prefix of Object.values(LINE_PREFIX)) {
if (line.substring(0, prefix.length) === prefix) {
const value = normalizeString(line.substring(prefix.length));
const value = normalizeString(
line.substring(prefix.length)
);
setNodeValue(prefix, node, nodeSecrets, value);
break;
}
@@ -379,7 +460,9 @@ async function parseNodeFile(file: string): Promise<{ node: StoredNode, nodeSecr
};
}

async function findNodeDataByFilePattern(filter: NodeFilter): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets } | null> {
async function findNodeDataByFilePattern(
filter: NodeFilter
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets } | null> {
const files = await findNodeFiles(filter);

if (files.length !== 1) {
@@ -390,22 +473,27 @@ async function findNodeDataByFilePattern(filter: NodeFilter): Promise<{ node: St
return await parseNodeFile(file);
}

async function getNodeDataByFilePattern(filter: NodeFilter): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets }> {
async function getNodeDataByFilePattern(
filter: NodeFilter
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets }> {
const result = await findNodeDataByFilePattern(filter);
if (!result) {
throw {data: 'Node not found.', type: ErrorTypes.notFound};
throw { data: "Node not found.", type: ErrorTypes.notFound };
}

return result;
}

async function sendMonitoringConfirmationMail(node: StoredNode, nodeSecrets: NodeSecrets): Promise<void> {
async function sendMonitoringConfirmationMail(
node: StoredNode,
nodeSecrets: NodeSecrets
): Promise<void> {
const monitoringToken = nodeSecrets.monitoringToken;
if (!monitoringToken) {
Logger
.tag('monitoring', 'confirmation')
.error('Could not enqueue confirmation mail. No monitoring token found.');
throw {data: 'Internal error.', type: ErrorTypes.internalError};
Logger.tag("monitoring", "confirmation").error(
"Could not enqueue confirmation mail. No monitoring token found."
);
throw { data: "Internal error.", type: ErrorTypes.internalError };
}

const confirmUrl = monitoringConfirmUrl(monitoringToken);
@@ -413,26 +501,36 @@ async function sendMonitoringConfirmationMail(node: StoredNode, nodeSecrets: Nod

await MailService.enqueue(
config.server.email.from,
node.nickname + ' <' + node.email + '>',
node.nickname + " <" + node.email + ">",
MailType.MONITORING_CONFIRMATION,
{
node: node,
confirmUrl: confirmUrl,
disableUrl: disableUrl
},
disableUrl: disableUrl,
}
);
}

export async function createNode(node: CreateOrUpdateNode): Promise<StoredNode> {
export async function createNode(
node: CreateOrUpdateNode
): Promise<StoredNode> {
const token: Token = generateToken();
const nodeSecrets: NodeSecrets = {};

const monitoringState = node.monitoring ? MonitoringState.PENDING : MonitoringState.DISABLED;
const monitoringState = node.monitoring
? MonitoringState.PENDING
: MonitoringState.DISABLED;
if (node.monitoring) {
nodeSecrets.monitoringToken = generateToken<MonitoringToken>();
}

const createdNode = await writeNodeFile(false, token, node, monitoringState, nodeSecrets);
const createdNode = await writeNodeFile(
false,
token,
node,
monitoringState,
nodeSecrets
);

if (createdNode.monitoringState == MonitoringState.PENDING) {
await sendMonitoringConfirmationMail(createdNode, nodeSecrets);
@@ -441,8 +539,12 @@ export async function createNode(node: CreateOrUpdateNode): Promise<StoredNode>
return createdNode;
}

export async function updateNode(token: Token, node: CreateOrUpdateNode): Promise<StoredNode> {
const {node: currentNode, nodeSecrets} = await getNodeDataWithSecretsByToken(token);
export async function updateNode(
token: Token,
node: CreateOrUpdateNode
): Promise<StoredNode> {
const { node: currentNode, nodeSecrets } =
await getNodeDataWithSecretsByToken(token);

let monitoringState = MonitoringState.DISABLED;
let monitoringToken: MonitoringToken | undefined = undefined;
@@ -461,11 +563,12 @@ export async function updateNode(token: Token, node: CreateOrUpdateNode): Promis
// new email so we need a new token and a reconfirmation
monitoringState = MonitoringState.PENDING;
monitoringToken = generateToken<MonitoringToken>();

} else {
// email unchanged, keep token (fix if not set) and confirmation state
monitoringState = currentNode.monitoringState;
monitoringToken = nodeSecrets.monitoringToken || generateToken<MonitoringToken>();
monitoringToken =
nodeSecrets.monitoringToken ||
generateToken<MonitoringToken>();
}
break;

@@ -476,9 +579,15 @@ export async function updateNode(token: Token, node: CreateOrUpdateNode): Promis

nodeSecrets.monitoringToken = monitoringToken;

const storedNode = await writeNodeFile(true, token, node, monitoringState, nodeSecrets);
const storedNode = await writeNodeFile(
true,
token,
node,
monitoringState,
nodeSecrets
);
if (storedNode.monitoringState === MonitoringState.PENDING) {
await sendMonitoringConfirmationMail(storedNode, nodeSecrets)
await sendMonitoringConfirmationMail(storedNode, nodeSecrets);
}

return storedNode;
@@ -488,7 +597,7 @@ export async function internalUpdateNode(
token: Token,
node: CreateOrUpdateNode,
monitoringState: MonitoringState,
nodeSecrets: NodeSecrets,
nodeSecrets: NodeSecrets
): Promise<StoredNode> {
return await writeNodeFile(true, token, node, monitoringState, nodeSecrets);
}
@@ -502,52 +611,58 @@ export async function getAllNodes(): Promise<StoredNode[]> {
try {
files = await findNodeFiles({});
} catch (error) {
Logger.tag('nodes').error('Error getting all nodes:', error);
throw {data: 'Internal error.', type: ErrorTypes.internalError};
Logger.tag("nodes").error("Error getting all nodes:", error);
throw { data: "Internal error.", type: ErrorTypes.internalError };
}

const nodes: StoredNode[] = [];
for (const file of files) {
try {
const {node} = await parseNodeFile(file);
const { node } = await parseNodeFile(file);
nodes.push(node);
} catch (error) {
Logger.tag('nodes').error('Error getting all nodes:', error);
throw {data: 'Internal error.', type: ErrorTypes.internalError};
Logger.tag("nodes").error("Error getting all nodes:", error);
throw { data: "Internal error.", type: ErrorTypes.internalError };
}
}

return nodes;
}

export async function findNodeDataWithSecretsByMac(mac: MAC): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets } | null> {
return await findNodeDataByFilePattern({mac});
export async function findNodeDataWithSecretsByMac(
mac: MAC
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets } | null> {
return await findNodeDataByFilePattern({ mac });
}

export async function findNodeDataByMac(mac: MAC): Promise<StoredNode | null> {
const result = await findNodeDataByFilePattern({mac});
const result = await findNodeDataByFilePattern({ mac });
return result ? result.node : null;
}

export async function getNodeDataWithSecretsByToken(token: Token): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets }> {
return await getNodeDataByFilePattern({token: token});
export async function getNodeDataWithSecretsByToken(
token: Token
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets }> {
return await getNodeDataByFilePattern({ token: token });
}

export async function getNodeDataByToken(token: Token): Promise<StoredNode> {
const {node} = await getNodeDataByFilePattern({token: token});
const { node } = await getNodeDataByFilePattern({ token: token });
return node;
}

export async function getNodeDataWithSecretsByMonitoringToken(
monitoringToken: MonitoringToken
): Promise<{ node: StoredNode, nodeSecrets: NodeSecrets }> {
return await getNodeDataByFilePattern({monitoringToken: monitoringToken});
): Promise<{ node: StoredNode; nodeSecrets: NodeSecrets }> {
return await getNodeDataByFilePattern({ monitoringToken: monitoringToken });
}

export async function getNodeDataByMonitoringToken(
monitoringToken: MonitoringToken
): Promise<StoredNode> {
const {node} = await getNodeDataByFilePattern({monitoringToken: monitoringToken});
const { node } = await getNodeDataByFilePattern({
monitoringToken: monitoringToken,
});
return node;
}
@@ -555,7 +670,7 @@ export async function fixNodeFilenames(): Promise<void> {
const files = await findFilesInPeersPath();

for (const file of files) {
const {node, nodeSecrets} = await parseNodeFile(file);
const { node, nodeSecrets } = await parseNodeFile(file);

const expectedFilename = toNodeFilename(node.token, node, nodeSecrets);
if (file !== expectedFilename) {
@@ -563,16 +678,23 @@ export async function fixNodeFilenames(): Promise<void> {
await fs.rename(file, expectedFilename);
} catch (error) {
throw new Error(
'Cannot rename file ' + file + ' to ' + expectedFilename + ' => ' + error
"Cannot rename file " +
file +
" to " +
expectedFilename +
" => " +
error
);
}
}
}
}

export async function findNodesModifiedBefore(timestamp: UnixTimestampSeconds): Promise<StoredNode[]> {
export async function findNodesModifiedBefore(
timestamp: UnixTimestampSeconds
): Promise<StoredNode[]> {
const nodes = await getAllNodes();
return nodes.filter(node => node.modifiedAt < timestamp);
return nodes.filter((node) => node.modifiedAt < timestamp);
}

export async function getNodeStatistics(): Promise<NodeStatistics> {
@@ -584,8 +706,8 @@ export async function getNodeStatistics(): Promise<NodeStatistics> {
withCoords: 0,
monitoring: {
active: 0,
pending: 0
}
pending: 0,
},
};

for (const node of nodes) {