Merge branch 'main' into new-admin
commit 697df1ca99
27 changed files with 424 additions and 320 deletions
@@ -146,7 +146,7 @@ Then adjust `config.json` as desired. The following configuration options exist
 
 * **`server.email.from`** Sender address for outgoing e-mails, e.g.: `"Freifunk Knotenformular <no-reply@musterstadt.freifunk.net>"`
 * **`server.email.smtp`** Configuration of the SMTP server used for sending e-mails, as described in the documentation at
-  [https://nodemailer.com/2-0-0-beta/setup-smtp/](https://nodemailer.com/2-0-0-beta/setup-smtp/), e.g.:
+  [https://nodemailer.com/smtp/](https://nodemailer.com/smtp/), e.g.:
 
 ```
 {
@@ -2,9 +2,6 @@
 
 ## TODO
 
-* Different user accounts for admin panel:
-  * Username + password hash in config
-  * Commandline tool to generate hash
 * Test email rendering!
 
 ## Short term
@@ -68,7 +68,7 @@
     "@types/html-to-text": "^8.0.1",
     "@types/jest": "^28.1.6",
     "@types/lodash": "^4.14.178",
-    "@types/node": "^18.0.6",
+    "@types/node": "^18.6.2",
     "@types/node-cron": "^3.0.2",
     "@types/nodemailer": "^6.4.4",
     "@types/request": "^2.48.8",
@@ -101,7 +101,7 @@
     "time-grunt": "^2.0.0",
     "ts-jest": "^28.0.7",
     "typescript": "^4.7.4",
-    "yarn-audit-fix": "^9.3.2"
+    "yarn-audit-fix": "^9.3.3"
   },
   "resolutions": {
     "grunt-connect-proxy/**/http-proxy": "~1.18.1",
@@ -85,7 +85,7 @@ export class Task {
 
         this.job.run().then(result => {
             done(TaskState.IDLE, result);
-        }).catch((err: any) => {
+        }).catch(err => {
            Logger.tag('jobs').error("Job %s failed: %s", this.name, err);
            done(TaskState.FAILED, null);
        });
@@ -1,14 +1,25 @@
-import {LogLevel, LogLevels, isLogLevel} from "./types";
+import {isLogLevel, isUndefined, LoggingConfig, LogLevel, LogLevels} from "./types";
 import {ActivatableLoggerImpl} from "./logger";
-import _ from "lodash";
+
+function withDefault<T>(value: T | undefined, defaultValue: T): T {
+    return isUndefined(value) ? defaultValue : value;
+}
 
 class TestableLogger extends ActivatableLoggerImpl {
     private logs: any[][] = [];
 
-    constructor(enabled?: boolean) {
+    constructor(
+        enabled?: boolean,
+        debug?: boolean,
+        profile?: boolean,
+    ) {
         super();
         this.init(
-            enabled === false ? false : true, // default is true
+            new LoggingConfig(
+                withDefault(enabled, true),
+                withDefault(debug, true),
+                withDefault(profile, true),
+            ),
             (...args: any[]): void => this.doLog(...args)
         );
     }
@@ -35,14 +46,15 @@ function parseLogEntry(logEntry: any[]): ParsedLogEntry {
             `Empty log entry. Should always start with log message: ${logEntry}`
         );
     }
 
     const logMessage = logEntry[0];
     if (typeof logMessage !== "string") {
         throw new Error(
             `Expected log entry to start with string, but got: ${logMessage}`
         );
     }
 
+    // noinspection RegExpRedundantEscape
     const regexp = /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} ([A-Z]+) - (\[[^\]]*\])? *(.*)$/;
     const groups = logMessage.match(regexp);
     if (groups === null || groups.length < 4) {
@@ -59,13 +71,13 @@ function parseLogEntry(logEntry: any[]): ParsedLogEntry {
     }
 
     const tagsStr = groups[2].substring(1, groups[2].length - 1);
-    const tags = tagsStr ? _.split(tagsStr, ", ") : [];
+    const tags = tagsStr ? tagsStr.split(", ") : [];
     const message = groups[3];
     const args = logEntry.slice(1);
 
     return {
         level: level as LogLevel,
         tags,
         message,
         args,
     };
@@ -158,7 +170,7 @@ for (const level of LogLevels) {
             message: "%s %d %f %o %%",
             args: [],
         }]);
     });
 
     test(`should not escape ${level} message arguments`, () => {
         // given
@@ -174,7 +186,7 @@ for (const level of LogLevels) {
             message: "message",
             args: [1, "%s", "%d", "%f", "%o", "%"],
         }]);
     });
 
     test(`should not log ${level} message on disabled logger`, () => {
         // given
@@ -185,6 +197,59 @@ for (const level of LogLevels) {
 
         // then
         expect(parseLogs(logger.getLogs())).toEqual([]);
     });
 }
 
+test(`should not log debug message with disabled debugging`, () => {
+    // given
+    const logger = new TestableLogger(true, false, true);
+
+    // when
+    logger.tag("tag").debug("message");
+
+    // then
+    expect(parseLogs(logger.getLogs())).toEqual([]);
+});
+
+test(`should log profile message with disabled debugging`, () => {
+    // given
+    const logger = new TestableLogger(true, false, true);
+
+    // when
+    logger.tag("tag").profile("message");
+
+    // then
+    expect(parseLogs(logger.getLogs())).toEqual([{
+        level: "profile",
+        tags: ["tag"],
+        message: "message",
+        args: [],
+    }]);
+});
+
+test(`should not log profile message with disabled profiling`, () => {
+    // given
+    const logger = new TestableLogger(true, true, false);
+
+    // when
+    logger.tag("tag").profile("message");
+
+    // then
+    expect(parseLogs(logger.getLogs())).toEqual([]);
+});
+
+test(`should log debug message with disabled profiling`, () => {
+    // given
+    const logger = new TestableLogger(true, true, false);
+
+    // when
+    logger.tag("tag").debug("message");
+
+    // then
+    expect(parseLogs(logger.getLogs())).toEqual([{
+        level: "debug",
+        tags: ["tag"],
+        message: "message",
+        args: [],
+    }]);
+});
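The added tests above drive the new three-flag constructor: the flags map one-to-one onto `LoggingConfig(enabled, debug, profile)` and default to `true` via `withDefault`. A rough usage sketch, illustrative only and not part of the commit:

```
// Illustrative sketch (not part of the commit):
const allOn = new TestableLogger();                    // enabled, debug, profile default to true
const noDebug = new TestableLogger(true, false, true);

noDebug.tag("tag").debug("message");                   // suppressed: debug flag is false
noDebug.tag("tag").profile("message");                 // still logged: profile flag is true
```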
@@ -1,48 +1,51 @@
-import {Logger, TaggedLogger, LogLevel} from './types';
+import {isString, Logger, LoggingConfig, LogLevel, TaggedLogger} from './types';
 import moment from 'moment';
-import _ from 'lodash';
 
 export type LoggingFunction = (...args: any[]) => void;
 
 const noopTaggedLogger: TaggedLogger = {
-    log(level: LogLevel, ...args: any[]): void {},
-    debug(...args: any[]): void {},
-    info(...args: any[]): void {},
-    warn(...args: any[]): void {},
-    error(...args: any[]): void {},
-    profile(...args: any[]): void {},
+    log(_level: LogLevel, ..._args: any[]): void {},
+    debug(..._args: any[]): void {},
+    info(..._args: any[]): void {},
+    warn(..._args: any[]): void {},
+    error(..._args: any[]): void {},
+    profile(..._args: any[]): void {},
 };
 
 export interface ActivatableLogger extends Logger {
-    init(enabled: boolean, loggingFunction?: LoggingFunction): void;
+    init(config: LoggingConfig, loggingFunction?: LoggingFunction): void;
 }
 
+/**
+ * TODO: Check if LoggingConfig.debug and LoggingConfig.profile are handled.
+ */
 export class ActivatableLoggerImpl implements ActivatableLogger {
-    private enabled: boolean = false;
+    private config: LoggingConfig = new LoggingConfig(false, false, false);
     private loggingFunction: LoggingFunction = console.info;
 
-    init(enabled: boolean, loggingFunction?: LoggingFunction): void {
-        const config = require('./config').config;
-        this.enabled = enabled;
+    init(config: LoggingConfig, loggingFunction?: LoggingFunction): void {
+        this.config = config;
         this.loggingFunction = loggingFunction || console.info;
     }
 
     tag(...tags: string[]): TaggedLogger {
-        if (this.enabled) {
+        if (this.config.enabled) {
+            const debug = this.config.debug;
+            const profile = this.config.profile;
             const loggingFunction = this.loggingFunction;
             return {
                 log(level: LogLevel, ...args: any[]): void {
                     const timeStr = moment().format('YYYY-MM-DD HH:mm:ss');
                     const levelStr = level.toUpperCase();
-                    const tagsStr = tags ? '[' + _.join(tags, ', ') + ']' : '';
+                    const tagsStr = tags ? '[' + tags.join(', ') + ']' : '';
                     const messagePrefix = `${timeStr} ${levelStr} - ${tagsStr}`;
 
                     // Make sure to only replace %s, etc. in real log message
                     // but not in tags.
                     const escapedMessagePrefix = messagePrefix.replace(/%/g, '%%');
 
                     let message = '';
-                    if (args && _.isString(args[0])) {
+                    if (args && isString(args[0])) {
                         message = args[0];
                         args.shift();
                     }
@@ -53,7 +56,9 @@ export class ActivatableLoggerImpl implements ActivatableLogger {
                     loggingFunction(logStr, ...args);
                 },
                 debug(...args: any[]): void {
-                    this.log('debug', ...args);
+                    if (debug) {
+                        this.log('debug', ...args);
+                    }
                 },
                 info(...args: any[]): void {
                     this.log('info', ...args);
@@ -65,7 +70,9 @@ export class ActivatableLoggerImpl implements ActivatableLogger {
                     this.log('error', ...args);
                 },
                 profile(...args: any[]): void {
-                    this.log('profile', ...args);
+                    if (profile) {
+                        this.log('profile', ...args);
+                    }
                 },
             }
         } else {
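With this change the logger is configured from a `LoggingConfig` (`enabled`, `debug`, `profile`) instead of a single boolean. A minimal sketch of the new contract, assuming direct construction as in the tests (illustrative only):

```
// Illustrative sketch (not part of the commit):
const logger = new ActivatableLoggerImpl();
logger.init(new LoggingConfig(true, false, true), console.info);

logger.tag("main", "startup").info("Server starting up...");  // logged
logger.tag("db").debug("query plan: %o", {});                 // dropped: debug === false
logger.tag("jobs").profile("run took %d ms", 42);             // logged: profile === true
```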
server/mail/index.ts (new file, 29 lines)

@@ -0,0 +1,29 @@
+import {createTransport, Transporter} from "nodemailer";
+import {config} from "../config";
+import * as MailTemplateService from "../services/mailTemplateService";
+import Mail from "nodemailer/lib/mailer";
+import SMTPTransport from "nodemailer/lib/smtp-transport";
+
+let transporterSingleton: Transporter | null = null;
+
+function transporter() {
+    if (!transporterSingleton) {
+        const options = {
+            ...config.server.email.smtp,
+            pool: true,
+        };
+        transporterSingleton = createTransport(new SMTPTransport(options));
+
+        MailTemplateService.configureTransporter(transporterSingleton);
+    }
+
+    return transporterSingleton;
+}
+
+export function init(): void {
+    transporter();
+}
+
+export async function send(options: Mail.Options): Promise<void> {
+    await transporter().sendMail(options);
+}
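The new module owns the nodemailer transporter: `main()` calls `mail.init()` once, and the mail service delegates to `mail.send()`. A hedged usage sketch (the recipient, subject and body are invented; the option names are nodemailer's `Mail.Options`):

```
// Illustrative sketch (addresses and content are invented):
import * as mail from "./mail";

mail.init();
await mail.send({
    from: config.server.email.from,
    to: "node-owner@example.com",
    subject: "Freifunk node monitoring",
    html: "<p>Hello!</p>",
});
```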
@@ -5,17 +5,18 @@ import * as db from "./db/database"
 import * as scheduler from "./jobs/scheduler"
 import * as router from "./router"
 import * as app from "./app"
+import * as mail from "./mail";
 
 app.init();
-Logger.init(config.server.logging.enabled);
+Logger.init(config.server.logging);
 Logger.tag('main', 'startup').info('Server starting up...');
 
 async function main() {
     Logger.tag('main').info('Initializing...');
 
     await db.init();
+    mail.init();
     scheduler.init();
 
     router.init();
 
     app.app.listen(config.server.port, '::');
@@ -1,5 +1,3 @@
-import _ from "lodash";
-
 import CONSTRAINTS from "../validation/constraints";
 import ErrorTypes from "../utils/errorTypes";
 import * as MonitoringService from "../services/monitoringService";
@@ -12,13 +10,14 @@ import {isMonitoringToken, JSONObject, MonitoringResponse, MonitoringToken, toMo
 
 const isValidToken = forConstraint(CONSTRAINTS.token, false);
 
+// FIXME: Get rid of any
 async function doGetAll(req: Request): Promise<{ total: number, result: any }> {
     const restParams = await Resources.getValidRestParams('list', null, req);
     const {monitoringStates, total} = await MonitoringService.getAll(restParams);
     return {
         total,
-        result: _.map(monitoringStates, function (state) {
-            state.mapId = _.toLower(state.mac).replace(/:/g, '');
+        result: monitoringStates.map(state => {
+            state.mapId = state.mac.toLowerCase().replace(/:/g, "");
             return state;
         })
     };
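The `mapId` above is just the MAC address lowercased with the colons stripped; a one-line illustration (the MAC value is invented):

```
// Illustrative only:
"AA:BB:CC:DD:EE:FF".toLowerCase().replace(/:/g, "");  // => "aabbccddeeff"
```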
@@ -1,5 +1,3 @@
-import _ from "lodash";
-
 import Constraints from "../validation/constraints";
 import ErrorTypes from "../utils/errorTypes";
 import * as MonitoringService from "../services/monitoringService";
@@ -12,13 +10,17 @@ import {Request, Response} from "express";
 import {
     CreateOrUpdateNode,
     DomainSpecificNodeResponse,
+    isCreateOrUpdateNode,
     isNodeSortField,
-    isToken, JSONObject,
+    isString,
+    isToken,
+    isUndefined,
+    JSONObject,
+    JSONValue,
     MAC,
     NodeResponse,
     NodeStateData,
     NodeTokenResponse,
-    StoredNode,
     toDomainSpecificNodeResponse,
     Token,
     toNodeResponse,
@@ -27,16 +29,27 @@ import {
 
 const nodeFields = ['hostname', 'key', 'email', 'nickname', 'mac', 'coords', 'monitoring'];
 
-function getNormalizedNodeData(reqData: any): CreateOrUpdateNode {
+function getNormalizedNodeData(reqData: JSONObject): CreateOrUpdateNode {
     const node: { [key: string]: any } = {};
-    _.each(nodeFields, function (field) {
-        let value = normalizeString(reqData[field]);
-        if (field === 'mac') {
-            value = normalizeMac(value as MAC);
+    for (const field of nodeFields) {
+        let value: JSONValue | undefined = reqData[field];
+        if (isString(value)) {
+            value = normalizeString(value);
+            if (field === 'mac') {
+                value = normalizeMac(value as MAC);
+            }
         }
-        node[field] = value;
-    });
-    return node as CreateOrUpdateNode;
+
+        if (!isUndefined(value)) {
+            node[field] = value;
+        }
+    }
+
+    if (isCreateOrUpdateNode(node)) {
+        return node;
+    }
+
+    throw {data: "Invalid node data.", type: ErrorTypes.badRequest};
 }
 
 const isValidNode = forConstraints(Constraints.node, false);
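`getNormalizedNodeData` now normalizes only string fields, drops `undefined` values and validates the result with the `isCreateOrUpdateNode` guard instead of casting blindly. A hedged sketch of the behaviour (the request payload below is invented):

```
// Illustrative sketch (this request body is invented):
const reqData: JSONObject = {
    hostname: "my-node",          // strings go through normalizeString
    mac: "aa:bb:cc:dd:ee:ff",     // the MAC additionally goes through normalizeMac
    monitoring: true,             // non-string values are passed through untouched
    // fields that are undefined are omitted from the result
};
const node = getNormalizedNodeData(reqData);
// If the collected fields do not satisfy isCreateOrUpdateNode, the function
// throws {data: "Invalid node data.", type: ErrorTypes.badRequest}.
```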
@@ -90,15 +103,15 @@ async function doGetAll(req: Request): Promise<{ total: number; pageNodes: any }
 
     const nodes = await NodeService.getAllNodes();
 
-    const realNodes = _.filter(nodes, node =>
+    const realNodes = nodes.filter(node =>
         // We ignore nodes without tokens as those are only manually added ones like gateways.
-        !!node.token
+        !!node.token // FIXME: As node.token may not be undefined or null here, handle this when loading!
     );
 
-    const macs: MAC[] = _.map(realNodes, (node: StoredNode): MAC => node.mac);
+    const macs: MAC[] = realNodes.map(node => node.mac);
     const nodeStateByMac = await MonitoringService.getByMacs(macs);
 
-    const domainSpecificNodes: DomainSpecificNodeResponse[] = _.map(realNodes, (node: StoredNode): DomainSpecificNodeResponse => {
+    const domainSpecificNodes: DomainSpecificNodeResponse[] = realNodes.map(node => {
         const nodeState: NodeStateData = nodeStateByMac[node.mac] || {};
         return toDomainSpecificNodeResponse(node, nodeState);
     });
@@ -1,9 +1,7 @@
-import _ from "lodash";
-
 import CONSTRAINTS from "../validation/constraints";
 import ErrorTypes from "../utils/errorTypes";
 import * as Resources from "../utils/resources";
-import {Entity, handleJSONWithData, RequestData} from "../utils/resources";
+import {handleJSONWithData, RequestData} from "../utils/resources";
 import {getTasks, Task, TaskState} from "../jobs/scheduler";
 import {normalizeString} from "../utils/strings";
 import {forConstraint} from "../validation/validator";
@@ -12,18 +10,18 @@ import {isString, isTaskSortField} from "../types";
 
 const isValidId = forConstraint(CONSTRAINTS.id, false);
 
-interface TaskResponse {
-    id: number,
-    name: string,
-    description: string,
-    schedule: string,
-    runningSince: number | null,
-    lastRunStarted: number | null,
-    lastRunDuration: number | null,
-    state: string,
-    result: string | null,
-    message: string | null,
-    enabled: boolean,
+type TaskResponse = {
+    id: number;
+    name: string;
+    description: string;
+    schedule: string;
+    runningSince: number | null;
+    lastRunStarted: number | null;
+    lastRunDuration: number | null;
+    state: string;
+    result: string | null;
+    message: string | null;
+    enabled: boolean;
 }
 
 function toTaskResponse(task: Task): TaskResponse {
@@ -77,11 +75,11 @@ async function setTaskEnabled(data: RequestData, enable: boolean): Promise<TaskR
     return toTaskResponse(task);
 }
 
-async function doGetAll(req: Request): Promise<{ total: number, pageTasks: Entity[] }> {
+async function doGetAll(req: Request): Promise<{ total: number, pageTasks: Task[] }> {
     const restParams = await Resources.getValidRestParams('list', null, req);
 
     const tasks = Resources.sort(
-        _.values(getTasks()),
+        Object.values(getTasks()),
         isTaskSortField,
         restParams
     );
@@ -104,7 +102,7 @@ export function getAll(req: Request, res: Response): void {
     doGetAll(req)
         .then(({total, pageTasks}) => {
             res.set('X-Total-Count', total.toString(10));
-            Resources.success(res, _.map(pageTasks, toTaskResponse));
+            Resources.success(res, pageTasks.map(toTaskResponse));
         })
         .catch(err => Resources.error(res, err));
 }
@@ -1,17 +1,15 @@
 import _ from "lodash";
-import deepExtend from "deep-extend";
 import moment, {Moment} from "moment";
-import {createTransport, Transporter} from "nodemailer";
-
-import {config} from "../config";
 import {db} from "../db/database";
 import Logger from "../logger";
 import * as MailTemplateService from "./mailTemplateService";
 import * as Resources from "../utils/resources";
 import {RestParams} from "../utils/resources";
 import {
-    EmailAddress, isJSONObject,
-    isMailSortField, isMailType, JSONObject,
+    EmailAddress,
+    isJSONObject,
+    isMailSortField,
+    isMailType,
     Mail,
     MailData,
     MailId,
@@ -21,6 +19,7 @@ import {
     UnixTimestampSeconds
 } from "../types";
 import ErrorTypes from "../utils/errorTypes";
+import {send} from "../mail";
 
 type EmaiQueueRow = {
     id: MailId,
@@ -35,26 +34,6 @@ type EmaiQueueRow = {
 
 const MAIL_QUEUE_DB_BATCH_SIZE = 50;
 
-// TODO: Extract transporter into own module and initialize during main().
-let transporterSingleton: Transporter | null = null;
-
-function transporter() {
-    if (!transporterSingleton) {
-        transporterSingleton = createTransport(deepExtend(
-            {},
-            config.server.email.smtp,
-            {
-                transport: 'smtp',
-                pool: true
-            } as JSONObject
-        ));
-
-        MailTemplateService.configureTransporter(transporterSingleton);
-    }
-
-    return transporterSingleton;
-}
-
 async function sendMail(options: Mail): Promise<void> {
     Logger
         .tag('mail', 'queue')
@@ -73,7 +52,7 @@ async function sendMail(options: Mail): Promise<void> {
         html: renderedTemplate.body
     };
 
-    await transporter().sendMail(mailOptions);
+    await send(mailOptions);
 
     Logger.tag('mail', 'queue').info('Mail[%d] has been send.', options.id);
 }
@@ -170,7 +149,7 @@ export async function getPendingMails(restParams: RestParams): Promise<{ mails:
 
     const mails = await db.all(
         'SELECT * FROM email_queue WHERE ' + filter.query,
-        _.concat([], filter.params),
+        filter.params,
     );
 
     return {
@@ -97,7 +97,7 @@ export async function render(mailOptions: Mail): Promise<{subject: string, body:
 
     try {
         return {
-            subject: _.trim(_.template(subject.toString())(data)),
+            subject: _.template(subject.toString())(data).trim(),
             body: _.template(body.toString())(data)
         };
     } catch (error) {
@@ -1,4 +1,3 @@
-import moment from 'moment';
 import {ParsedNode, parseNode, parseNodesJson} from "./monitoringService";
 import {Domain, MAC, OnlineState, Site, UnixTimestampSeconds} from "../types";
 import Logger from '../logger';
@@ -205,8 +204,8 @@ test('parseNode() should succeed parsing node without site and domain', () => {
         importTimestamp: importTimestamp,
         state: OnlineState.ONLINE,
         lastSeen: TIMESTAMP_VALID,
-        site: "<unknown-site>" as Site,
-        domain: "<unknown-domain>" as Domain,
+        site: undefined,
+        domain: undefined,
     };
     expect(parseNode(importTimestamp, nodeData)).toEqual(expectedParsedNode);
 });
@@ -19,8 +19,13 @@ import {
     Domain,
     DurationSeconds,
     Hostname,
+    isBoolean,
+    isDomain,
     isMonitoringSortField,
     isOnlineState,
+    isSite,
+    isString,
+    isUndefined,
     MAC,
     MailType,
     MonitoringSortField,
@@ -71,8 +76,8 @@ export type ParsedNode = {
     importTimestamp: UnixTimestampSeconds,
     state: OnlineState,
     lastSeen: UnixTimestampSeconds,
-    site: Site,
-    domain: Domain,
+    site?: Site,
+    domain?: Domain,
 };
 
 export type NodesParsingResult = {
@@ -162,7 +167,7 @@ async function storeNodeInformation(nodeData: ParsedNode, node: StoredNode): Pro
 
     const row = await db.get('SELECT * FROM node_state WHERE mac = ?', [node.mac]);
 
-    if (_.isUndefined(row)) {
+    if (isUndefined(row)) {
         return await insertNodeInformation(nodeData, node);
     } else {
         return await updateNodeInformation(nodeData, node, row);
@@ -171,7 +176,6 @@ async function storeNodeInformation(nodeData: ParsedNode, node: StoredNode): Pro
 
 const isValidMac = forConstraint(CONSTRAINTS.node.mac, false);
 
-// TODO: Use sparkson for JSON parsing.
 export function parseNode(importTimestamp: UnixTimestampSeconds, nodeData: any): ParsedNode {
     if (!_.isPlainObject(nodeData)) {
         throw new Error(
@@ -186,7 +190,7 @@ export function parseNode(importTimestamp: UnixTimestampSeconds, nodeData: any):
     }
 
     const nodeId = nodeData.nodeinfo.node_id;
-    if (!nodeId || !_.isString(nodeId)) {
+    if (!nodeId || !isString(nodeId)) {
         throw new Error(
             `Invalid node id of type "${typeof nodeId}": ${nodeId}`
         );
@@ -210,7 +214,7 @@ export function parseNode(importTimestamp: UnixTimestampSeconds, nodeData: any):
             'Node ' + nodeId + ': Unexpected flags type: ' + (typeof nodeData.flags)
         );
     }
-    if (!_.isBoolean(nodeData.flags.online)) {
+    if (!isBoolean(nodeData.flags.online)) {
         throw new Error(
             'Node ' + nodeId + ': Unexpected flags.online type: ' + (typeof nodeData.flags.online)
         );
@@ -224,14 +228,14 @@ export function parseNode(importTimestamp: UnixTimestampSeconds, nodeData: any):
         );
     }
 
-    let site = "<unknown-site>" as Site; // FIXME: Handle this
-    if (_.isPlainObject(nodeData.nodeinfo.system) && _.isString(nodeData.nodeinfo.system.site_code)) {
-        site = nodeData.nodeinfo.system.site_code as Site;
+    let site: Site | undefined;
+    if (_.isPlainObject(nodeData.nodeinfo.system) && isSite(nodeData.nodeinfo.system.site_code)) {
+        site = nodeData.nodeinfo.system.site_code;
     }
 
-    let domain = "<unknown-domain>" as Domain; // FIXME: Handle this
-    if (_.isPlainObject(nodeData.nodeinfo.system) && _.isString(nodeData.nodeinfo.system.domain_code)) {
-        domain = nodeData.nodeinfo.system.domain_code as Domain;
+    let domain: Domain | undefined;
+    if (_.isPlainObject(nodeData.nodeinfo.system) && isDomain(nodeData.nodeinfo.system.domain_code)) {
+        domain = nodeData.nodeinfo.system.domain_code;
     }
 
     return {
@@ -244,7 +248,6 @@ export function parseNode(importTimestamp: UnixTimestampSeconds, nodeData: any):
     };
 }
 
-// TODO: Use sparkson for JSON parsing.
 export function parseNodesJson(body: string): NodesParsingResult {
     Logger.tag('monitoring', 'information-retrieval').debug('Parsing nodes.json...');
 
@@ -556,6 +559,7 @@ async function retrieveNodeInformationForUrls(urls: string[]): Promise<RetrieveN
     }
 }
 
+// FIXME: Replace any[] by type.
 export async function getAll(restParams: RestParams): Promise<{ total: number, monitoringStates: any[] }> {
     const filterFields = [
         'hostname',
@@ -569,7 +573,7 @@ export async function getAll(restParams: RestParams): Promise<{ total: number, m
 
     const row = await db.get<{ total: number }>(
         'SELECT count(*) AS total FROM node_state WHERE ' + where.query,
-        _.concat([], where.params),
+        where.params,
     );
 
     const total = row?.total || 0;
@@ -583,7 +587,7 @@ export async function getAll(restParams: RestParams): Promise<{ total: number, m
 
     const monitoringStates = await db.all(
         'SELECT * FROM node_state WHERE ' + filter.query,
-        _.concat([], filter.params),
+        filter.params,
     );
 
     return {monitoringStates, total};
@@ -601,7 +605,7 @@ export async function getByMacs(macs: MAC[]): Promise<Record<MAC, NodeStateData>
 
     const rows = await db.all<NodeStateRow>(
         'SELECT * FROM node_state WHERE ' + inCondition.query,
-        _.concat([], inCondition.params),
+        inCondition.params,
     );
 
     for (const row of rows) {
@@ -611,8 +615,8 @@ export async function getByMacs(macs: MAC[]): Promise<Record<MAC, NodeStateData>
         }
 
         nodeStateByMac[row.mac] = {
-            site: row.site || "<unknown-site>" as Site, // FIXME: Handle this
-            domain: row.domain || "<unknown-domain>" as Domain, // FIXME: Handle this
+            site: row.site || undefined,
+            domain: row.domain || undefined,
             state: onlineState,
         };
     }
@@ -732,7 +736,7 @@ async function deleteNeverOnlineNodesBefore(deleteBefore: UnixTimestampSeconds):
         deletionCandidates.length
     );
 
-    const deletionCandidateMacs: MAC[] = _.map(deletionCandidates, node => node.mac);
+    const deletionCandidateMacs: MAC[] = deletionCandidates.map(node => node.mac);
     const chunks: MAC[][] = _.chunk(deletionCandidateMacs, NEVER_ONLINE_NODES_DELETION_CHUNK_SIZE);
 
     Logger
@@ -751,10 +755,7 @@ async function deleteNeverOnlineNodesBefore(deleteBefore: UnixTimestampSeconds):
         ' MACs for deletion.'
     );
 
-    const placeholders = _.join(
-        _.map(macs, () => '?'),
-        ','
-    );
+    const placeholders = macs.map(() => '?').join(',');
 
     const rows: { mac: MAC }[] = await db.all(
         `SELECT * FROM node_state WHERE mac IN (${placeholders})`,
@@ -771,7 +772,7 @@ async function deleteNeverOnlineNodesBefore(deleteBefore: UnixTimestampSeconds):
         ' nodes found in monitoring database. Those should be skipped.'
     );
 
-    const seenMacs: MAC[] = _.map(rows, (row: { mac: MAC }) => row.mac as MAC);
+    const seenMacs: MAC[] = rows.map(row => row.mac);
     const neverSeenMacs = _.difference(macs, seenMacs);
 
     Logger
@@ -1,4 +1,3 @@
-import _ from "lodash";
 import async from "async";
 import crypto from "crypto";
 import oldFs, {promises as fs} from "graceful-fs";
@@ -18,7 +17,12 @@ import {
     EmailAddress,
     FastdKey,
     Hostname,
+    isFastdKey,
+    isHostname,
+    isMAC,
+    isMonitoringToken,
     isStoredNode,
+    isToken,
     MAC,
     MailType,
     MonitoringState,
@@ -29,6 +33,7 @@ import {
     StoredNode,
     Token,
     toUnixTimestampSeconds,
+    TypeGuard,
     unhandledEnumField,
     UnixTimestampMilliseconds,
     UnixTimestampSeconds
@@ -38,21 +43,19 @@ import util from "util";
 const pglob = util.promisify(glob);
 
 type NodeFilter = {
-    // TODO: Newtype
-    hostname?: string,
+    hostname?: Hostname,
     mac?: MAC,
     key?: FastdKey,
     token?: Token,
     monitoringToken?: MonitoringToken,
 }
 
-// TODO: Newtypes?
 type NodeFilenameParsed = {
-    hostname?: string,
-    mac?: string,
-    key?: string,
-    token?: string,
-    monitoringToken?: string,
+    hostname?: Hostname,
+    mac?: MAC,
+    key?: FastdKey,
+    token?: Token,
+    monitoringToken?: MonitoringToken,
 }
 
 enum LINE_PREFIX {
@@ -66,7 +69,6 @@ enum LINE_PREFIX {
     MONITORING_TOKEN = "# Monitoring-Token: ",
 }
 
-const filenameParts = ['hostname', 'mac', 'key', 'token', 'monitoringToken'];
 
 function generateToken<Type extends string & { readonly __tag: symbol } = never>(): Type {
     return crypto.randomBytes(8).toString('hex') as Type;
@@ -109,16 +111,20 @@ async function findFilesInPeersPath(): Promise<string[]> {
 }
 
 function parseNodeFilename(filename: string): NodeFilenameParsed {
-    const parts = _.split(filename, '@', filenameParts.length);
-    const parsed: { [key: string]: string | undefined } = {};
-    const zippedParts = _.zip<string, string>(filenameParts, parts);
-    _.each(zippedParts, part => {
-        const key = part[0];
-        if (key) {
-            parsed[key] = part[1];
-        }
-    });
-    return parsed;
+    const parts = filename.split('@', 5);
+
+    function get<T>(isT: TypeGuard<T>, index: number): T | undefined {
+        const value = index >= 0 && index < parts.length ? parts[index] : undefined;
+        return isT(value) ? value : undefined;
+    }
+
+    return {
+        hostname: get(isHostname, 0),
+        mac: get(isMAC, 1),
+        key: get(isFastdKey, 2),
+        token: get(isToken, 3),
+        monitoringToken: get(isMonitoringToken, 4),
+    };
 }
 
 function isDuplicate(filter: NodeFilter, token?: Token): boolean {
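`parseNodeFilename` now splits the peer filename on `@` and validates each segment with its type guard instead of zipping the parts against a key list. A hedged illustration (the filename below is made up):

```
// Illustrative only (this filename is invented):
const parsed = parseNodeFilename("mynode@aa:bb:cc:dd:ee:ff@fastdkey@token@monitoringtoken");
// parsed.hostname === "mynode", parsed.mac === "aa:bb:cc:dd:ee:ff", ...
// Segments that fail their guard (isHostname, isMAC, ...) come back as undefined.
```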
@@ -447,7 +453,7 @@ export async function updateNode(token: Token, node: CreateOrUpdateNode): Promis
             // monitoring just has been enabled
             monitoringState = MonitoringState.PENDING;
             monitoringToken = generateToken<MonitoringToken>();
             break;
 
         case MonitoringState.PENDING:
         case MonitoringState.ACTIVE:
@@ -461,7 +467,7 @@ export async function updateNode(token: Token, node: CreateOrUpdateNode): Promis
                 monitoringState = currentNode.monitoringState;
                 monitoringToken = nodeSecrets.monitoringToken || generateToken<MonitoringToken>();
             }
             break;
 
         default:
             unhandledEnumField(currentNode.monitoringState);
@@ -566,14 +572,14 @@ export async function fixNodeFilenames(): Promise<void> {
 
 export async function findNodesModifiedBefore(timestamp: UnixTimestampSeconds): Promise<StoredNode[]> {
     const nodes = await getAllNodes();
-    return _.filter(nodes, node => node.modifiedAt < timestamp);
+    return nodes.filter(node => node.modifiedAt < timestamp);
 }
 
 export async function getNodeStatistics(): Promise<NodeStatistics> {
     const nodes = await getAllNodes();
 
     const nodeStatistics: NodeStatistics = {
-        registered: _.size(nodes),
+        registered: nodes.length,
         withVPN: 0,
         withCoords: 0,
         monitoring: {
@@ -1,17 +1,21 @@
 import {ArrayField, Field, RawJsonField} from "sparkson"
-import {ClientConfig, JSONObject, Url} from "./shared";
+import {ClientConfig, DurationMilliseconds, isString, toIsNewtype, Url} from "./shared";
 
-// TODO: Replace string types by more specific types like URL, Password, etc.
-
 export type Username = string & { readonly __tag: unique symbol };
+export const isUsername = toIsNewtype(isString, "" as Username);
+
 export type CleartextPassword = string & { readonly __tag: unique symbol };
+export const isCleartextPassword = toIsNewtype(isString, "" as CleartextPassword);
+
 export type PasswordHash = string & { readonly __tag: unique symbol };
+export const isPasswordHash = toIsNewtype(isString, "" as PasswordHash);
 
 export class UsersConfig {
     constructor(
         @Field("user") public username: Username,
         @Field("passwordHash") public passwordHash: PasswordHash,
-    ) {}
+    ) {
+    }
 }
 
 export class LoggingConfig {
@@ -19,51 +23,79 @@ export class LoggingConfig {
         @Field("enabled") public enabled: boolean,
         @Field("debug") public debug: boolean,
         @Field("profile") public profile: boolean,
-    ) {}
+    ) {
+    }
 }
 
 export class InternalConfig {
     constructor(
         @Field("active") public active: boolean,
         @ArrayField("users", UsersConfig) public users: UsersConfig[],
-    ) {}
+    ) {
+    }
+}
+
+export class SMTPAuthConfig {
+    constructor(
+        @Field("user") public user: Username,
+        @Field("pass") public pass: CleartextPassword,
+    ) {
+    }
+}
+
+// For details see: https://nodemailer.com/smtp/
+export class SMTPConfig {
+    constructor(
+        @Field("host") public host?: string,
+        @Field("port") public port?: number,
+        @Field("auth") public auth?: SMTPAuthConfig,
+        @Field("secure") public secure?: boolean,
+        @Field("ignoreTLS") public ignoreTLS?: boolean,
+        @Field("requireTLS") public requireTLS?: boolean,
+        @Field("opportunisticTLS") public opportunisticTLS?: boolean,
+        @Field("name") public name?: string,
+        @Field("localAddress") public localAddress?: string,
+        @Field("connectionTimeout") public connectionTimeout?: DurationMilliseconds,
+        @Field("greetingTimeout") public greetingTimeout?: DurationMilliseconds,
+        @Field("socketTimeout") public socketTimeout?: DurationMilliseconds,
+    ) {
+    }
 }
 
 export class EmailConfig {
     constructor(
         @Field("from") public from: string,
-        // For details see: https://nodemailer.com/2-0-0-beta/setup-smtp/
-        @RawJsonField("smtp") public smtp: JSONObject,
-    ) {}
+        @RawJsonField("smtp") public smtp: SMTPConfig,
+    ) {
+    }
 }
 
 export class ServerMapConfig {
     constructor(
         @ArrayField("nodesJsonUrl", String) public nodesJsonUrl: Url[],
-    ) {}
+    ) {
+    }
 }
 
 export class ServerConfig {
     constructor(
         @Field("baseUrl") public baseUrl: Url,
         @Field("port") public port: number,
 
         @Field("databaseFile") public databaseFile: string,
         @Field("peersPath") public peersPath: string,
 
         @Field("logging") public logging: LoggingConfig,
         @Field("internal") public internal: InternalConfig,
         @Field("email") public email: EmailConfig,
         @Field("map") public map: ServerMapConfig,
 
         @Field("rootPath", true, undefined, "/") public rootPath: string,
-    ) {}
+    ) {
+    }
 }
 
 export class Config {
     constructor(
         @Field("server") public server: ServerConfig,
         @Field("client") public client: ClientConfig,
-    ) {}
+    ) {
+    }
 }
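The typed `SMTPConfig`/`SMTPAuthConfig` classes replace the former free-form `JSONObject` for `server.email.smtp`. A hedged sketch of a matching config fragment, written as a TypeScript object literal (host, port and credentials are invented; only the field names come from the classes above):

```
// Illustrative values only:
const smtp = {
    host: "smtp.musterstadt.freifunk.net",
    port: 465,
    secure: true,
    auth: {
        user: "no-reply@musterstadt.freifunk.net",
        pass: "<secret>",
    },
    connectionTimeout: 30000, // DurationMilliseconds
};
```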
@@ -3,6 +3,7 @@ import {
     Domain,
     DomainSpecificNodeResponse,
     EmailAddress,
+    isNumber,
     JSONObject,
     MonitoringResponse,
     MonitoringState,
@@ -13,6 +14,7 @@ import {
     Site,
     StoredNode,
     toIsEnum,
+    toIsNewtype,
 } from "./shared";
 
 export * from "./config";
@@ -21,8 +23,8 @@ export * from "./logger";
 export * from "./shared";
 
 export type NodeStateData = {
-    site: Site,
-    domain: Domain,
+    site?: Site,
+    domain?: Domain,
     state: OnlineState,
 }
 
@@ -90,12 +92,13 @@ export function toMonitoringResponse(node: StoredNode): MonitoringResponse {
     };
 }
 
-// TODO: Complete interface / class declaration.
 export type NodeSecrets = {
     monitoringToken?: MonitoringToken,
 };
 
 export type MailId = number & { readonly __tag: unique symbol };
+export const isMailId = toIsNewtype(isNumber, NaN as MailId);
 
 export type MailData = JSONObject;
 
 export enum MailType {
@@ -108,12 +111,11 @@ export enum MailType {
 
 export const isMailType = toIsEnum(MailType);
 
-export interface Mail {
-    id: MailId,
-    email: MailType,
-    sender: EmailAddress,
-    recipient: EmailAddress,
-    data: MailData,
-    failures: number,
+export type Mail = {
+    id: MailId;
+    email: MailType;
+    sender: EmailAddress;
+    recipient: EmailAddress;
+    data: MailData;
+    failures: number;
 }
 
@@ -85,6 +85,13 @@ export function isString(arg: unknown): arg is string {
     return typeof arg === "string"
 }
 
+export function toIsNewtype<
+    Type extends Value & { readonly __tag: symbol },
+    Value,
+>(isValue: TypeGuard<Value>, _example: Type): TypeGuard<Type> {
+    return (arg: unknown): arg is Type => isValue(arg);
+}
+
 export function isNumber(arg: unknown): arg is number {
     return typeof arg === "number"
 }
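`toIsNewtype` derives a type guard for a branded ("newtype") alias from the guard of its underlying type; at runtime it only checks the base value, while the `_example` argument pins the brand for type inference. Usage as in the guards below (the checked values are invented):

```
type Url = string & { readonly __tag: unique symbol };
const isUrl = toIsNewtype(isString, "" as Url);

isUrl("https://map.example.com/nodes.json");  // true: any string passes
isUrl(42);                                    // false
```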
@ -109,18 +116,22 @@ export function toIsEnum<E>(enumDef: E): EnumTypeGuard<E> {
|
||||||
return (arg): arg is EnumValue<E> => Object.values(enumDef).includes(arg as [keyof E]);
|
return (arg): arg is EnumValue<E> => Object.values(enumDef).includes(arg as [keyof E]);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function isRegExp(arg: unknown): arg is RegExp {
|
||||||
|
return isObject(arg) && arg instanceof RegExp;
|
||||||
|
}
|
||||||
|
|
||||||
export function isOptional<T>(arg: unknown, isT: TypeGuard<T>): arg is (T | undefined) {
|
export function isOptional<T>(arg: unknown, isT: TypeGuard<T>): arg is (T | undefined) {
|
||||||
return arg === undefined || isT(arg);
|
return arg === undefined || isT(arg);
|
||||||
}
|
}
|
||||||
|
|
||||||
export type Url = string & { readonly __tag: unique symbol };
|
export type Url = string & { readonly __tag: unique symbol };
|
||||||
export const isUrl = isString;
|
export const isUrl = toIsNewtype(isString, "" as Url);
|
||||||
|
|
||||||
export type Version = string & { readonly __tag: unique symbol };
|
export type Version = string & { readonly __tag: unique symbol };
|
||||||
export const isVersion = isString;
|
export const isVersion = toIsNewtype(isString, "" as Version);
|
||||||
|
|
||||||
export type EmailAddress = string & { readonly __tag: unique symbol };
|
export type EmailAddress = string & { readonly __tag: unique symbol };
|
||||||
export const isEmailAddress = isString;
|
export const isEmailAddress = toIsNewtype(isString, "" as EmailAddress);
|
||||||
|
|
||||||
export type NodeStatistics = {
|
export type NodeStatistics = {
|
||||||
registered: number;
|
registered: number;
|
||||||
|
@@ -321,31 +332,33 @@ export function isClientConfig(arg: unknown): arg is ClientConfig {
     );
 }
 
-// TODO: Token type.
 export type Token = string & { readonly __tag: unique symbol };
-export const isToken = isString;
+export const isToken = toIsNewtype(isString, "" as Token);
 
 export type FastdKey = string & { readonly __tag: unique symbol };
-export const isFastdKey = isString;
+export const isFastdKey = toIsNewtype(isString, "" as FastdKey);
 
 export type MAC = string & { readonly __tag: unique symbol };
-export const isMAC = isString;
+export const isMAC = toIsNewtype(isString, "" as MAC);
 
 export type DurationSeconds = number & { readonly __tag: unique symbol };
-export const isDurationSeconds = isNumber;
+export const isDurationSeconds = toIsNewtype(isNumber, NaN as DurationSeconds);
 
+export type DurationMilliseconds = number & { readonly __tag: unique symbol };
+export const isDurationMilliseconds = toIsNewtype(isNumber, NaN as DurationMilliseconds);
+
 export type UnixTimestampSeconds = number & { readonly __tag: unique symbol };
-export const isUnixTimestampSeconds = isNumber;
+export const isUnixTimestampSeconds = toIsNewtype(isNumber, NaN as UnixTimestampSeconds);
 
 export type UnixTimestampMilliseconds = number & { readonly __tag: unique symbol };
-export const isUnixTimestampMilliseconds = isNumber;
+export const isUnixTimestampMilliseconds = toIsNewtype(isNumber, NaN as UnixTimestampMilliseconds);
 
 export function toUnixTimestampSeconds(ms: UnixTimestampMilliseconds): UnixTimestampSeconds {
     return Math.floor(ms) as UnixTimestampSeconds;
 }
 
 export type MonitoringToken = string & { readonly __tag: unique symbol };
-export const isMonitoringToken = isString;
+export const isMonitoringToken = toIsNewtype(isString, "" as MonitoringToken);
 
 export enum MonitoringState {
     ACTIVE = "active",
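The tagged number types above keep seconds and milliseconds from being mixed up at compile time. A minimal sketch of how that plays out follows; note that the division by 1000 below is what one would normally expect for a ms-to-s conversion, whereas the context line kept in the hunk only applies `Math.floor(ms)`, which is worth double-checking unless the division was simply lost in rendering:

```
// Sketch with assumed conversion logic; the type names match the diff.
type UnixTimestampSeconds = number & { readonly __tag: unique symbol };
type UnixTimestampMilliseconds = number & { readonly __tag: unique symbol };

function toUnixTimestampSeconds(ms: UnixTimestampMilliseconds): UnixTimestampSeconds {
    return Math.floor(ms / 1000) as UnixTimestampSeconds; // assumed; the diff floors without dividing
}

const nowMs = Date.now() as UnixTimestampMilliseconds;
const nowSeconds: UnixTimestampSeconds = toUnixTimestampSeconds(nowMs);

// Mixing the units is now a compile error instead of a silent factor-1000 bug:
// const wrong: UnixTimestampSeconds = nowMs; // does not type-check
```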
@@ -356,15 +369,16 @@ export enum MonitoringState {
 export const isMonitoringState = toIsEnum(MonitoringState);
 
 export type NodeId = string & { readonly __tag: unique symbol };
+export const isNodeId = toIsNewtype(isString, "" as NodeId);
 
 export type Hostname = string & { readonly __tag: unique symbol };
-export const isHostname = isString;
+export const isHostname = toIsNewtype(isString, "" as Hostname);
 
 export type Nickname = string & { readonly __tag: unique symbol };
-export const isNickname = isString;
+export const isNickname = toIsNewtype(isString, "" as Nickname);
 
 export type Coordinates = string & { readonly __tag: unique symbol };
-export const isCoordinates = isString;
+export const isCoordinates = toIsNewtype(isString, "" as Coordinates);
 
 /**
  * Basic node data.

@@ -473,10 +487,10 @@ export enum OnlineState {
 export const isOnlineState = toIsEnum(OnlineState);
 
 export type Site = string & { readonly __tag: unique symbol };
-export const isSite = isString;
+export const isSite = toIsNewtype(isString, "" as Site);
 
 export type Domain = string & { readonly __tag: unique symbol };
-export const isDomain = isString;
+export const isDomain = toIsNewtype(isString, "" as Domain);
 
 /**
  * Represents a node in the context of a Freifunk site and domain.
@@ -2,7 +2,7 @@ import _ from "lodash";
 
 export function inCondition<T>(field: string, list: T[]): {query: string, params: T[]} {
     return {
-        query: '(' + field + ' IN (' + _.join(_.times(list.length, _.constant('?')), ', ') + '))',
+        query: '(' + field + ' IN (' + _.times(list.length, () =>'?').join(', ') + '))',
         params: list,
     }
 }
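For reference, the rewritten placeholder builder produces one `?` per list element. An assumed usage (the call below is illustrative, not part of the diff) looks like this:

```
import _ from "lodash";

// The helper from the hunk above, repeated here so the example is self-contained:
function inCondition<T>(field: string, list: T[]): { query: string, params: T[] } {
    return {
        query: '(' + field + ' IN (' + _.times(list.length, () => '?').join(', ') + '))',
        params: list,
    };
}

const condition = inCondition("mac", ["aa:bb:cc:dd:ee:ff", "11:22:33:44:55:66"]);
// condition.query  === "(mac IN (?, ?))"
// condition.params === ["aa:bb:cc:dd:ee:ff", "11:22:33:44:55:66"]
```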
@@ -9,8 +9,12 @@ import {
     EnumTypeGuard,
     EnumValue,
     type GenericSortField,
-    isJSONObject, isNumber, isString, isUndefined,
+    isJSONObject,
+    isNumber,
+    isString,
+    isUndefined,
     JSONObject,
+    JSONValue,
     SortDirection,
     TypeGuard
 } from "../types";

@@ -36,7 +40,9 @@ export type OrderByClause = { query: string, params: any[] };
 export type LimitOffsetClause = { query: string, params: any[] };
 export type FilterClause = { query: string, params: any[] };
 
-function respond(res: Response, httpCode: number, data: any, type: string): void {
+function respond(res: Response, httpCode: number, data: string, type: "html"): void;
+function respond(res: Response, httpCode: number, data: JSONValue, type: "json"): void;
+function respond(res: Response, httpCode: number, data: JSONValue, type: "html" | "json"): void {
     switch (type) {
         case 'html':
             res.writeHead(httpCode, {'Content-Type': 'text/html'});
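The split into overloads above ties the payload type to the content type: `'html'` responses must be handed a string, `'json'` responses a `JSONValue`. A small illustration follows; the `JSONValue`/`Response` stand-ins are assumptions so the snippet is self-contained, and the calls are illustrative rather than taken from the diff:

```
// Stand-in types; the real ones live in the project's own modules.
type JSONValue = null | boolean | number | string | JSONValue[] | { [key: string]: JSONValue };
interface Response { writeHead(code: number, headers: Record<string, string>): void; end(body: string): void; }

declare function respond(res: Response, httpCode: number, data: string, type: "html"): void;
declare function respond(res: Response, httpCode: number, data: JSONValue, type: "json"): void;

declare const res: Response;
respond(res, 200, "<p>ok</p>", "html");         // ok: html responses carry a string
respond(res, 200, { status: "ok" }, "json");    // ok: json responses carry a JSONValue
// respond(res, 200, { status: "ok" }, "html"); // no longer type-checks
```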
@@ -184,7 +190,7 @@ export function filter<E>(entities: E[], allowedFilterFields: string[], restPara
         if (!query) {
             return true;
         }
-        return _.some(allowedFilterFields, (field: string): boolean => {
+        return allowedFilterFields.some((field: string): boolean => {
             if (!query) {
                 return true;
             }

@@ -209,15 +215,15 @@ export function filter<E>(entities: E[], allowedFilterFields: string[], restPara
     const filters = restParams.filters;
 
     function filtersMatch(entity: Entity): boolean {
-        if (_.isEmpty(filters)) {
+        if (isUndefined(filters) || _.isEmpty(filters)) {
             return true;
         }
 
-        return _.every(filters, (value: any, key: string): boolean => {
+        return Object.entries(filters).every(([key, value]) => {
             if (isUndefined(value)) {
                 return true;
             }
-            if (_.startsWith(key, 'has')) {
+            if (key.startsWith('has')) {
                 const entityKey = key.substring(3, 4).toLowerCase() + key.substring(4);
                 return _.isEmpty(entity[entityKey]).toString() !== value;
             }
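A note on the `has…` handling kept above: the filter key is mapped to the entity field by lower-casing the character after the `has` prefix, and the string filter value (`"true"`/`"false"`) is compared against whether that field is empty. A small worked example (the `hasKey` filter and the entity literal are made up for this note):

```
import _ from "lodash";

const key = "hasKey";                                                   // hypothetical filter key
const entityKey = key.substring(3, 4).toLowerCase() + key.substring(4); // -> "key"

const entity: { [k: string]: unknown } = { key: "aa:bb:cc:dd:ee:ff" };
const value = "true"; // filter values arrive as strings

// _.isEmpty("aa:bb:cc:dd:ee:ff") is false, so "false" !== "true" and the entity matches.
const matches = _.isEmpty(entity[entityKey]).toString() !== value;
```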
@@ -225,9 +231,7 @@ export function filter<E>(entities: E[], allowedFilterFields: string[], restPara
         });
     }
 
-    return _.filter(entities, function (entity) {
-        return queryMatches(entity) && filtersMatch(entity);
-    });
+    return entities.filter(entity => queryMatches(entity) && filtersMatch(entity));
 }
 
 export function sort<T extends Record<S, any>, S extends string>(entities: T[], isSortField: TypeGuard<S>, restParams: RestParams): T[] {

@@ -251,8 +255,7 @@ export function sort<T extends Record<S, any>, S extends string>(entities: T[],
         let order = 0;
         if (as < bs) {
             order = -1;
-        }
-        else if (bs > as) {
+        } else if (bs > as) {
             order = 1;
         }
 
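One observation on the comparator context kept in the hunk above (nothing this commit changes): `else if (bs > as)` tests the same condition as the preceding `if (as < bs)`, so the `order = 1` branch can never run. Presumably `as > bs` was intended; a corrected sketch:

```
// Sketch of the presumably intended comparison; not part of the commit.
function compare(as: string, bs: string): number {
    let order = 0;
    if (as < bs) {
        order = -1;
    } else if (as > bs) {
        order = 1;
    }
    return order;
}
```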
@@ -262,7 +265,7 @@ export function sort<T extends Record<S, any>, S extends string>(entities: T[],
     return sorted;
 }
 
-export function getPageEntities(entities: Entity[], restParams: RestParams) {
+export function getPageEntities<Entity>(entities: Entity[], restParams: RestParams): Entity[] {
     const page = restParams._page;
     const perPage = restParams._perPage;
 

@@ -291,11 +294,11 @@ export function filterClause<S>(
 
     return {
         query: filter.query + ' ' + orderBy.query + ' ' + limitOffset.query,
-        params: _.concat(filter.params, orderBy.params, limitOffset.params)
+        params: [...filter.params, ...orderBy.params, ...limitOffset.params]
     };
 }
 
-export function success(res: Response, data: any) {
+export function success(res: Response, data: JSONValue) {
     respond(res, 200, data, 'json');
 }
 

@@ -303,7 +306,7 @@ export function successHtml(res: Response, html: string) {
     respond(res, 200, html, 'html');
 }
 
-export function error(res: Response, err: { data: any, type: { code: number } }) {
+export function error(res: Response, err: { data: JSONValue, type: { code: number } }) {
     respond(res, err.type.code, err.data, 'json');
 }
 
@@ -1,8 +1,8 @@
 import _ from "lodash"
-import {MAC} from "../types";
+import {isString, MAC} from "../types";
 
 export function normalizeString(str: string): string {
-    return _.isString(str) ? str.trim().replace(/\s+/g, ' ') : str;
+    return isString(str) ? str.trim().replace(/\s+/g, ' ') : str;
 }
 
 export function normalizeMac(mac: MAC): MAC {

@@ -1,5 +1,4 @@
-import {DurationSeconds, UnixTimestampSeconds} from "../types";
-import _ from "lodash";
+import {DurationSeconds, isString, UnixTimestampSeconds} from "../types";
 import moment, {Moment} from "moment";
 
 export function now(): UnixTimestampSeconds {

@@ -45,7 +44,7 @@ export function formatTimestamp(timestamp: UnixTimestampSeconds): string {
 }
 
 export function parseTimestamp(timestamp: any): UnixTimestampSeconds | null {
-    if (!_.isString(timestamp)) {
+    if (!isString(timestamp)) {
         return null;
     }
     const parsed = moment.utc(timestamp);
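`parseTimestamp` now uses the project's own `isString` guard before handing the value to moment. Only the head of the function is visible in this hunk; the sketch below fills in a plausible tail (validity check plus conversion to epoch seconds) purely as an assumption, with `typeof` standing in for the imported guard so the snippet is self-contained:

```
import moment from "moment";

type UnixTimestampSeconds = number & { readonly __tag: unique symbol };

// Assumed continuation of parseTimestamp; the real tail is outside this hunk.
export function parseTimestamp(timestamp: unknown): UnixTimestampSeconds | null {
    if (typeof timestamp !== "string") {
        return null;
    }
    const parsed = moment.utc(timestamp);
    if (!parsed.isValid()) {
        return null;
    }
    return parsed.unix() as UnixTimestampSeconds;
}

// parseTimestamp("2022-07-30T12:00:00Z") -> epoch seconds
// parseTimestamp(42)                     -> null
```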
@@ -1,4 +1,3 @@
-import _ from "lodash"
 import {config} from "../config"
 import {MonitoringToken, Url} from "../types"
 

@@ -12,15 +11,10 @@ function formUrl(route: string, queryParams?: { [key: string]: string }): Url {
     }
     if (queryParams) {
         url += '?';
-        url += _.join(
-            _.map(
-                queryParams,
-                function (value, key) {
-                    return encodeURIComponent(key) + '=' + encodeURIComponent(value);
-                }
-            ),
-            '&'
-        );
+        url +=
+            Object.entries(queryParams)
+                .map(([key, value]) => encodeURIComponent(key) + '=' + encodeURIComponent(value))
+                .join("&");
     }
     return url as Url;
 }
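The lodash-free version above builds the query string by hand. For reference, a hedged alternative sketch (not what the commit does): Node's built-in `URLSearchParams` performs the same encoding and joining, though it encodes spaces as `+` rather than `%20`, which may or may not matter here.

```
// Alternative sketch only; the commit keeps explicit encodeURIComponent calls.
function buildQuery(queryParams: { [key: string]: string }): string {
    return new URLSearchParams(queryParams).toString();
}

// buildQuery({ token: "abc def", foo: "bar" }) === "token=abc+def&foo=bar"
```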
@@ -1,6 +1,7 @@
 // ATTENTION: Those constraints are no longer the same file as for the client / admin interface.
 // Make sure changes are also reflected in /shared/validation/constraints.js.
 
+// noinspection RegExpSimplifiable
 const CONSTRAINTS = {
     id:{
         type: 'string',

@@ -1,7 +1,6 @@
-import _ from "lodash";
-
 import {parseInteger} from "../utils/strings";
 import Logger from "../logger";
+import {isBoolean, isNumber, isObject, isOptional, isRegExp, isString, toIsArray} from "../types";
 
 export interface Constraint {
     type: string,
@@ -18,114 +17,80 @@ export interface Constraint {
     regex?: RegExp,
 }
 
-export type Constraints = {[key: string]: Constraint};
-export type Values = {[key: string]: any};
+export type Constraints = { [key: string]: Constraint };
+export type Values = { [key: string]: any };
 
-function isStringArray(arr: any): arr is string[] {
-    return _.isArray(arr) && _.every(arr, (val: any) => _.isString(val));
-}
-
-export function isConstraint(val: any): val is Constraint {
-    if (!_.isObject(val)) {
+export function isConstraint(arg: unknown): arg is Constraint {
+    if (!isObject(arg)) {
         return false;
     }
 
-    const constraint = val as {[key: string]: any};
-
-    if (!("type" in constraint) || !_.isString(constraint.type)) {
-        return false;
-    }
-
-    if ("optional" in constraint
-        && !_.isUndefined(constraint.optional)
-        && !_.isBoolean(constraint.optional)) {
-        return false;
-    }
-
-    if ("allowed" in constraint
-        && !_.isUndefined(constraint.allowed)
-        && !isStringArray(constraint.allowed)) {
-        return false;
-    }
-
-    if ("min" in constraint
-        && !_.isUndefined(constraint.min)
-        && !_.isNumber(constraint.min)) {
-        return false;
-    }
-
-    if ("max" in constraint
-        && !_.isUndefined(constraint.max)
-        && !_.isNumber(constraint.max)) {
-        return false;
-    }
-
-    // noinspection RedundantIfStatementJS
-    if ("regex" in constraint
-        && !_.isUndefined(constraint.regex)
-        && !_.isRegExp(constraint.regex)) {
-        return false;
-    }
-
-    return true;
-}
-
-export function isConstraints(constraints: any): constraints is Constraints {
-    if (!_.isObject(constraints)) {
-        return false;
-    }
-
-    return _.every(
-        constraints,
-        (constraint: any, key: any) => _.isString(key) && isConstraint(constraint)
+    const constraint = arg as Constraint;
+    return (
+        isString(constraint.type) &&
+        // default?: any
+        isOptional(constraint.optional, isBoolean) &&
+        isOptional(constraint.allowed, toIsArray(isString)) &&
+        isOptional(constraint.min, isNumber) &&
+        isOptional(constraint.max, isNumber) &&
+        isOptional(constraint.regex, isRegExp)
     );
 }
 
+export function isConstraints(constraints: unknown): constraints is Constraints {
+    if (!isObject(constraints)) {
+        return false;
+    }
+
+    return Object.entries(constraints).every(([key, constraint]) => isString(key) && isConstraint(constraint));
+}
+
 // TODO: sanitize input for further processing as specified by constraints (correct types, trimming, etc.)
 
-function isValidBoolean(value: any): boolean {
-    return _.isBoolean(value) || value === 'true' || value === 'false';
+function isValidBoolean(value: unknown): boolean {
+    return isBoolean(value) || value === 'true' || value === 'false';
 }
 
-function isValidNumber(constraint: Constraint, value: any): boolean {
-    if (_.isString(value)) {
+function isValidNumber(constraint: Constraint, value: unknown): boolean {
+    if (isString(value)) {
         value = parseInteger(value);
     }
 
-    if (!_.isNumber(value)) {
+    if (!isNumber(value)) {
         return false;
     }
 
-    if (_.isNaN(value) || !_.isFinite(value)) {
+    if (isNaN(value) || !isFinite(value)) {
         return false;
     }
 
-    if (_.isNumber(constraint.min) && value < constraint.min) {
+    if (isNumber(constraint.min) && value < constraint.min) {
         return false;
     }
 
     // noinspection RedundantIfStatementJS
-    if (_.isNumber(constraint.max) && value > constraint.max) {
+    if (isNumber(constraint.max) && value > constraint.max) {
         return false;
     }
 
     return true;
 }
 
-function isValidEnum(constraint: Constraint, value: any): boolean {
-    if (!_.isString(value)) {
+function isValidEnum(constraint: Constraint, value: unknown): boolean {
+    if (!isString(value)) {
         return false;
     }
 
-    return _.indexOf(constraint.allowed, value) >= 0;
+    const allowed = constraint.allowed || [];
+    return allowed.indexOf(value) >= 0;
 }
 
-function isValidString(constraint: Constraint, value: any): boolean {
+function isValidString(constraint: Constraint, value: unknown): boolean {
     if (!constraint.regex) {
         throw new Error("String constraints must have regex set: " + constraint);
     }
 
-    if (!_.isString(value)) {
+    if (!isString(value)) {
         return false;
     }
 
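The rewritten `isConstraint` collapses the per-field `if` cascades into one boolean expression built from `isOptional`, `toIsArray`, and the other guards imported above; only `type` remains mandatory. Two illustrative inputs (made up for this note, not taken from the project's constraint definitions):

```
// Values a caller might pass to isConstraint(); expected results are noted in comments.
const hostnameConstraint = {
    type: "string",
    optional: false,
    regex: /^[-a-z0-9_]{1,32}$/i,
};
// isConstraint(hostnameConstraint) === true: type is a string and every optional field has the right type.

const brokenConstraint = {
    type: "number",
    min: "0", // must be a number, so isOptional(constraint.min, isNumber) fails
};
// isConstraint(brokenConstraint) === false
```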
@@ -133,7 +98,7 @@ function isValidString(constraint: Constraint, value: any): boolean {
     return (trimmed === '' && constraint.optional) || constraint.regex.test(trimmed);
 }
 
-function isValid(constraint: Constraint, acceptUndefined: boolean, value: any): boolean {
+function isValid(constraint: Constraint, acceptUndefined: boolean, value: unknown): boolean {
     if (value === undefined) {
         return acceptUndefined || constraint.optional === true;
     }

@@ -174,10 +139,10 @@ function areValid(constraints: Constraints, acceptUndefined: boolean, values: Va
     return true;
 }
 
-export function forConstraint (constraint: Constraint, acceptUndefined: boolean): (value: any) => boolean {
-    return ((value: any): boolean => isValid(constraint, acceptUndefined, value));
+export function forConstraint(constraint: Constraint, acceptUndefined: boolean): (value: unknown) => boolean {
+    return ((value: unknown): boolean => isValid(constraint, acceptUndefined, value));
 }
 
-export function forConstraints (constraints: Constraints, acceptUndefined: boolean): (values: Values) => boolean {
+export function forConstraints(constraints: Constraints, acceptUndefined: boolean): (values: Values) => boolean {
     return ((values: Values): boolean => areValid(constraints, acceptUndefined, values));
 }
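These factories bind a constraint (or a whole constraint map) to a reusable predicate. A hedged usage sketch follows; the declarations and the nickname constraint are assumptions made so the example stands alone, and the project's real constraint values may differ:

```
// Stand-in declarations; the real functions are the ones in the hunk above.
type Constraint = { type: string, optional?: boolean, allowed?: string[], min?: number, max?: number, regex?: RegExp };
type Values = { [key: string]: any };
declare function forConstraint(constraint: Constraint, acceptUndefined: boolean): (value: unknown) => boolean;
declare function forConstraints(constraints: { [key: string]: Constraint }, acceptUndefined: boolean): (values: Values) => boolean;

// Assumed constraint value:
const nicknameConstraint: Constraint = { type: "string", regex: /^[-a-z0-9_ äöüß]{1,64}$/i };

const isValidNickname = forConstraint(nicknameConstraint, false);
// isValidNickname("Moritz")  -> true (matches the assumed regex)
// isValidNickname(undefined) -> false, since acceptUndefined is false

const areNodeValuesValid = forConstraints({ nickname: nicknameConstraint }, false);
// areNodeValuesValid({ nickname: "Moritz" }) -> true when every constrained value passes
```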
yarn.lock

@@ -935,10 +935,10 @@
   resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.34.tgz#3b0b6a50ff797280b8d000c6281d229f9c538cef"
   integrity sha512-XImEz7XwTvDBtzlTnm8YvMqGW/ErMWBsKZ+hMTvnDIjGCKxwK5Xpc+c/oQjOauwq8M4OS11hEkpjX8rrI/eEgA==
 
-"@types/node@^18.0.6":
-  version "18.0.6"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-18.0.6.tgz#0ba49ac517ad69abe7a1508bc9b3a5483df9d5d7"
-  integrity sha512-/xUq6H2aQm261exT6iZTMifUySEt4GR5KX8eYyY+C4MSNPqSh9oNIP7tz2GLKTlFaiBbgZNxffoR3CVRG+cljw==
+"@types/node@^18.6.2":
+  version "18.6.2"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-18.6.2.tgz#ffc5f0f099d27887c8d9067b54e55090fcd54126"
+  integrity sha512-KcfkBq9H4PI6Vpu5B/KoPeuVDAbmi+2mDBqGPGUgoL7yXQtcWGu2vJWmmRkneWK3Rh0nIAX192Aa87AqKHYChQ==
 
 "@types/nodemailer@^6.4.4":
   version "6.4.4"

@@ -1962,10 +1962,10 @@ commander@^7.2.0:
   resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7"
   integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==
 
-commander@^9.3.0:
-  version "9.3.0"
-  resolved "https://registry.yarnpkg.com/commander/-/commander-9.3.0.tgz#f619114a5a2d2054e0d9ff1b31d5ccf89255e26b"
-  integrity sha512-hv95iU5uXPbK83mjrJKuZyFM/LBAoCV/XhVGkS5Je6tl7sxr6A0ITMw5WoRV46/UaJ46Nllm3Xt7IaJhXTIkzw==
+commander@^9.4.0:
+  version "9.4.0"
+  resolved "https://registry.yarnpkg.com/commander/-/commander-9.4.0.tgz#bc4a40918fefe52e22450c111ecd6b7acce6f11c"
+  integrity sha512-sRPT+umqkz90UA8M1yqYfnHlZA7fF6nSphDtxeywPZ49ysjxDQybzk13CL+mXekDRG92skbcqCLVovuCusNmFw==
 
 commondir@^1.0.1:
   version "1.0.1"

@@ -7287,10 +7287,10 @@ yargs@^17.3.1:
     y18n "^5.0.5"
     yargs-parser "^21.0.0"
 
-yarn-audit-fix@^9.3.2:
-  version "9.3.2"
-  resolved "https://registry.yarnpkg.com/yarn-audit-fix/-/yarn-audit-fix-9.3.2.tgz#9268aeaf70faafd6d8b8a71d0b8c8d97d6b809ec"
-  integrity sha512-hRPu2FRTLF5kL+fgq6NZDVgvGV7zEO6ghgfXoFmseDtDzqBIfKbGVNL+XqJ1fIil70x6XyrQwyARyyrMZtxpaw==
+yarn-audit-fix@^9.3.3:
+  version "9.3.3"
+  resolved "https://registry.yarnpkg.com/yarn-audit-fix/-/yarn-audit-fix-9.3.3.tgz#05e1fab4fb6dd137db6c31006d569dc7144d41c3"
+  integrity sha512-EFKcjEi3GSQ3QL7dV835ovfcw8of6RbiRdyeA2n67r7dcJYctxOqqmFHQPBoEvQSDXuYH/Zwk/8J6QUT4R8E5A==
   dependencies:
     "@types/find-cache-dir" "^3.2.1"
     "@types/fs-extra" "^9.0.13"

@@ -7299,7 +7299,7 @@ yarn-audit-fix@^9.3.2:
     "@types/yarnpkg__lockfile" "^1.1.5"
     "@yarnpkg/lockfile" "^1.1.0"
     chalk "^5.0.1"
-    commander "^9.3.0"
+    commander "^9.4.0"
     find-cache-dir "^3.3.2"
     find-up "^6.3.0"
     fs-extra "^10.1.0"