feat: Normalized server logging (#2567)

* feat: Normalize logging

* Remove scattered console.error + Sentry.captureException

* Remove mention of debug

* cleanup dev output

* Edge cases, docs

* Refactor: Move logger, metrics, sentry under 'logging' folder.
Trying to reduce the amount of things under generic 'utils'

* cleanup, last few console calls
This commit is contained in:
Tom Moor 2021-09-14 18:04:35 -07:00 committed by GitHub
parent 6c605cf720
commit 83a61b87ed
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
36 changed files with 508 additions and 264 deletions

View File

@ -120,9 +120,9 @@ WEB_CONCURRENCY=1
# especially large Word documents with embedded imagery
MAXIMUM_IMPORT_SIZE=5120000
# You may enable or disable debugging categories to increase the noisiness of
# logs. The default is a good balance
DEBUG=cache,presenters,events,emails,mailer,utils,http,server,processors
# You can remove this line if your reverse proxy already logs incoming http
# requests and this ends up being duplicative
DEBUG=http
# Comma separated list of domains to be allowed to signin to the wiki. If not
# set, all domains are allowed by default when using Google OAuth to signin

View File

@ -122,11 +122,9 @@ please refer to the [architecture document](docs/ARCHITECTURE.md) first for a hi
## Debugging
Outline uses [debug](https://www.npmjs.com/package/debug). To enable debugging output, the following categories are available:
In development Outline outputs simple logging to the console, prefixed by categories. In production it outputs JSON logs; these can be easily parsed by your preferred log ingestion pipeline.
```
DEBUG=sql,cache,presenters,events,importer,exporter,emails,mailer
```
HTTP logging is disabled by default, but can be enabled by setting the `DEBUG=http` environment variable.
## Tests

View File

@ -15,8 +15,8 @@ import ScrollToTop from "components/ScrollToTop";
import Theme from "components/Theme";
import Toasts from "components/Toasts";
import Routes from "./routes";
import { initSentry } from "./utils/sentry";
import env from "env";
import { initSentry } from "utils/sentry";
initI18n();

View File

@ -9,6 +9,8 @@ declare var process: {
env: {
[string]: string,
},
stdout: Stream,
stderr: Stream,
};
declare var EDITOR_VERSION: string;

View File

@ -6,11 +6,11 @@
"scripts": {
"clean": "rimraf build",
"build:i18n": "i18next --silent 'app/**/*.js' 'server/**/*.js' && mkdir -p ./build/shared/i18n && cp -R ./shared/i18n/locales ./build/shared/i18n",
"build:server": "babel -d ./build/server ./server && babel -d ./build/shared ./shared && cp ./server/collaboration/Procfile ./build/server/collaboration/Procfile && cp package.json ./build && ln -sf \"$(pwd)/webpack.config.dev.js\" ./build",
"build:server": "babel --quiet -d ./build/server ./server && babel --quiet -d ./build/shared ./shared && cp ./server/collaboration/Procfile ./build/server/collaboration/Procfile && cp package.json ./build && ln -sf \"$(pwd)/webpack.config.dev.js\" ./build",
"build:webpack": "webpack --config webpack.config.prod.js",
"build": "yarn clean && yarn build:webpack && yarn build:i18n && yarn build:server",
"start": "node ./build/server/index.js",
"dev": "yarn concurrently --kill-others -n server,multiplayer \"node --inspect=0.0.0.0 build/server/index.js --services=websockets,admin,web,worker\" \"node build/server/index.js --services=collaboration --port=4000\"",
"dev": "yarn concurrently --kill-others -n api,collaboration -c \"blue,magenta\" \"node --inspect=0.0.0.0 build/server/index.js --services=websockets,admin,web,worker\" \"node build/server/index.js --services=collaboration --port=4000\"",
"dev:watch": "nodemon --exec \"yarn build:server && yarn build:i18n && yarn dev\" -e js --ignore build/ --ignore app/ --ignore flow-typed/",
"lint": "eslint app server shared",
"deploy": "git push heroku master",
@ -73,7 +73,6 @@
"datadog-metrics": "^0.9.3",
"date-fns": "2.22.1",
"dd-trace": "^0.32.2",
"debug": "^4.1.1",
"dotenv": "^4.0.0",
"emoji-regex": "^6.5.1",
"es6-error": "^4.1.1",
@ -175,6 +174,7 @@
"utf8": "^3.0.0",
"uuid": "^8.3.2",
"validator": "5.2.0",
"winston": "^3.3.3",
"y-indexeddb": "^9.0.6",
"y-prosemirror": "^1.0.9",
"yjs": "^13.5.12"

View File

@ -1,22 +1,19 @@
// @flow
import debug from "debug";
const log = debug("server");
export default class Logger {
import Logger from "../logging/logger";
export default class CollaborationLogger {
async onCreateDocument(data: { documentName: string }) {
log(`Created document "${data.documentName}"`);
Logger.info("collaboration", `Created document "${data.documentName}"`);
}
async onConnect(data: { documentName: string }) {
log(`New connection to "${data.documentName}"`);
Logger.info("collaboration", `New connection to "${data.documentName}"`);
}
async onDisconnect(data: { documentName: string }) {
log(`Connection to "${data.documentName}" closed`);
Logger.info("collaboration", `Connection to "${data.documentName}" closed`);
}
async onUpgrade() {
log("Upgrading connection");
Logger.info("collaboration", "Upgrading connection");
}
}

View File

@ -1,12 +1,11 @@
// @flow
import debug from "debug";
import { debounce } from "lodash";
import * as Y from "yjs";
import documentUpdater from "../commands/documentUpdater";
import Logger from "../logging/logger";
import { Document, User } from "../models";
import markdownToYDoc from "./utils/markdownToYDoc";
const log = debug("server");
const DELAY = 3000;
export default class Persistence {
@ -30,12 +29,18 @@ export default class Persistence {
if (document.state) {
const ydoc = new Y.Doc();
log(`Document ${documentId} is already in state`);
Logger.info(
"collaboration",
`Document ${documentId} is in database state`
);
Y.applyUpdate(ydoc, document.state);
return ydoc;
}
log(`Document ${documentId} is not in state, creating state from markdown`);
Logger.info(
"collaboration",
`Document ${documentId} is not in state, creating from markdown`
);
const ydoc = markdownToYDoc(document.text, fieldName);
const state = Y.encodeStateAsUpdate(ydoc);
@ -55,7 +60,7 @@ export default class Persistence {
}) => {
const [, documentId] = documentName.split(".");
log(`persisting ${documentId}`);
Logger.info("collaboration", `Persisting ${documentId}`);
await documentUpdater({
documentId,

View File

@ -1,40 +1,40 @@
// @flow
import * as metrics from "../utils/metrics";
import Metrics from "../logging/metrics";
let count = 0;
export default class Tracing {
async onCreateDocument({ documentName }: { documentName: string }) {
metrics.increment("collaboration.create_document", { documentName });
Metrics.increment("collaboration.create_document", { documentName });
// TODO: Waiting for `instance` available in payload
// metrics.gaugePerInstance(
// Metrics.gaugePerInstance(
// "collaboration.documents_count",
// instance.documents.size()
// );
}
async onAuthenticationFailed({ documentName }: { documentName: string }) {
metrics.increment("collaboration.authentication_failed", { documentName });
Metrics.increment("collaboration.authentication_failed", { documentName });
}
async onConnect({ documentName }: { documentName: string }) {
metrics.increment("collaboration.connect", { documentName });
metrics.gaugePerInstance("collaboration.connections_count", ++count);
Metrics.increment("collaboration.connect", { documentName });
Metrics.gaugePerInstance("collaboration.connections_count", ++count);
}
async onDisconnect({ documentName }: { documentName: string }) {
metrics.increment("collaboration.disconnect", { documentName });
metrics.gaugePerInstance("collaboration.connections_count", --count);
Metrics.increment("collaboration.disconnect", { documentName });
Metrics.gaugePerInstance("collaboration.connections_count", --count);
// TODO: Waiting for `instance` available in payload
// metrics.gaugePerInstance(
// Metrics.gaugePerInstance(
// "collaboration.documents_count",
// instance.documents.size()
// );
}
async onChange({ documentName }: { documentName: string }) {
metrics.increment("collaboration.change", { documentName });
Metrics.increment("collaboration.change", { documentName });
}
}

View File

@ -2,20 +2,18 @@
import fs from "fs";
import os from "os";
import path from "path";
import debug from "debug";
import File from "formidable/lib/file";
import invariant from "invariant";
import { values, keys } from "lodash";
import { v4 as uuidv4 } from "uuid";
import { parseOutlineExport } from "../../shared/utils/zip";
import { FileImportError } from "../errors";
import Logger from "../logging/logger";
import { Attachment, Event, Document, Collection, User } from "../models";
import attachmentCreator from "./attachmentCreator";
import documentCreator from "./documentCreator";
import documentImporter from "./documentImporter";
const log = debug("commands");
export default async function collectionImporter({
file,
type,
@ -155,7 +153,7 @@ export default async function collectionImporter({
continue;
}
log(`Skipped importing ${item.path}`);
Logger.info("commands", `Skipped importing ${item.path}`);
}
// All collections, documents, and attachments have been created time to

View File

@ -1,11 +1,9 @@
// @flow
import debug from "debug";
import Logger from "../logging/logger";
import { Document, Attachment } from "../models";
import { sequelize } from "../sequelize";
import parseAttachmentIds from "../utils/parseAttachmentIds";
const log = debug("commands");
export async function documentPermanentDeleter(documents: Document[]) {
const activeDocument = documents.find((doc) => !doc.deletedAt);
@ -47,9 +45,9 @@ export async function documentPermanentDeleter(documents: Document[]) {
if (attachment) {
await attachment.destroy();
log(`Attachment ${attachmentId} deleted`);
Logger.info("commands", `Attachment ${attachmentId} deleted`);
} else {
log(`Unknown attachment ${attachmentId} ignored`);
Logger.info("commands", `Unknown attachment ${attachmentId} ignored`);
}
}
}

View File

@ -1,13 +1,11 @@
// @flow
import debug from "debug";
import { MaximumTeamsError } from "../errors";
import Logger from "../logging/logger";
import { Team, AuthenticationProvider } from "../models";
import { sequelize } from "../sequelize";
import { getAllowedDomains } from "../utils/authentication";
import { generateAvatarUrl } from "../utils/avatars";
const log = debug("server");
type TeamCreatorResult = {|
team: Team,
authenticationProvider: AuthenticationProvider,
@ -111,7 +109,10 @@ export default async function teamCreator({
try {
await team.provisionSubdomain(subdomain);
} catch (err) {
log(`Provisioning subdomain failed: ${err.message}`);
Logger.error("Provisioning subdomain failed", err, {
teamId: team.id,
subdomain,
});
}
return {

View File

@ -3,7 +3,6 @@ import env from "./env"; // eslint-disable-line import/order
import "./tracing"; // must come before importing any instrumented module
import http from "http";
import debug from "debug";
import Koa from "koa";
import compress from "koa-compress";
import helmet from "koa-helmet";
@ -13,9 +12,10 @@ import Router from "koa-router";
import { uniq } from "lodash";
import stoppable from "stoppable";
import throng from "throng";
import Logger from "./logging/logger";
import { requestErrorHandler } from "./logging/sentry";
import services from "./services";
import { getArg } from "./utils/args";
import { requestErrorHandler } from "./utils/sentry";
import { checkEnv, checkMigrations } from "./utils/startup";
import { checkUpdates } from "./utils/updates";
@ -55,12 +55,12 @@ async function start(id: string, disconnect: () => void) {
const app = new Koa();
const server = stoppable(http.createServer(app.callback()));
const httpLogger = debug("http");
const log = debug("server");
const router = new Router();
// install basic middleware shared by all services
app.use(logger((str, args) => httpLogger(str)));
if ((env.DEBUG || "").includes("http")) {
app.use(logger((str, args) => Logger.info("http", str)));
}
app.use(compress());
app.use(helmet());
@ -87,7 +87,7 @@ async function start(id: string, disconnect: () => void) {
throw new Error(`Unknown service ${name}`);
}
log(`Starting ${name} service`);
Logger.info("lifecycle", `Starting ${name} service`);
const init = services[name];
await init(app, server);
}
@ -98,7 +98,7 @@ async function start(id: string, disconnect: () => void) {
server.on("listening", () => {
const address = server.address();
console.log(`\n> Listening on http://localhost:${address.port}\n`);
Logger.info("lifecycle", `Listening on http://localhost:${address.port}`);
});
server.listen(normalizedPortFlag || env.PORT || "3000");
@ -107,7 +107,7 @@ async function start(id: string, disconnect: () => void) {
process.once("SIGINT", shutdown);
function shutdown() {
console.log("\n> Stopping server");
Logger.info("lifecycle", "Stopping server");
server.stop(disconnect);
}
}

117
server/logging/logger.js Normal file
View File

@ -0,0 +1,117 @@
// @flow
import chalk from "chalk";
import winston from "winston";
import env from "../env";
import Metrics from "../logging/metrics";
import Sentry from "../logging/sentry";
const isProduction = env.NODE_ENV === "production";
type LogCategory =
| "lifecycle"
| "collaboration"
| "http"
| "commands"
| "processor"
| "email"
| "queue"
| "database"
| "utils";
type Extra = { [key: string]: any };
class Logger {
output: any;
constructor() {
this.output = winston.createLogger();
this.output.add(
new winston.transports.Console({
format: isProduction
? winston.format.json()
: winston.format.combine(
winston.format.colorize(),
winston.format.printf(
({ message, label }) =>
`${label ? chalk.bold("[" + label + "] ") : ""}${message}`
)
),
})
);
}
/**
* Log information
*
* @param category A log message category that will be prepended
* @param extra Arbitrary data to be logged that will appear in prod logs
*/
info(label: LogCategory, message: string, extra?: Extra) {
this.output.info(message, { ...extra, label });
}
/**
* Debug information
*
* @param category A log message category that will be prepended
* @param extra Arbitrary data to be logged that will appear in prod logs
*/
debug(label: LogCategory, message: string, extra?: Extra) {
this.output.debug(message, { ...extra, label });
}
/**
* Log a warning
*
* @param message A warning message
* @param extra Arbitrary data to be logged that will appear in prod logs
*/
warn(message: string, extra?: Extra) {
Metrics.increment("logger.warning");
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
for (const key in extra) {
scope.setExtra(key, extra[key]);
scope.setLevel(Sentry.Severity.Warning);
}
Sentry.captureMessage(message);
});
}
if (isProduction) {
this.output.warn(message, extra);
} else {
console.warn(message, extra);
}
}
/**
* Report a runtime error
*
* @param message A description of the error
* @param error The error that occurred
* @param extra Arbitrary data to be logged that will appear in prod logs
*/
error(message: string, error: Error, extra?: Extra) {
Metrics.increment("logger.error");
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
for (const key in extra) {
scope.setExtra(key, extra[key]);
scope.setLevel(Sentry.Severity.Error);
}
Sentry.captureException(error);
});
}
if (isProduction) {
this.output.error(message, { error: error.message, stack: error.stack });
} else {
console.error(message, { error, extra });
}
}
}
export default new Logger();

51
server/logging/metrics.js Normal file
View File

@ -0,0 +1,51 @@
// @flow
import ddMetrics from "datadog-metrics";
class Metrics {
enabled: boolean = !!process.env.DD_API_KEY;
constructor() {
if (!this.enabled) {
return;
}
ddMetrics.init({
apiKey: process.env.DD_API_KEY,
prefix: "outline.",
defaultTags: [`env:${process.env.DD_ENV || process.env.NODE_ENV}`],
});
}
gauge(key: string, value: number, tags?: string[]): void {
if (!this.enabled) {
return;
}
return ddMetrics.gauge(key, value, tags);
}
gaugePerInstance(key: string, value: number, tags?: string[] = []): void {
if (!this.enabled) {
return;
}
const instanceId = process.env.INSTANCE_ID || process.env.HEROKU_DYNO_ID;
if (!instanceId) {
throw new Error(
"INSTANCE_ID or HEROKU_DYNO_ID must be set when using DataDog"
);
}
return ddMetrics.gauge(key, value, [...tags, `instance:${instanceId}`]);
}
increment(key: string, tags?: { [string]: string }): void {
if (!this.enabled) {
return;
}
return ddMetrics.increment(key, tags);
}
}
export default new Metrics();

View File

@ -1,6 +1,4 @@
// @flow
import * as Sentry from "@sentry/node";
import debug from "debug";
import nodemailer from "nodemailer";
import Oy from "oy-vey";
import * as React from "react";
@ -31,9 +29,9 @@ import {
import { SigninEmail, signinEmailText } from "./emails/SigninEmail";
import { WelcomeEmail, welcomeEmailText } from "./emails/WelcomeEmail";
import { baseStyles } from "./emails/components/EmailLayout";
import Logger from "./logging/logger";
import { emailsQueue } from "./queues";
const log = debug("emails");
const useTestEmailService =
process.env.NODE_ENV === "development" && !process.env.SMTP_USERNAME;
@ -101,7 +99,10 @@ export class Mailer {
}
if (useTestEmailService) {
log("SMTP_USERNAME not provided, generating test account…");
Logger.info(
"email",
"SMTP_USERNAME not provided, generating test account…"
);
try {
let testAccount = await nodemailer.createTestAccount();
@ -118,7 +119,10 @@ export class Mailer {
this.transporter = nodemailer.createTransport(smtpConfig);
} catch (err) {
log(`Could not generate test account: ${err.message}`);
Logger.error(
"Couldn't generate a test account with ethereal.email",
err
);
}
}
}
@ -134,7 +138,7 @@ export class Mailer {
});
try {
log(`Sending email "${data.title}" to ${data.to}`);
Logger.info("email", `Sending email "${data.title}" to ${data.to}`);
const info = await transporter.sendMail({
from: process.env.SMTP_FROM_EMAIL,
replyTo: process.env.SMTP_REPLY_EMAIL || process.env.SMTP_FROM_EMAIL,
@ -145,12 +149,13 @@ export class Mailer {
});
if (useTestEmailService) {
log("Email Preview URL: %s", nodemailer.getTestMessageUrl(info));
Logger.info(
"email",
`Preview Url: ${nodemailer.getTestMessageUrl(info)}`
);
}
} catch (err) {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err);
}
Logger.error(`Error sending email to ${data.to}`, err);
throw err; // Re-throw for queue to re-try
}
}

View File

@ -2,6 +2,7 @@
import passport from "@outlinewiki/koa-passport";
import { type Context } from "koa";
import type { AccountProvisionerResult } from "../commands/accountProvisioner";
import Logger from "../logging/logger";
import { signIn } from "../utils/authentication";
export default function createMiddleware(providerName: string) {
@ -11,7 +12,7 @@ export default function createMiddleware(providerName: string) {
{ session: false },
async (err, user, result: AccountProvisionerResult) => {
if (err) {
console.error(err);
Logger.error("Error during authentication", err);
if (err.id) {
const notice = err.id.replace(/_/g, "-");
@ -36,7 +37,10 @@ export default function createMiddleware(providerName: string) {
// Correlation ID, Timestamp in these two query string parameters.
const { error, error_description } = ctx.request.query;
if (error && error_description) {
console.error(error_description);
Logger.error(
"Error from Azure during authentication",
new Error(error_description)
);
// Display only the descriptive message to the user, log the rest
const description = error_description.split("Trace ID")[0];

View File

@ -8,6 +8,7 @@ import {
stripSubdomain,
RESERVED_SUBDOMAINS,
} from "../../shared/utils/domains";
import Logger from "../logging/logger";
import { DataTypes, sequelize, Op } from "../sequelize";
import { generateAvatarUrl } from "../utils/avatars";
import { publicS3Endpoint, uploadToS3FromUrl } from "../utils/s3";
@ -134,8 +135,7 @@ const uploadAvatar = async (model) => {
);
if (newUrl) model.avatarUrl = newUrl;
} catch (err) {
// we can try again next time
console.error(err);
Logger.error("Error uploading avatar to S3", err, { url: avatarUrl });
}
}
};

View File

@ -5,6 +5,7 @@ import JWT from "jsonwebtoken";
import { v4 as uuidv4 } from "uuid";
import { languages } from "../../shared/i18n";
import { ValidationError } from "../errors";
import Logger from "../logging/logger";
import { DataTypes, sequelize, encryptedFields, Op } from "../sequelize";
import { DEFAULT_AVATAR_HOST } from "../utils/avatars";
import { palette } from "../utils/color";
@ -195,8 +196,9 @@ const uploadAvatar = async (model) => {
);
if (newUrl) model.avatarUrl = newUrl;
} catch (err) {
// we can try again next time
console.error(err);
Logger.error("Couldn't upload user avatar image to S3", err, {
url: avatarUrl,
});
}
}
};

View File

@ -1,14 +1,12 @@
// @flow
import fs from "fs";
import debug from "debug";
import Logger from "../../logging/logger";
import mailer from "../../mailer";
import { FileOperation, Collection, Event, Team, User } from "../../models";
import type { Event as TEvent } from "../../types";
import { uploadToS3FromBuffer } from "../../utils/s3";
import { archiveCollections } from "../../utils/zip";
const log = debug("commands");
export default class ExportsProcessor {
async on(event: TEvent) {
switch (event.name) {
@ -30,7 +28,10 @@ export default class ExportsProcessor {
});
// heavy lifting of creating the zip file
log(`Archiving collections for file operation ${exportData.id}`);
Logger.info(
"processor",
`Archiving collections for file operation ${exportData.id}`
);
const filePath = await archiveCollections(collections);
let url, state;
@ -43,7 +44,10 @@ export default class ExportsProcessor {
size: stat.size,
});
log(`Uploading archive for file operation ${exportData.id}`);
Logger.info(
"processor",
`Uploading archive for file operation ${exportData.id}`
);
url = await uploadToS3FromBuffer(
readBuffer,
"application/zip",
@ -51,10 +55,15 @@ export default class ExportsProcessor {
"private"
);
log(`Upload complete for file operation ${exportData.id}`);
Logger.info(
"processor",
`Upload complete for file operation ${exportData.id}`
);
state = "complete";
} catch (e) {
log("Failed to export data", e);
} catch (error) {
Logger.error("Error exporting collection data", error, {
fileOperationId: exportData.id,
});
state = "error";
url = null;
} finally {

View File

@ -1,5 +1,5 @@
// @flow
import debug from "debug";
import Logger from "../../logging/logger";
import mailer from "../../mailer";
import {
View,
@ -12,8 +12,6 @@ import {
import { Op } from "../../sequelize";
import type { DocumentEvent, CollectionEvent, Event } from "../../types";
const log = debug("services");
export default class NotificationsProcessor {
async on(event: Event) {
switch (event.name) {
@ -98,7 +96,8 @@ export default class NotificationsProcessor {
});
if (view) {
log(
Logger.info(
"processor",
`suppressing notification to ${setting.userId} because update viewed`
);
continue;

View File

@ -1,10 +1,11 @@
// @flow
import Redis from "ioredis";
import Logger from "./logging/logger";
const options = {
maxRetriesPerRequest: 20,
retryStrategy(times) {
console.warn(`Retrying redis connection: attempt ${times}`);
Logger.warn(`Retrying redis connection: attempt ${times}`);
return Math.min(times * 100, 3000);
},
// support Heroku Redis, see:

View File

@ -1,14 +1,13 @@
// @flow
import { subDays } from "date-fns";
import debug from "debug";
import Router from "koa-router";
import { documentPermanentDeleter } from "../../commands/documentPermanentDeleter";
import { AuthenticationError } from "../../errors";
import Logger from "../../logging/logger";
import { Document, FileOperation } from "../../models";
import { Op } from "../../sequelize";
const router = new Router();
const log = debug("utils");
router.post("utils.gc", async (ctx) => {
const { token, limit = 500 } = ctx.body;
@ -17,7 +16,10 @@ router.post("utils.gc", async (ctx) => {
throw new AuthenticationError("Invalid secret token");
}
log(`Permanently destroying upto ${limit} documents older than 30 days…`);
Logger.info(
"utils",
`Permanently destroying up to ${limit} documents older than 30 days…`
);
const documents = await Document.scope("withUnpublished").findAll({
attributes: ["id", "teamId", "text", "deletedAt"],
@ -32,9 +34,12 @@ router.post("utils.gc", async (ctx) => {
const countDeletedDocument = await documentPermanentDeleter(documents);
log(`Destroyed ${countDeletedDocument} documents`);
Logger.info("utils", `Destroyed ${countDeletedDocument} documents`);
log(`Expiring all the collection export older than 30 days…`);
Logger.info(
"utils",
`Expiring all the collection export older than 30 days…`
);
const exports = await FileOperation.unscoped().findAll({
where: {

View File

@ -1,7 +1,6 @@
// @flow
import passport from "@outlinewiki/koa-passport";
import { addMonths } from "date-fns";
import debug from "debug";
import Koa from "koa";
import bodyParser from "koa-body";
import Router from "koa-router";
@ -11,7 +10,6 @@ import validation from "../../middlewares/validation";
import { Collection, Team, View } from "../../models";
import providers from "./providers";
const log = debug("server");
const app = new Koa();
const router = new Router();
@ -21,7 +19,6 @@ router.use(passport.initialize());
providers.forEach((provider) => {
if (provider.enabled) {
router.use("/", provider.router.routes());
log(`loaded ${provider.name} auth provider`);
}
});

View File

@ -1,6 +1,6 @@
// @flow
import "./bootstrap";
import debug from "debug";
import Logger from "../logging/logger";
import {
Team,
User,
@ -9,14 +9,13 @@ import {
} from "../models";
import { Op } from "../sequelize";
const log = debug("server");
const cache = {};
let page = 0;
let limit = 100;
export default async function main(exit = false) {
const work = async (page: number) => {
log(`Migrating authentication data… page ${page}`);
Logger.info("database", "Starting authentication migration");
const users = await User.findAll({
limit,
@ -42,13 +41,15 @@ export default async function main(exit = false) {
const provider = user.service;
const providerId = user.team[`${provider}Id`];
if (!providerId) {
console.error(
`user ${user.id} has serviceId ${user.serviceId}, but team ${provider}Id missing`
Logger.info(
"database",
`User ${user.id} has serviceId ${user.serviceId}, but team ${provider}Id missing`
);
continue;
}
if (providerId.startsWith("transferred")) {
console.log(
Logger.info(
"database",
`skipping previously transferred ${user.team.name} (${user.team.id})`
);
continue;
@ -78,7 +79,8 @@ export default async function main(exit = false) {
userId: user.id,
});
} catch (err) {
console.error(
Logger.info(
"database",
`serviceId ${user.serviceId} exists, for user ${user.id}`
);
continue;
@ -91,7 +93,7 @@ export default async function main(exit = false) {
await work(page);
if (exit) {
log("Migration complete");
Logger.info("database", "Migration complete");
process.exit(0);
}
}

View File

@ -1,7 +1,7 @@
// @flow
import debug from "debug";
import Sequelize from "sequelize";
import EncryptedField from "sequelize-encrypted";
import Logger from "./logging/logger";
const isProduction = process.env.NODE_ENV === "production";
const isSSLDisabled = process.env.PGSSLMODE === "disable";
@ -15,7 +15,7 @@ export const Op = Sequelize.Op;
export const sequelize = new Sequelize(
process.env.DATABASE_URL || process.env.DATABASE_CONNECTION_POOL_URL,
{
logging: debug("sql"),
logging: (msg) => Logger.debug("database", msg),
typeValidation: true,
dialectOptions: {
ssl:

View File

@ -10,6 +10,7 @@ import mount from "koa-mount";
import enforceHttps from "koa-sslify";
import emails from "../emails";
import env from "../env";
import Logger from "../logging/logger";
import routes from "../routes";
import api from "../routes/api";
import auth from "../routes/auth";
@ -44,7 +45,7 @@ export default function init(app: Koa = new Koa(), server?: http.Server): Koa {
})
);
} else {
console.warn("Enforced https was disabled with FORCE_HTTPS env variable");
Logger.warn("Enforced https was disabled with FORCE_HTTPS env variable");
}
// trust header fields set by our proxy. eg X-Forwarded-For
@ -90,7 +91,7 @@ export default function init(app: Koa = new Koa(), server?: http.Server): Koa {
app.use(
convert(
hotMiddleware(compile, {
log: console.log, // eslint-disable-line
log: (...args) => Logger.info("lifecycle", ...args),
path: "/__webpack_hmr",
heartbeat: 10 * 1000,
})

View File

@ -4,15 +4,14 @@ import Koa from "koa";
import IO from "socket.io";
import socketRedisAdapter from "socket.io-redis";
import SocketAuth from "socketio-auth";
import env from "../env";
import Logger from "../logging/logger";
import Metrics from "../logging/metrics";
import { Document, Collection, View } from "../models";
import policy from "../policies";
import { websocketsQueue } from "../queues";
import WebsocketsProcessor from "../queues/processors/websockets";
import { client, subscriber } from "../redis";
import { getUserForJWT } from "../utils/jwt";
import * as metrics from "../utils/metrics";
import Sentry from "../utils/sentry";
const { can } = policy;
@ -37,23 +36,23 @@ export default function init(app: Koa, server: http.Server) {
io.of("/").adapter.on("error", (err) => {
if (err.name === "MaxRetriesPerRequestError") {
console.error(`Redis error: ${err.message}. Shutting down now.`);
Logger.error("Redis maximum retries exceeded in socketio adapter", err);
throw err;
} else {
console.error(`Redis error: ${err.message}`);
Logger.error("Redis error in socketio adapter", err);
}
});
io.on("connection", (socket) => {
metrics.increment("websockets.connected");
metrics.gaugePerInstance(
Metrics.increment("websockets.connected");
Metrics.gaugePerInstance(
"websockets.count",
socket.client.conn.server.clientsCount
);
socket.on("disconnect", () => {
metrics.increment("websockets.disconnected");
metrics.gaugePerInstance(
Metrics.increment("websockets.disconnected");
Metrics.gaugePerInstance(
"websockets.count",
socket.client.conn.server.clientsCount
);
@ -106,7 +105,7 @@ export default function init(app: Koa, server: http.Server) {
if (can(user, "read", collection)) {
socket.join(`collection-${event.collectionId}`, () => {
metrics.increment("websockets.collections.join");
Metrics.increment("websockets.collections.join");
});
}
}
@ -127,7 +126,7 @@ export default function init(app: Koa, server: http.Server) {
);
socket.join(room, () => {
metrics.increment("websockets.documents.join");
Metrics.increment("websockets.documents.join");
// let everyone else in the room know that a new user joined
io.to(room).emit("user.join", {
@ -139,14 +138,9 @@ export default function init(app: Koa, server: http.Server) {
// let this user know who else is already present in the room
io.in(room).clients(async (err, sockets) => {
if (err) {
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("clients", sockets);
Sentry.captureException(err);
});
} else {
console.error(err);
}
Logger.error("Error getting clients for room", err, {
sockets,
});
return;
}
@ -173,13 +167,13 @@ export default function init(app: Koa, server: http.Server) {
socket.on("leave", (event) => {
if (event.collectionId) {
socket.leave(`collection-${event.collectionId}`, () => {
metrics.increment("websockets.collections.leave");
Metrics.increment("websockets.collections.leave");
});
}
if (event.documentId) {
const room = `document-${event.documentId}`;
socket.leave(room, () => {
metrics.increment("websockets.documents.leave");
Metrics.increment("websockets.documents.leave");
io.to(room).emit("user.leave", {
userId: user.id,
@ -204,7 +198,7 @@ export default function init(app: Koa, server: http.Server) {
});
socket.on("presence", async (event) => {
metrics.increment("websockets.presence");
Metrics.increment("websockets.presence");
const room = `document-${event.documentId}`;
@ -232,14 +226,7 @@ export default function init(app: Koa, server: http.Server) {
websocketsQueue.process(async function websocketEventsProcessor(job) {
const event = job.data;
websockets.on(event, io).catch((error) => {
if (env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("event", event);
Sentry.captureException(error);
});
} else {
throw error;
}
Logger.error("Error processing websocket event", error, { event });
});
});
}

View File

@ -1,7 +1,7 @@
// @flow
import http from "http";
import debug from "debug";
import Koa from "koa";
import Logger from "../logging/logger";
import {
globalEventQueue,
processorEventQueue,
@ -16,9 +16,6 @@ import Imports from "../queues/processors/imports";
import Notifications from "../queues/processors/notifications";
import Revisions from "../queues/processors/revisions";
import Slack from "../queues/processors/slack";
import Sentry from "../utils/sentry";
const log = debug("queue");
const EmailsProcessor = new Emails();
@ -46,24 +43,22 @@ export default function init(app: Koa, server?: http.Server) {
const event = job.data;
const processor = eventProcessors[event.service];
if (!processor) {
console.warn(
`Received event for processor that isn't registered (${event.service})`
);
Logger.warn(`Received event for processor that isn't registered`, event);
return;
}
if (processor.on) {
log(`${event.service} processing ${event.name}`);
Logger.info("processor", `${event.service} processing ${event.name}`, {
name: event.name,
modelId: event.modelId,
});
processor.on(event).catch((error) => {
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("event", event);
Sentry.captureException(error);
});
} else {
throw error;
}
Logger.error(
`Error processing ${event.name} in ${event.service}`,
error,
event
);
});
}
});
@ -72,14 +67,11 @@ export default function init(app: Koa, server?: http.Server) {
const event = job.data;
EmailsProcessor.on(event).catch((error) => {
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("event", event);
Sentry.captureException(error);
});
} else {
throw error;
}
Logger.error(
`Error processing ${event.name} in emails processor`,
error,
event
);
});
});
}

View File

@ -1,9 +1,9 @@
// @flow
import querystring from "querystring";
import * as Sentry from "@sentry/node";
import { addMonths } from "date-fns";
import { type Context } from "koa";
import { pick } from "lodash";
import Logger from "../logging/logger";
import { User, Event, Team, Collection, View } from "../models";
import { getCookieDomain } from "../utils/domains";
@ -37,8 +37,8 @@ export async function signIn(
["ref", "utm_content", "utm_medium", "utm_source", "utm_campaign"]
);
await team.update({ signupQueryParams });
} catch (err) {
Sentry.captureException(err);
} catch (error) {
Logger.error(`Error persisting signup query params`, error);
}
}
}

View File

@ -1,45 +0,0 @@
// @flow
import metrics from "datadog-metrics";
if (process.env.DD_API_KEY) {
metrics.init({
apiKey: process.env.DD_API_KEY,
prefix: "outline.",
defaultTags: [`env:${process.env.DD_ENV || process.env.NODE_ENV}`],
});
}
export function gauge(key: string, value: number, tags?: string[]): void {
if (!process.env.DD_API_KEY) {
return;
}
return metrics.gauge(key, value, tags);
}
export function gaugePerInstance(
key: string,
value: number,
tags?: string[] = []
): void {
if (!process.env.DD_API_KEY) {
return;
}
const instanceId = process.env.INSTANCE_ID || process.env.HEROKU_DYNO_ID;
if (!instanceId) {
throw new Error(
"INSTANCE_ID or HEROKU_DYNO_ID must be set when using Datadog"
);
}
return metrics.gauge(key, value, [...tags, `instance:${instanceId}`]);
}
export function increment(key: string, tags?: { [string]: string }): void {
if (!process.env.DD_API_KEY) {
return;
}
return metrics.increment(key, tags);
}

View File

@ -2,9 +2,8 @@
import Queue from "bull";
import Redis from "ioredis";
import { snakeCase } from "lodash";
import Metrics from "../logging/metrics";
import { client, subscriber } from "../redis";
import * as metrics from "../utils/metrics";
import Sentry from "./sentry";
export function createQueue(name: string) {
const prefix = `queue.${snakeCase(name)}`;
@ -26,29 +25,24 @@ export function createQueue(name: string) {
});
queue.on("stalled", () => {
metrics.increment(`${prefix}.jobs.stalled`);
Metrics.increment(`${prefix}.jobs.stalled`);
});
queue.on("completed", () => {
metrics.increment(`${prefix}.jobs.completed`);
Metrics.increment(`${prefix}.jobs.completed`);
});
queue.on("error", (err) => {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err);
} else {
console.error(err);
}
metrics.increment(`${prefix}.jobs.errored`);
Metrics.increment(`${prefix}.jobs.errored`);
});
queue.on("failed", () => {
metrics.increment(`${prefix}.jobs.failed`);
Metrics.increment(`${prefix}.jobs.failed`);
});
setInterval(async () => {
metrics.gauge(`${prefix}.count`, await queue.count());
metrics.gauge(`${prefix}.delayed_count`, await queue.getDelayedCount());
Metrics.gauge(`${prefix}.count`, await queue.count());
Metrics.gauge(`${prefix}.delayed_count`, await queue.getDelayedCount());
}, 5 * 1000);
return queue;

View File

@ -1,10 +1,10 @@
// @flow
import crypto from "crypto";
import * as Sentry from "@sentry/node";
import AWS from "aws-sdk";
import { addHours, format } from "date-fns";
import fetch from "fetch-with-proxy";
import { v4 as uuidv4 } from "uuid";
import Logger from "../logging/logger";
const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
@ -147,15 +147,11 @@ export const uploadToS3FromUrl = async (
const endpoint = publicS3Endpoint(true);
return `${endpoint}/${key}`;
} catch (err) {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err, {
extra: {
url,
},
});
} else {
throw err;
}
Logger.error("Error uploading to S3 from URL", err, {
url,
key,
acl,
});
}
};
@ -198,10 +194,8 @@ export const getFileByKey = async (key: string) => {
const data = await s3.getObject(params).promise();
return data.Body;
} catch (err) {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err);
} else {
throw err;
}
Logger.error("Error getting file from S3 by key", err, {
key,
});
}
};

View File

@ -1,5 +1,6 @@
// @flow
import chalk from "chalk";
import Logger from "../logging/logger";
import { Team, AuthenticationProvider } from "../models";
export async function checkMigrations() {
@ -11,12 +12,14 @@ export async function checkMigrations() {
const providers = await AuthenticationProvider.count();
if (teams && !providers) {
console.error(`
Logger.warn(
`
This version of Outline cannot start until a data migration is complete.
Backup your database, run the database migrations and the following script:
$ node ./build/server/scripts/20210226232041-migrate-authentication.js
`);
`
);
process.exit(1);
}
}
@ -92,18 +95,16 @@ export function checkEnv() {
}
if (errors.length) {
console.log(
chalk.bold.red(
"\n\nThe server could not start, please fix the following configuration errors and try again:\n"
)
Logger.warn(
"\n\nThe server could not start, please fix the following configuration errors and try again:\n" +
errors.map((e) => `- ${e}`).join("\n")
);
errors.map((text) => console.log(` - ${text}`));
console.log("\n");
process.exit(1);
}
if (process.env.NODE_ENV === "production") {
console.log(
Logger.info(
"lifecycle",
chalk.green(
`
Is your team enjoying Outline? Consider supporting future development by sponsoring the project:\n\nhttps://github.com/sponsors/outline
@ -111,12 +112,12 @@ Is your team enjoying Outline? Consider supporting future development by sponsor
)
);
} else if (process.env.NODE_ENV === "development") {
console.log(
chalk.yellow(
`\nRunning Outline in development mode. To run Outline in production mode set the ${chalk.bold(
"NODE_ENV"
)} env variable to "production"\n`
)
Logger.warn(
`Running Outline in ${chalk.bold(
"development mode"
)}. To run Outline in production mode set the ${chalk.bold(
"NODE_ENV"
)} env variable to "production"`
);
}
}

View File

@ -1,8 +1,8 @@
// @flow
import fs from "fs";
import * as Sentry from "@sentry/node";
import JSZip from "jszip";
import tmp from "tmp";
import Logger from "../logging/logger";
import { Attachment, Collection, Document } from "../models";
import { serializeFilename } from "./fs";
import { getFileByKey } from "./s3";
@ -47,11 +47,9 @@ async function addImageToArchive(zip, key) {
const img = await getFileByKey(key);
zip.file(key, img, { createFolders: true });
} catch (err) {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err);
}
// error during file retrieval
console.error(err);
Logger.error("Error loading image attachment from S3", err, {
key,
});
}
}

147
yarn.lock
View File

@ -1070,6 +1070,15 @@
exec-sh "^0.3.2"
minimist "^1.2.0"
"@dabh/diagnostics@^2.0.2":
version "2.0.2"
resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.2.tgz#290d08f7b381b8f94607dc8f471a12c675f9db31"
integrity sha512-+A1YivoVDNNVCdfozHSR8v/jyuuLTMXwjWuxPFlFlUapXoGc+Gj9mDlTDDfrwl7rXCl2tNZ0kE8sIBO6YOn96Q==
dependencies:
colorspace "1.1.x"
enabled "2.0.x"
kuler "^2.0.0"
"@emotion/is-prop-valid@^0.8.2", "@emotion/is-prop-valid@^0.8.8":
version "0.8.8"
resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz#db28b1c4368a259b60a97311d6a952d4fd01ac1a"
@ -3248,6 +3257,11 @@ async@0.9.x:
resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d"
integrity sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=
async@^3.1.0:
version "3.2.1"
resolved "https://registry.yarnpkg.com/async/-/async-3.2.1.tgz#d3274ec66d107a47476a4c49136aacdb00665fc8"
integrity sha512-XdD5lRO/87udXCMC9meWdYiR+Nq6ZjUfXidViUZGu2F1MO4T3XwZ1et0hb2++BgLfhyJwy44BGB/yx80ABx8hg==
asynckit@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
@ -4468,7 +4482,7 @@ collection-visit@^1.0.0:
map-visit "^1.0.0"
object-visit "^1.0.0"
color-convert@^1.9.0:
color-convert@^1.9.0, color-convert@^1.9.1:
version "1.9.3"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
@ -4487,17 +4501,33 @@ color-name@1.1.3:
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=
color-name@~1.1.4:
color-name@^1.0.0, color-name@~1.1.4:
version "1.1.4"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
color-string@^1.5.2:
version "1.6.0"
resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.6.0.tgz#c3915f61fe267672cb7e1e064c9d692219f6c312"
integrity sha512-c/hGS+kRWJutUBEngKKmk4iH3sD59MBkoxVapS/0wgpCz2u7XsNloxknyvBhzwEs1IbV36D9PwqLPJ2DTu3vMA==
dependencies:
color-name "^1.0.0"
simple-swizzle "^0.2.2"
color@3.0.x:
version "3.0.0"
resolved "https://registry.yarnpkg.com/color/-/color-3.0.0.tgz#d920b4328d534a3ac8295d68f7bd4ba6c427be9a"
integrity sha512-jCpd5+s0s0t7p3pHQKpnJ0TpQKKdleP71LWcA0aqiljpiuAkOSUFN/dyH8ZwF0hRmFlrIuRhufds1QyEP9EB+w==
dependencies:
color-convert "^1.9.1"
color-string "^1.5.2"
colorette@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94"
integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w==
colors@^1.4.0:
colors@^1.2.1, colors@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78"
integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==
@ -4507,6 +4537,14 @@ colors@~1.2.0-rc0:
resolved "https://registry.yarnpkg.com/colors/-/colors-1.2.5.tgz#89c7ad9a374bc030df8013241f68136ed8835afc"
integrity sha512-erNRLao/Y3Fv54qUa0LBB+//Uf3YwMUmdJinN20yMXm9zdKKqH9wt7R9IIVZ+K7ShzfpLV/Zg8+VyrBJYB4lpg==
colorspace@1.1.x:
version "1.1.2"
resolved "https://registry.yarnpkg.com/colorspace/-/colorspace-1.1.2.tgz#e0128950d082b86a2168580796a0aa5d6c68d8c5"
integrity sha512-vt+OoIP2d76xLhjwbBaucYlNSpPsrJWPlBTtwCpQKIu6/CSMutyzX93O/Do0qzpH3YoHEes8YEFXyZ797rEhzQ==
dependencies:
color "3.0.x"
text-hex "1.0.x"
combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6:
version "1.0.8"
resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
@ -5597,6 +5635,11 @@ emojis-list@^3.0.0:
resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78"
integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==
enabled@2.0.x:
version "2.0.0"
resolved "https://registry.yarnpkg.com/enabled/-/enabled-2.0.0.tgz#f9dd92ec2d6f4bbc0d5d1e64e21d61cd4665e7c2"
integrity sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==
encodeurl@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
@ -6307,6 +6350,11 @@ fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6:
resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=
fast-safe-stringify@^2.0.4:
version "2.1.1"
resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884"
integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==
fb-watchman@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85"
@ -6319,6 +6367,11 @@ feature-policy@0.3.0:
resolved "https://registry.yarnpkg.com/feature-policy/-/feature-policy-0.3.0.tgz#7430e8e54a40da01156ca30aaec1a381ce536069"
integrity sha512-ZtijOTFN7TzCujt1fnNhfWPFPSHeZkesff9AXZj+UEjYBynWNUIYpC87Ve4wHzyexQsImicLu7WsC2LHq7/xrQ==
fecha@^4.2.0:
version "4.2.1"
resolved "https://registry.yarnpkg.com/fecha/-/fecha-4.2.1.tgz#0a83ad8f86ef62a091e22bb5a039cd03d23eecce"
integrity sha512-MMMQ0ludy/nBs1/o0zVOiKTpG7qMbonKUzjJgQFEuvq6INZ1OraKPRAWkBq5vlKLOUMpmNYG1JoN3oDPUQ9m3Q==
fetch-retry@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/fetch-retry/-/fetch-retry-4.1.1.tgz#fafe0bb22b54f4d0a9c788dff6dd7f8673ca63f3"
@ -6521,6 +6574,11 @@ flush-write-stream@^1.0.0, flush-write-stream@^1.0.2:
inherits "^2.0.3"
readable-stream "^2.3.6"
fn.name@1.x.x:
version "1.1.0"
resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc"
integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==
focus-visible@^5.1.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/focus-visible/-/focus-visible-5.2.0.tgz#3a9e41fccf587bd25dcc2ef045508284f0a4d6b3"
@ -7693,6 +7751,11 @@ is-arrayish@^0.2.1:
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=
is-arrayish@^0.3.1:
version "0.3.2"
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03"
integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==
is-bigint@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.2.tgz#ffb381442503235ad245ea89e45b3dbff040ee5a"
@ -9089,6 +9152,11 @@ koalas@^1.0.2:
resolved "https://registry.yarnpkg.com/koalas/-/koalas-1.0.2.tgz#318433f074235db78fae5661a02a8ca53ee295cd"
integrity sha1-MYQz8HQjXbePrlZhoCqMpT7ilc0=
kuler@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3"
integrity sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==
language-subtag-registry@~0.3.2:
version "0.3.21"
resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.21.tgz#04ac218bea46f04cb039084602c6da9e788dd45a"
@ -9515,6 +9583,17 @@ lodash@^4.0.1, lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.1
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
logform@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/logform/-/logform-2.2.0.tgz#40f036d19161fc76b68ab50fdc7fe495544492f2"
integrity sha512-N0qPlqfypFx7UHNn4B3lzS/b0uLqt2hmuoa+PpuXNYgozdJYAyauF5Ky0BWVjrxDlMWiT3qN4zPq3vVAfZy7Yg==
dependencies:
colors "^1.2.1"
fast-safe-stringify "^2.0.4"
fecha "^4.2.0"
ms "^2.1.1"
triple-beam "^1.3.0"
long@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28"
@ -10034,9 +10113,9 @@ mz@2, mz@^2.4.0, mz@^2.6.0:
thenify-all "^1.0.0"
nan@^2.12.1:
version "2.14.2"
resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.2.tgz#f5376400695168f4cc694ac9393d0c9585eeea19"
integrity sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ==
version "2.15.0"
resolved "https://registry.yarnpkg.com/nan/-/nan-2.15.0.tgz#3f34a473ff18e15c1b5626b62903b5ad6e665fee"
integrity sha512-8ZtvEnA2c5aYCZYd1cvgdnU6cqwixRoYg70xPLWUws5ORTa/lnw+u4amixRS/Ac5U5mQVgp9pnlSUnbNWFaWZQ==
nanomatch@^1.2.9:
version "1.2.13"
@ -10423,6 +10502,13 @@ once@^1.3.0, once@^1.3.1, once@^1.3.2, once@^1.4.0:
dependencies:
wrappy "1"
one-time@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/one-time/-/one-time-1.0.0.tgz#e06bc174aed214ed58edede573b433bbf827cb45"
integrity sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==
dependencies:
fn.name "1.x.x"
onetime@^5.1.0:
version "5.1.2"
resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e"
@ -11866,7 +11952,7 @@ read-pkg@^5.2.0:
parse-json "^5.0.0"
type-fest "^0.6.0"
"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.5, readable-stream@^2.3.6, readable-stream@~2.3.6:
"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.5, readable-stream@^2.3.6, readable-stream@^2.3.7, readable-stream@~2.3.6:
version "2.3.7"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
@ -11889,7 +11975,7 @@ readable-stream@1.1.x:
isarray "0.0.1"
string_decoder "~0.10.x"
readable-stream@^3.0.0, readable-stream@^3.1.1, readable-stream@^3.6.0:
readable-stream@^3.0.0, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
@ -12796,6 +12882,13 @@ signal-exit@^3.0.0, signal-exit@^3.0.2:
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c"
integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==
simple-swizzle@^0.2.2:
version "0.2.2"
resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a"
integrity sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=
dependencies:
is-arrayish "^0.3.1"
sisteransi@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed"
@ -13143,6 +13236,11 @@ ssri@^8.0.0:
dependencies:
minipass "^3.1.1"
stack-trace@0.0.x:
version "0.0.10"
resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0"
integrity sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=
stack-utils@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.2.tgz#5cf48b4557becb4638d0bc4f21d23f5d19586593"
@ -13623,6 +13721,11 @@ test-exclude@^6.0.0:
glob "^7.1.4"
minimatch "^3.0.4"
text-hex@1.0.x:
version "1.0.0"
resolved "https://registry.yarnpkg.com/text-hex/-/text-hex-1.0.0.tgz#69dc9c1b17446ee79a92bf5b884bb4b9127506f5"
integrity sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==
text-table@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
@ -13894,6 +13997,11 @@ tree-kill@^1.2.2:
resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.2.tgz#4ca09a9092c88b73a7cdc5e8a01b507b0790a0cc"
integrity sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==
triple-beam@^1.2.0, triple-beam@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.3.0.tgz#a595214c7298db8339eeeee083e4d10bd8cb8dd9"
integrity sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==
tsconfig-paths@^3.9.0:
version "3.9.0"
resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz#098547a6c4448807e8fcb8eae081064ee9a3c90b"
@ -14788,6 +14896,29 @@ windows-release@^3.1.0:
dependencies:
execa "^1.0.0"
winston-transport@^4.4.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-4.4.0.tgz#17af518daa690d5b2ecccaa7acf7b20ca7925e59"
integrity sha512-Lc7/p3GtqtqPBYYtS6KCN3c77/2QCev51DvcJKbkFPQNoj1sinkGwLGFDxkXY9J6p9+EPnYs+D90uwbnaiURTw==
dependencies:
readable-stream "^2.3.7"
triple-beam "^1.2.0"
winston@^3.3.3:
version "3.3.3"
resolved "https://registry.yarnpkg.com/winston/-/winston-3.3.3.tgz#ae6172042cafb29786afa3d09c8ff833ab7c9170"
integrity sha512-oEXTISQnC8VlSAKf1KYSSd7J6IWuRPQqDdo8eoRNaYKLvwSb5+79Z3Yi1lrl6KDpU6/VWaxpakDAtb1oQ4n9aw==
dependencies:
"@dabh/diagnostics" "^2.0.2"
async "^3.1.0"
is-stream "^2.0.0"
logform "^2.2.0"
one-time "^1.0.0"
readable-stream "^3.4.0"
stack-trace "0.0.x"
triple-beam "^1.3.0"
winston-transport "^4.4.0"
wkx@^0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/wkx/-/wkx-0.5.0.tgz#c6c37019acf40e517cc6b94657a25a3d4aa33e8c"