feat: Normalized server logging (#2567)

* feat: Normalize logging

* Remove scattered console.error + Sentry.captureException

* Remove mention of debug

* Clean up dev output

* Edge cases, docs

* Refactor: Move logger, metrics, sentry under 'logging' folder.
Trying to reduce the number of things under the generic 'utils' folder

* Clean up the last few console calls
Tom Moor
2021-09-14 18:04:35 -07:00
committed by GitHub
parent 6c605cf720
commit 83a61b87ed
36 changed files with 508 additions and 264 deletions
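
In practice, call sites move from ad-hoc debug namespaces, bare console calls, and inline Sentry.captureException to a single categorized Logger. A rough before/after sketch of the pattern, condensed from the diffs below (illustrative only, not a literal hunk):

// Before: three unrelated ways to get output, depending on the file
const log = debug("server");
log(`Starting ${name} service`);
console.error(err);
if (process.env.SENTRY_DSN) {
  Sentry.captureException(err);
}

// After: one API that writes through winston and reports to Sentry internally
import Logger from "./logging/logger";
Logger.info("lifecycle", `Starting ${name} service`);
Logger.error("Provisioning subdomain failed", err, { teamId: team.id, subdomain });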


@@ -1,22 +1,19 @@
// @flow
import debug from "debug";
const log = debug("server");
export default class Logger {
import Logger from "../logging/logger";
export default class CollaborationLogger {
async onCreateDocument(data: { documentName: string }) {
log(`Created document "${data.documentName}"`);
Logger.info("collaboration", `Created document "${data.documentName}"`);
}
async onConnect(data: { documentName: string }) {
log(`New connection to "${data.documentName}"`);
Logger.info("collaboration", `New connection to "${data.documentName}"`);
}
async onDisconnect(data: { documentName: string }) {
log(`Connection to "${data.documentName}" closed`);
Logger.info("collaboration", `Connection to "${data.documentName}" closed`);
}
async onUpgrade() {
log("Upgrading connection");
Logger.info("collaboration", "Upgrading connection");
}
}


@@ -1,12 +1,11 @@
// @flow
import debug from "debug";
import { debounce } from "lodash";
import * as Y from "yjs";
import documentUpdater from "../commands/documentUpdater";
import Logger from "../logging/logger";
import { Document, User } from "../models";
import markdownToYDoc from "./utils/markdownToYDoc";
const log = debug("server");
const DELAY = 3000;
export default class Persistence {
@@ -30,12 +29,18 @@ export default class Persistence {
if (document.state) {
const ydoc = new Y.Doc();
log(`Document ${documentId} is already in state`);
Logger.info(
"collaboration",
`Document ${documentId} is in database state`
);
Y.applyUpdate(ydoc, document.state);
return ydoc;
}
log(`Document ${documentId} is not in state, creating state from markdown`);
Logger.info(
"collaboration",
`Document ${documentId} is not in state, creating from markdown`
);
const ydoc = markdownToYDoc(document.text, fieldName);
const state = Y.encodeStateAsUpdate(ydoc);
@@ -55,7 +60,7 @@ export default class Persistence {
}) => {
const [, documentId] = documentName.split(".");
log(`persisting ${documentId}`);
Logger.info("collaboration", `Persisting ${documentId}`);
await documentUpdater({
documentId,


@@ -1,40 +1,40 @@
// @flow
import * as metrics from "../utils/metrics";
import Metrics from "../logging/metrics";
let count = 0;
export default class Tracing {
async onCreateDocument({ documentName }: { documentName: string }) {
metrics.increment("collaboration.create_document", { documentName });
Metrics.increment("collaboration.create_document", { documentName });
// TODO: Waiting for `instance` available in payload
// metrics.gaugePerInstance(
// Metrics.gaugePerInstance(
// "collaboration.documents_count",
// instance.documents.size()
// );
}
async onAuthenticationFailed({ documentName }: { documentName: string }) {
metrics.increment("collaboration.authentication_failed", { documentName });
Metrics.increment("collaboration.authentication_failed", { documentName });
}
async onConnect({ documentName }: { documentName: string }) {
metrics.increment("collaboration.connect", { documentName });
metrics.gaugePerInstance("collaboration.connections_count", ++count);
Metrics.increment("collaboration.connect", { documentName });
Metrics.gaugePerInstance("collaboration.connections_count", ++count);
}
async onDisconnect({ documentName }: { documentName: string }) {
metrics.increment("collaboration.disconnect", { documentName });
metrics.gaugePerInstance("collaboration.connections_count", --count);
Metrics.increment("collaboration.disconnect", { documentName });
Metrics.gaugePerInstance("collaboration.connections_count", --count);
// TODO: Waiting for `instance` available in payload
// metrics.gaugePerInstance(
// Metrics.gaugePerInstance(
// "collaboration.documents_count",
// instance.documents.size()
// );
}
async onChange({ documentName }: { documentName: string }) {
metrics.increment("collaboration.change", { documentName });
Metrics.increment("collaboration.change", { documentName });
}
}


@@ -2,20 +2,18 @@
import fs from "fs";
import os from "os";
import path from "path";
import debug from "debug";
import File from "formidable/lib/file";
import invariant from "invariant";
import { values, keys } from "lodash";
import { v4 as uuidv4 } from "uuid";
import { parseOutlineExport } from "../../shared/utils/zip";
import { FileImportError } from "../errors";
import Logger from "../logging/logger";
import { Attachment, Event, Document, Collection, User } from "../models";
import attachmentCreator from "./attachmentCreator";
import documentCreator from "./documentCreator";
import documentImporter from "./documentImporter";
const log = debug("commands");
export default async function collectionImporter({
file,
type,
@@ -155,7 +153,7 @@ export default async function collectionImporter({
continue;
}
log(`Skipped importing ${item.path}`);
Logger.info("commands", `Skipped importing ${item.path}`);
}
// All collections, documents, and attachments have been created time to


@@ -1,11 +1,9 @@
// @flow
import debug from "debug";
import Logger from "../logging/logger";
import { Document, Attachment } from "../models";
import { sequelize } from "../sequelize";
import parseAttachmentIds from "../utils/parseAttachmentIds";
const log = debug("commands");
export async function documentPermanentDeleter(documents: Document[]) {
const activeDocument = documents.find((doc) => !doc.deletedAt);
@@ -47,9 +45,9 @@ export async function documentPermanentDeleter(documents: Document[]) {
if (attachment) {
await attachment.destroy();
log(`Attachment ${attachmentId} deleted`);
Logger.info("commands", `Attachment ${attachmentId} deleted`);
} else {
log(`Unknown attachment ${attachmentId} ignored`);
Logger.info("commands", `Unknown attachment ${attachmentId} ignored`);
}
}
}


@@ -1,13 +1,11 @@
// @flow
import debug from "debug";
import { MaximumTeamsError } from "../errors";
import Logger from "../logging/logger";
import { Team, AuthenticationProvider } from "../models";
import { sequelize } from "../sequelize";
import { getAllowedDomains } from "../utils/authentication";
import { generateAvatarUrl } from "../utils/avatars";
const log = debug("server");
type TeamCreatorResult = {|
team: Team,
authenticationProvider: AuthenticationProvider,
@@ -111,7 +109,10 @@ export default async function teamCreator({
try {
await team.provisionSubdomain(subdomain);
} catch (err) {
log(`Provisioning subdomain failed: ${err.message}`);
Logger.error("Provisioning subdomain failed", err, {
teamId: team.id,
subdomain,
});
}
return {


@@ -3,7 +3,6 @@ import env from "./env"; // eslint-disable-line import/order
import "./tracing"; // must come before importing any instrumented module
import http from "http";
import debug from "debug";
import Koa from "koa";
import compress from "koa-compress";
import helmet from "koa-helmet";
@@ -13,9 +12,10 @@ import Router from "koa-router";
import { uniq } from "lodash";
import stoppable from "stoppable";
import throng from "throng";
import Logger from "./logging/logger";
import { requestErrorHandler } from "./logging/sentry";
import services from "./services";
import { getArg } from "./utils/args";
import { requestErrorHandler } from "./utils/sentry";
import { checkEnv, checkMigrations } from "./utils/startup";
import { checkUpdates } from "./utils/updates";
@@ -55,12 +55,12 @@ async function start(id: string, disconnect: () => void) {
const app = new Koa();
const server = stoppable(http.createServer(app.callback()));
const httpLogger = debug("http");
const log = debug("server");
const router = new Router();
// install basic middleware shared by all services
app.use(logger((str, args) => httpLogger(str)));
if ((env.DEBUG || "").includes("http")) {
app.use(logger((str, args) => Logger.info("http", str)));
}
app.use(compress());
app.use(helmet());
@@ -87,7 +87,7 @@ async function start(id: string, disconnect: () => void) {
throw new Error(`Unknown service ${name}`);
}
log(`Starting ${name} service`);
Logger.info("lifecycle", `Starting ${name} service`);
const init = services[name];
await init(app, server);
}
@@ -98,7 +98,7 @@ async function start(id: string, disconnect: () => void) {
server.on("listening", () => {
const address = server.address();
console.log(`\n> Listening on http://localhost:${address.port}\n`);
Logger.info("lifecycle", `Listening on http://localhost:${address.port}`);
});
server.listen(normalizedPortFlag || env.PORT || "3000");
@@ -107,7 +107,7 @@ async function start(id: string, disconnect: () => void) {
process.once("SIGINT", shutdown);
function shutdown() {
console.log("\n> Stopping server");
Logger.info("lifecycle", "Stopping server");
server.stop(disconnect);
}
}

server/logging/logger.js (new file)

@@ -0,0 +1,117 @@
// @flow
import chalk from "chalk";
import winston from "winston";
import env from "../env";
import Metrics from "../logging/metrics";
import Sentry from "../logging/sentry";
const isProduction = env.NODE_ENV === "production";
type LogCategory =
| "lifecycle"
| "collaboration"
| "http"
| "commands"
| "processor"
| "email"
| "queue"
| "database"
| "utils";
type Extra = { [key: string]: any };
class Logger {
output: any;
constructor() {
this.output = winston.createLogger();
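// Production emits structured JSON lines; development emits colorized "[label] message" output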
this.output.add(
new winston.transports.Console({
format: isProduction
? winston.format.json()
: winston.format.combine(
winston.format.colorize(),
winston.format.printf(
({ message, label }) =>
`${label ? chalk.bold("[" + label + "] ") : ""}${message}`
)
),
})
);
}
/**
* Log information
*
* @param label A log message category that will be prepended
* @param message The message to log
* @param extra Arbitrary data to be logged that will appear in prod logs
*/
info(label: LogCategory, message: string, extra?: Extra) {
this.output.info(message, { ...extra, label });
}
/**
* Debug information
*
* @param label A log message category that will be prepended
* @param message The message to log
* @param extra Arbitrary data to be logged that will appear in prod logs
*/
debug(label: LogCategory, message: string, extra?: Extra) {
this.output.debug(message, { ...extra, label });
}
/**
* Log a warning
*
* @param message A warning message
* @param extra Arbitrary data to be logged that will appear in prod logs
*/
warn(message: string, extra?: Extra) {
Metrics.increment("logger.warning");
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setLevel(Sentry.Severity.Warning);
for (const key in extra) {
scope.setExtra(key, extra[key]);
}
Sentry.captureMessage(message);
});
}
if (isProduction) {
this.output.warn(message, extra);
} else {
console.warn(message, extra);
}
}
/**
* Report a runtime error
*
* @param message A description of the error
* @param error The error that occurred
* @param extra Arbitrary data to be logged that will appear in prod logs
*/
error(message: string, error: Error, extra?: Extra) {
Metrics.increment("logger.error");
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setLevel(Sentry.Severity.Error);
for (const key in extra) {
scope.setExtra(key, extra[key]);
}
Sentry.captureException(error);
});
}
if (isProduction) {
this.output.error(message, { error: error.message, stack: error.stack });
} else {
console.error(message, { error, extra });
}
}
}
export default new Logger();
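
Since the module exports a singleton, call sites import it directly. A hedged usage sketch (the category strings come from the LogCategory union above; the extra payloads here are hypothetical):

import Logger from "./logging/logger";

// info and debug take a category label that is prepended in development output
Logger.info("database", "Connection established", { host: "localhost" });

// warn and error skip the label, increment a counter metric, and report to
// Sentry when SENTRY_DSN is set
Logger.warn("Enforced https was disabled with FORCE_HTTPS env variable");
Logger.error("Error exporting collection data", new Error("S3 timeout"), {
  fileOperationId: "hypothetical-id",
});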

server/logging/metrics.js (new file)

@@ -0,0 +1,51 @@
// @flow
import ddMetrics from "datadog-metrics";
class Metrics {
enabled: boolean = !!process.env.DD_API_KEY;
constructor() {
if (!this.enabled) {
return;
}
ddMetrics.init({
apiKey: process.env.DD_API_KEY,
prefix: "outline.",
defaultTags: [`env:${process.env.DD_ENV || process.env.NODE_ENV}`],
});
}
gauge(key: string, value: number, tags?: string[]): void {
if (!this.enabled) {
return;
}
return ddMetrics.gauge(key, value, tags);
}
gaugePerInstance(key: string, value: number, tags?: string[] = []): void {
if (!this.enabled) {
return;
}
const instanceId = process.env.INSTANCE_ID || process.env.HEROKU_DYNO_ID;
if (!instanceId) {
throw new Error(
"INSTANCE_ID or HEROKU_DYNO_ID must be set when using DataDog"
);
}
return ddMetrics.gauge(key, value, [...tags, `instance:${instanceId}`]);
}
increment(key: string, tags?: { [string]: string }): void {
if (!this.enabled) {
return;
}
return ddMetrics.increment(key, tags);
}
}
export default new Metrics();
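
Usage mirrors the old module-level functions from utils/metrics.js (deleted below), now as methods on a singleton; every method is a no-op unless DD_API_KEY is configured. A sketch based on call sites elsewhere in this diff:

import Metrics from "./logging/metrics";

Metrics.increment("websockets.connected");
Metrics.gauge("queue.emails.count", 12);

// Tags the value with instance:<INSTANCE_ID or HEROKU_DYNO_ID> so that gauges
// reported from multiple processes don't overwrite each other
Metrics.gaugePerInstance("websockets.count", 42);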


@@ -1,6 +1,4 @@
// @flow
import * as Sentry from "@sentry/node";
import debug from "debug";
import nodemailer from "nodemailer";
import Oy from "oy-vey";
import * as React from "react";
@@ -31,9 +29,9 @@
import { SigninEmail, signinEmailText } from "./emails/SigninEmail";
import { WelcomeEmail, welcomeEmailText } from "./emails/WelcomeEmail";
import { baseStyles } from "./emails/components/EmailLayout";
import Logger from "./logging/logger";
import { emailsQueue } from "./queues";
const log = debug("emails");
const useTestEmailService =
process.env.NODE_ENV === "development" && !process.env.SMTP_USERNAME;
@@ -101,7 +99,10 @@ export class Mailer {
}
if (useTestEmailService) {
log("SMTP_USERNAME not provided, generating test account…");
Logger.info(
"email",
"SMTP_USERNAME not provided, generating test account…"
);
try {
let testAccount = await nodemailer.createTestAccount();
@@ -118,7 +119,10 @@
this.transporter = nodemailer.createTransport(smtpConfig);
} catch (err) {
log(`Could not generate test account: ${err.message}`);
Logger.error(
"Couldn't generate a test account with ethereal.email",
err
);
}
}
}
@@ -134,7 +138,7 @@ export class Mailer {
});
try {
log(`Sending email "${data.title}" to ${data.to}`);
Logger.info("email", `Sending email "${data.title}" to ${data.to}`);
const info = await transporter.sendMail({
from: process.env.SMTP_FROM_EMAIL,
replyTo: process.env.SMTP_REPLY_EMAIL || process.env.SMTP_FROM_EMAIL,
@@ -145,12 +149,13 @@
});
if (useTestEmailService) {
log("Email Preview URL: %s", nodemailer.getTestMessageUrl(info));
Logger.info(
"email",
`Preview Url: ${nodemailer.getTestMessageUrl(info)}`
);
}
} catch (err) {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err);
}
Logger.error(`Error sending email to ${data.to}`, err);
throw err; // Re-throw for queue to re-try
}
}


@@ -2,6 +2,7 @@
import passport from "@outlinewiki/koa-passport";
import { type Context } from "koa";
import type { AccountProvisionerResult } from "../commands/accountProvisioner";
import Logger from "../logging/logger";
import { signIn } from "../utils/authentication";
export default function createMiddleware(providerName: string) {
@@ -11,7 +12,7 @@ export default function createMiddleware(providerName: string) {
{ session: false },
async (err, user, result: AccountProvisionerResult) => {
if (err) {
console.error(err);
Logger.error("Error during authentication", err);
if (err.id) {
const notice = err.id.replace(/_/g, "-");
@@ -36,7 +37,10 @@
// Correlation ID, Timestamp in these two query string parameters.
const { error, error_description } = ctx.request.query;
if (error && error_description) {
console.error(error_description);
Logger.error(
"Error from Azure during authentication",
new Error(error_description)
);
// Display only the descriptive message to the user, log the rest
const description = error_description.split("Trace ID")[0];


@@ -8,6 +8,7 @@ import {
stripSubdomain,
RESERVED_SUBDOMAINS,
} from "../../shared/utils/domains";
import Logger from "../logging/logger";
import { DataTypes, sequelize, Op } from "../sequelize";
import { generateAvatarUrl } from "../utils/avatars";
import { publicS3Endpoint, uploadToS3FromUrl } from "../utils/s3";
@@ -134,8 +135,7 @@ const uploadAvatar = async (model) => {
);
if (newUrl) model.avatarUrl = newUrl;
} catch (err) {
// we can try again next time
console.error(err);
Logger.error("Error uploading avatar to S3", err, { url: avatarUrl });
}
}
};


@@ -5,6 +5,7 @@ import JWT from "jsonwebtoken";
import { v4 as uuidv4 } from "uuid";
import { languages } from "../../shared/i18n";
import { ValidationError } from "../errors";
import Logger from "../logging/logger";
import { DataTypes, sequelize, encryptedFields, Op } from "../sequelize";
import { DEFAULT_AVATAR_HOST } from "../utils/avatars";
import { palette } from "../utils/color";
@@ -195,8 +196,9 @@ const uploadAvatar = async (model) => {
);
if (newUrl) model.avatarUrl = newUrl;
} catch (err) {
// we can try again next time
console.error(err);
Logger.error("Couldn't upload user avatar image to S3", err, {
url: avatarUrl,
});
}
}
};


@@ -1,14 +1,12 @@
// @flow
import fs from "fs";
import debug from "debug";
import Logger from "../../logging/logger";
import mailer from "../../mailer";
import { FileOperation, Collection, Event, Team, User } from "../../models";
import type { Event as TEvent } from "../../types";
import { uploadToS3FromBuffer } from "../../utils/s3";
import { archiveCollections } from "../../utils/zip";
const log = debug("commands");
export default class ExportsProcessor {
async on(event: TEvent) {
switch (event.name) {
@@ -30,7 +28,10 @@ export default class ExportsProcessor {
});
// heavy lifting of creating the zip file
log(`Archiving collections for file operation ${exportData.id}`);
Logger.info(
"processor",
`Archiving collections for file operation ${exportData.id}`
);
const filePath = await archiveCollections(collections);
let url, state;
@@ -43,7 +44,10 @@
size: stat.size,
});
log(`Uploading archive for file operation ${exportData.id}`);
Logger.info(
"processor",
`Uploading archive for file operation ${exportData.id}`
);
url = await uploadToS3FromBuffer(
readBuffer,
"application/zip",
@@ -51,10 +55,15 @@
"private"
);
log(`Upload complete for file operation ${exportData.id}`);
Logger.info(
"processor",
`Upload complete for file operation ${exportData.id}`
);
state = "complete";
} catch (e) {
log("Failed to export data", e);
} catch (error) {
Logger.error("Error exporting collection data", error, {
fileOperationId: exportData.id,
});
state = "error";
url = null;
} finally {


@@ -1,5 +1,5 @@
// @flow
import debug from "debug";
import Logger from "../../logging/logger";
import mailer from "../../mailer";
import {
View,
@@ -12,8 +12,6 @@ import {
import { Op } from "../../sequelize";
import type { DocumentEvent, CollectionEvent, Event } from "../../types";
const log = debug("services");
export default class NotificationsProcessor {
async on(event: Event) {
switch (event.name) {
@@ -98,7 +96,8 @@ export default class NotificationsProcessor {
});
if (view) {
log(
Logger.info(
"processor",
`suppressing notification to ${setting.userId} because update viewed`
);
continue;


@@ -1,10 +1,11 @@
// @flow
import Redis from "ioredis";
import Logger from "./logging/logger";
const options = {
maxRetriesPerRequest: 20,
retryStrategy(times) {
console.warn(`Retrying redis connection: attempt ${times}`);
Logger.warn(`Retrying redis connection: attempt ${times}`);
return Math.min(times * 100, 3000);
},
// support Heroku Redis, see:


@@ -1,14 +1,13 @@
// @flow
import { subDays } from "date-fns";
import debug from "debug";
import Router from "koa-router";
import { documentPermanentDeleter } from "../../commands/documentPermanentDeleter";
import { AuthenticationError } from "../../errors";
import Logger from "../../logging/logger";
import { Document, FileOperation } from "../../models";
import { Op } from "../../sequelize";
const router = new Router();
const log = debug("utils");
router.post("utils.gc", async (ctx) => {
const { token, limit = 500 } = ctx.body;
@@ -17,7 +16,10 @@ router.post("utils.gc", async (ctx) => {
throw new AuthenticationError("Invalid secret token");
}
log(`Permanently destroying upto ${limit} documents older than 30 days…`);
Logger.info(
"utils",
`Permanently destroying up to ${limit} documents older than 30 days…`
);
const documents = await Document.scope("withUnpublished").findAll({
attributes: ["id", "teamId", "text", "deletedAt"],
@@ -32,9 +34,12 @@ router.post("utils.gc", async (ctx) => {
const countDeletedDocument = await documentPermanentDeleter(documents);
log(`Destroyed ${countDeletedDocument} documents`);
Logger.info("utils", `Destroyed ${countDeletedDocument} documents`);
log(`Expiring all the collection export older than 30 days…`);
Logger.info(
"utils",
`Expiring all collection exports older than 30 days…`
);
const exports = await FileOperation.unscoped().findAll({
where: {


@@ -1,7 +1,6 @@
// @flow
import passport from "@outlinewiki/koa-passport";
import { addMonths } from "date-fns";
import debug from "debug";
import Koa from "koa";
import bodyParser from "koa-body";
import Router from "koa-router";
@@ -11,7 +10,6 @@ import validation from "../../middlewares/validation";
import { Collection, Team, View } from "../../models";
import providers from "./providers";
const log = debug("server");
const app = new Koa();
const router = new Router();
@@ -21,7 +19,6 @@ router.use(passport.initialize());
providers.forEach((provider) => {
if (provider.enabled) {
router.use("/", provider.router.routes());
log(`loaded ${provider.name} auth provider`);
}
});


@@ -1,6 +1,6 @@
// @flow
import "./bootstrap";
import debug from "debug";
import Logger from "../logging/logger";
import {
Team,
User,
@@ -9,14 +9,13 @@
} from "../models";
import { Op } from "../sequelize";
const log = debug("server");
const cache = {};
let page = 0;
let limit = 100;
export default async function main(exit = false) {
const work = async (page: number) => {
log(`Migrating authentication data… page ${page}`);
Logger.info("database", "Starting authentication migration");
const users = await User.findAll({
limit,
@@ -42,13 +41,15 @@
const provider = user.service;
const providerId = user.team[`${provider}Id`];
if (!providerId) {
console.error(
`user ${user.id} has serviceId ${user.serviceId}, but team ${provider}Id missing`
Logger.info(
"database",
`User ${user.id} has serviceId ${user.serviceId}, but team ${provider}Id missing`
);
continue;
}
if (providerId.startsWith("transferred")) {
console.log(
Logger.info(
"database",
`skipping previously transferred ${user.team.name} (${user.team.id})`
);
continue;
@@ -78,7 +79,8 @@
userId: user.id,
});
} catch (err) {
console.error(
Logger.info(
"database",
`serviceId ${user.serviceId} exists, for user ${user.id}`
);
continue;
@@ -91,7 +93,7 @@
await work(page);
if (exit) {
log("Migration complete");
Logger.info("database", "Migration complete");
process.exit(0);
}
}


@@ -1,7 +1,7 @@
// @flow
import debug from "debug";
import Sequelize from "sequelize";
import EncryptedField from "sequelize-encrypted";
import Logger from "./logging/logger";
const isProduction = process.env.NODE_ENV === "production";
const isSSLDisabled = process.env.PGSSLMODE === "disable";
@@ -15,7 +15,7 @@ export const Op = Sequelize.Op;
export const sequelize = new Sequelize(
process.env.DATABASE_URL || process.env.DATABASE_CONNECTION_POOL_URL,
{
logging: debug("sql"),
logging: (msg) => Logger.debug("database", msg),
typeValidation: true,
dialectOptions: {
ssl:


@@ -10,6 +10,7 @@ import mount from "koa-mount";
import enforceHttps from "koa-sslify";
import emails from "../emails";
import env from "../env";
import Logger from "../logging/logger";
import routes from "../routes";
import api from "../routes/api";
import auth from "../routes/auth";
@@ -44,7 +45,7 @@ export default function init(app: Koa = new Koa(), server?: http.Server): Koa {
})
);
} else {
console.warn("Enforced https was disabled with FORCE_HTTPS env variable");
Logger.warn("Enforced https was disabled with FORCE_HTTPS env variable");
}
// trust header fields set by our proxy. eg X-Forwarded-For
@@ -90,7 +91,7 @@ export default function init(app: Koa = new Koa(), server?: http.Server): Koa {
app.use(
convert(
hotMiddleware(compile, {
log: console.log, // eslint-disable-line
log: (...args) => Logger.info("lifecycle", ...args),
path: "/__webpack_hmr",
heartbeat: 10 * 1000,
})


@@ -4,15 +4,14 @@ import Koa from "koa";
import IO from "socket.io";
import socketRedisAdapter from "socket.io-redis";
import SocketAuth from "socketio-auth";
import env from "../env";
import Logger from "../logging/logger";
import Metrics from "../logging/metrics";
import { Document, Collection, View } from "../models";
import policy from "../policies";
import { websocketsQueue } from "../queues";
import WebsocketsProcessor from "../queues/processors/websockets";
import { client, subscriber } from "../redis";
import { getUserForJWT } from "../utils/jwt";
import * as metrics from "../utils/metrics";
import Sentry from "../utils/sentry";
const { can } = policy;
@@ -37,23 +36,23 @@ export default function init(app: Koa, server: http.Server) {
io.of("/").adapter.on("error", (err) => {
if (err.name === "MaxRetriesPerRequestError") {
console.error(`Redis error: ${err.message}. Shutting down now.`);
Logger.error("Redis maximum retries exceeded in socketio adapter", err);
throw err;
} else {
console.error(`Redis error: ${err.message}`);
Logger.error("Redis error in socketio adapter", err);
}
});
io.on("connection", (socket) => {
metrics.increment("websockets.connected");
metrics.gaugePerInstance(
Metrics.increment("websockets.connected");
Metrics.gaugePerInstance(
"websockets.count",
socket.client.conn.server.clientsCount
);
socket.on("disconnect", () => {
metrics.increment("websockets.disconnected");
metrics.gaugePerInstance(
Metrics.increment("websockets.disconnected");
Metrics.gaugePerInstance(
"websockets.count",
socket.client.conn.server.clientsCount
);
@@ -106,7 +105,7 @@ export default function init(app: Koa, server: http.Server) {
if (can(user, "read", collection)) {
socket.join(`collection-${event.collectionId}`, () => {
metrics.increment("websockets.collections.join");
Metrics.increment("websockets.collections.join");
});
}
}
@@ -127,7 +126,7 @@
);
socket.join(room, () => {
metrics.increment("websockets.documents.join");
Metrics.increment("websockets.documents.join");
// let everyone else in the room know that a new user joined
io.to(room).emit("user.join", {
@@ -139,14 +138,9 @@
// let this user know who else is already present in the room
io.in(room).clients(async (err, sockets) => {
if (err) {
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("clients", sockets);
Sentry.captureException(err);
});
} else {
console.error(err);
}
Logger.error("Error getting clients for room", err, {
sockets,
});
return;
}
@@ -173,13 +167,13 @@
socket.on("leave", (event) => {
if (event.collectionId) {
socket.leave(`collection-${event.collectionId}`, () => {
metrics.increment("websockets.collections.leave");
Metrics.increment("websockets.collections.leave");
});
}
if (event.documentId) {
const room = `document-${event.documentId}`;
socket.leave(room, () => {
metrics.increment("websockets.documents.leave");
Metrics.increment("websockets.documents.leave");
io.to(room).emit("user.leave", {
userId: user.id,
@@ -204,7 +198,7 @@
});
socket.on("presence", async (event) => {
metrics.increment("websockets.presence");
Metrics.increment("websockets.presence");
const room = `document-${event.documentId}`;
@@ -232,14 +226,7 @@
websocketsQueue.process(async function websocketEventsProcessor(job) {
const event = job.data;
websockets.on(event, io).catch((error) => {
if (env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("event", event);
Sentry.captureException(error);
});
} else {
throw error;
}
Logger.error("Error processing websocket event", error, { event });
});
});
}


@@ -1,7 +1,7 @@
// @flow
import http from "http";
import debug from "debug";
import Koa from "koa";
import Logger from "../logging/logger";
import {
globalEventQueue,
processorEventQueue,
@@ -16,9 +16,6 @@ import Imports from "../queues/processors/imports";
import Notifications from "../queues/processors/notifications";
import Revisions from "../queues/processors/revisions";
import Slack from "../queues/processors/slack";
import Sentry from "../utils/sentry";
const log = debug("queue");
const EmailsProcessor = new Emails();
@@ -46,24 +43,22 @@ export default function init(app: Koa, server?: http.Server) {
const event = job.data;
const processor = eventProcessors[event.service];
if (!processor) {
console.warn(
`Received event for processor that isn't registered (${event.service})`
);
Logger.warn(`Received event for processor that isn't registered`, event);
return;
}
if (processor.on) {
log(`${event.service} processing ${event.name}`);
Logger.info("processor", `${event.service} processing ${event.name}`, {
name: event.name,
modelId: event.modelId,
});
processor.on(event).catch((error) => {
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("event", event);
Sentry.captureException(error);
});
} else {
throw error;
}
Logger.error(
`Error processing ${event.name} in ${event.service}`,
error,
event
);
});
}
});
@@ -72,14 +67,11 @@ export default function init(app: Koa, server?: http.Server) {
const event = job.data;
EmailsProcessor.on(event).catch((error) => {
if (process.env.SENTRY_DSN) {
Sentry.withScope(function (scope) {
scope.setExtra("event", event);
Sentry.captureException(error);
});
} else {
throw error;
}
Logger.error(
`Error processing ${event.name} in emails processor`,
error,
event
);
});
});
}


@@ -1,9 +1,9 @@
// @flow
import querystring from "querystring";
import * as Sentry from "@sentry/node";
import { addMonths } from "date-fns";
import { type Context } from "koa";
import { pick } from "lodash";
import Logger from "../logging/logger";
import { User, Event, Team, Collection, View } from "../models";
import { getCookieDomain } from "../utils/domains";
@@ -37,8 +37,8 @@
["ref", "utm_content", "utm_medium", "utm_source", "utm_campaign"]
);
await team.update({ signupQueryParams });
} catch (err) {
Sentry.captureException(err);
} catch (error) {
Logger.error(`Error persisting signup query params`, error);
}
}
}


@@ -1,45 +0,0 @@
// @flow
import metrics from "datadog-metrics";
if (process.env.DD_API_KEY) {
metrics.init({
apiKey: process.env.DD_API_KEY,
prefix: "outline.",
defaultTags: [`env:${process.env.DD_ENV || process.env.NODE_ENV}`],
});
}
export function gauge(key: string, value: number, tags?: string[]): void {
if (!process.env.DD_API_KEY) {
return;
}
return metrics.gauge(key, value, tags);
}
export function gaugePerInstance(
key: string,
value: number,
tags?: string[] = []
): void {
if (!process.env.DD_API_KEY) {
return;
}
const instanceId = process.env.INSTANCE_ID || process.env.HEROKU_DYNO_ID;
if (!instanceId) {
throw new Error(
"INSTANCE_ID or HEROKU_DYNO_ID must be set when using Datadog"
);
}
return metrics.gauge(key, value, [...tags, `instance:${instanceId}`]);
}
export function increment(key: string, tags?: { [string]: string }): void {
if (!process.env.DD_API_KEY) {
return;
}
return metrics.increment(key, tags);
}


@@ -2,9 +2,8 @@
import Queue from "bull";
import Redis from "ioredis";
import { snakeCase } from "lodash";
import Metrics from "../logging/metrics";
import { client, subscriber } from "../redis";
import * as metrics from "../utils/metrics";
import Sentry from "./sentry";
export function createQueue(name: string) {
const prefix = `queue.${snakeCase(name)}`;
@@ -26,29 +25,24 @@ export function createQueue(name: string) {
});
queue.on("stalled", () => {
metrics.increment(`${prefix}.jobs.stalled`);
Metrics.increment(`${prefix}.jobs.stalled`);
});
queue.on("completed", () => {
metrics.increment(`${prefix}.jobs.completed`);
Metrics.increment(`${prefix}.jobs.completed`);
});
queue.on("error", (err) => {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err);
} else {
console.error(err);
}
metrics.increment(`${prefix}.jobs.errored`);
Metrics.increment(`${prefix}.jobs.errored`);
});
queue.on("failed", () => {
metrics.increment(`${prefix}.jobs.failed`);
Metrics.increment(`${prefix}.jobs.failed`);
});
setInterval(async () => {
metrics.gauge(`${prefix}.count`, await queue.count());
metrics.gauge(`${prefix}.delayed_count`, await queue.getDelayedCount());
Metrics.gauge(`${prefix}.count`, await queue.count());
Metrics.gauge(`${prefix}.delayed_count`, await queue.getDelayedCount());
}, 5 * 1000);
return queue;


@@ -1,10 +1,10 @@
// @flow
import crypto from "crypto";
import * as Sentry from "@sentry/node";
import AWS from "aws-sdk";
import { addHours, format } from "date-fns";
import fetch from "fetch-with-proxy";
import { v4 as uuidv4 } from "uuid";
import Logger from "../logging/logger";
const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
@@ -147,15 +147,11 @@
const endpoint = publicS3Endpoint(true);
return `${endpoint}/${key}`;
} catch (err) {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err, {
extra: {
url,
},
});
} else {
throw err;
}
Logger.error("Error uploading to S3 from URL", err, {
url,
key,
acl,
});
}
};
@@ -198,10 +194,8 @@ export const getFileByKey = async (key: string) => {
const data = await s3.getObject(params).promise();
return data.Body;
} catch (err) {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err);
} else {
throw err;
}
Logger.error("Error getting file from S3 by key", err, {
key,
});
}
};


@@ -1,5 +1,6 @@
// @flow
import chalk from "chalk";
import Logger from "../logging/logger";
import { Team, AuthenticationProvider } from "../models";
export async function checkMigrations() {
@@ -11,12 +12,14 @@ export async function checkMigrations() {
const providers = await AuthenticationProvider.count();
if (teams && !providers) {
console.error(`
Logger.warn(
`
This version of Outline cannot start until a data migration is complete.
Backup your database, run the database migrations and the following script:
$ node ./build/server/scripts/20210226232041-migrate-authentication.js
`);
`
);
process.exit(1);
}
}
@@ -92,18 +95,16 @@ export function checkEnv() {
}
if (errors.length) {
console.log(
chalk.bold.red(
"\n\nThe server could not start, please fix the following configuration errors and try again:\n"
)
Logger.warn(
"\n\nThe server could not start, please fix the following configuration errors and try again:\n" +
errors.map((e) => `- ${e}`).join("\n")
);
errors.map((text) => console.log(` - ${text}`));
console.log("\n");
process.exit(1);
}
if (process.env.NODE_ENV === "production") {
console.log(
Logger.info(
"lifecycle",
chalk.green(
`
Is your team enjoying Outline? Consider supporting future development by sponsoring the project:\n\nhttps://github.com/sponsors/outline
@@ -111,12 +112,12 @@ Is your team enjoying Outline? Consider supporting future development by sponsor
)
);
} else if (process.env.NODE_ENV === "development") {
console.log(
chalk.yellow(
`\nRunning Outline in development mode. To run Outline in production mode set the ${chalk.bold(
"NODE_ENV"
)} env variable to "production"\n`
)
Logger.warn(
`Running Outline in ${chalk.bold(
"development mode"
)}. To run Outline in production mode set the ${chalk.bold(
"NODE_ENV"
)} env variable to "production"`
);
}
}


@@ -1,8 +1,8 @@
// @flow
import fs from "fs";
import * as Sentry from "@sentry/node";
import JSZip from "jszip";
import tmp from "tmp";
import Logger from "../logging/logger";
import { Attachment, Collection, Document } from "../models";
import { serializeFilename } from "./fs";
import { getFileByKey } from "./s3";
@@ -47,11 +47,9 @@ async function addImageToArchive(zip, key) {
const img = await getFileByKey(key);
zip.file(key, img, { createFolders: true });
} catch (err) {
if (process.env.SENTRY_DSN) {
Sentry.captureException(err);
}
// error during file retrieval
console.error(err);
Logger.error("Error loading image attachment from S3", err, {
key,
});
}
}