refactor: Upload file to storage, and then pass attachmentId to collections.import

This avoids having large file uploads go directly to the server and allows us to fetch the file asynchronously in a worker process.
Tom Moor
2021-02-18 22:36:07 -08:00
parent 568e271738
commit df233c95a9
12 changed files with 70 additions and 54 deletions
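
At a high level, the client now uploads the zip to storage first and then calls collections.import with only the attachment id; the server fetches the file from storage when it runs the import. A minimal sketch of the new client-side sequence, assuming the uploadFile helper and collections store shown in the diffs below (error handling and component state omitted):

// @flow
// Sketch only: simplified from the ImportExport component change below.
import invariant from "invariant";
import { uploadFile } from "utils/uploadFile";

async function importZip(
  file: ?File,
  collections: { import: (attachmentId: string) => Promise<void> }
) {
  invariant(file, "File must exist to upload");

  // 1. Upload the zip to storage; the attachments.create response now
  //    includes the attachment id alongside the upload URL.
  const attachment = await uploadFile(file, { name: file.name });

  // 2. Send only the attachment id to the API, so the import request
  //    carries no file payload and the server can process it async.
  await collections.import(attachment.id);
}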

View File

@@ -1,4 +1,5 @@
// @flow
+ import invariant from "invariant";
import { observer } from "mobx-react";
import { CollectionIcon } from "outline-icons";
import * as React from "react";
@@ -14,6 +15,7 @@ import PageTitle from "components/PageTitle";
import useCurrentUser from "hooks/useCurrentUser";
import useStores from "hooks/useStores";
import getDataTransferFiles from "utils/getDataTransferFiles";
+ import { uploadFile } from "utils/uploadFile";

function ImportExport() {
  const { t } = useTranslation();
@@ -23,7 +25,7 @@ function ImportExport() {
  const { showToast } = ui;
  const [isLoading, setLoading] = React.useState(false);
  const [isImporting, setImporting] = React.useState(false);
- const [importedDetails, setImported] = React.useState(false);
+ const [isImported, setImported] = React.useState(false);
  const [isExporting, setExporting] = React.useState(false);
  const [file, setFile] = React.useState();
  const [importDetails, setImportDetails] = React.useState();
@@ -34,11 +36,13 @@ function ImportExport() {
      setImporting(true);

      try {
-       const { documentCount, collectionCount } = await collections.import(
-         file
-       );
-       showToast(t("Import completed"));
-       setImported({ documentCount, collectionCount });
+       invariant(file, "File must exist to upload");
+       const attachment = await uploadFile(file, {
+         name: file.name,
+       });
+       await collections.import(attachment.id);
+       showToast(t("Import started"));
+       setImported(true);
      } catch (err) {
        showToast(err.message);
      } finally {
@@ -121,14 +125,11 @@ function ImportExport() {
          accept="application/zip"
        />
      </VisuallyHidden>
-     {importedDetails && (
+     {isImported && (
        <Notice>
-         <Trans
-           count={importedDetails.documentCount}
-           i18nKey="importSuccessful"
-         >
-           Import successful, {{ count: importedDetails.documentCount }}{" "}
-           documents were imported to your knowledge base.
-         </Trans>
+         <Trans>
+           Your file has been uploaded and the import is being processed, you
+           can safely leave this page.
+         </Trans>
        </Notice>
      )}

View File

@@ -1,5 +1,4 @@
// @flow
- import invariant from "invariant";
import { concat, filter, last } from "lodash";
import { computed, action } from "mobx";
import naturalSort from "shared/utils/naturalSort";
@@ -89,18 +88,11 @@ export default class CollectionsStore extends BaseStore<Collection> {
  }

  @action
- import = async (file: File) => {
-   const formData = new FormData();
-   formData.append("type", "outline");
-   formData.append("file", file);
-   const res = await client.post("/collections.import", formData);
-   invariant(res && res.data, "Data should be available");
-   this.addPolicies(res.policies);
-   res.data.collections.forEach(this.add);
-   return res.data;
+ import = async (attachmentId: string) => {
+   await client.post("/collections.import", {
+     type: "outline",
+     attachmentId,
+   });
  };

  async update(params: Object): Promise<Collection> {

View File

@@ -39,11 +39,13 @@ export const uploadFile = async (
    formData.append("file", file);
  }

- await fetch(data.uploadUrl, {
+ const uploadResponse = await fetch(data.uploadUrl, {
    method: "post",
    body: formData,
  });

+ invariant(uploadResponse.ok, "Upload failed, try again?");

  return attachment;
};

View File

@@ -38,7 +38,7 @@ router.post("attachments.create", auth(), async (ctx) => {
  const key = `${bucket}/${user.id}/${s3Key}/${name}`;
  const credential = makeCredential();
  const longDate = format(new Date(), "YYYYMMDDTHHmmss\\Z");
- const policy = makePolicy(credential, longDate, acl);
+ const policy = makePolicy(credential, longDate, acl, contentType);
  const endpoint = publicS3Endpoint();
  const url = `${endpoint}/${key}`;
@@ -85,6 +85,7 @@ router.post("attachments.create", auth(), async (ctx) => {
      documentId,
      contentType,
      name,
+     id: attachment.id,
      url: attachment.redirectUrl,
      size,
    },

View File

@@ -1,8 +1,10 @@
// @flow
import fs from "fs";
+ import os from "os";
+ import File from "formidable/lib/file";
import Router from "koa-router";
import collectionImporter from "../commands/collectionImporter";
- import { ValidationError, InvalidRequestError } from "../errors";
+ import { ValidationError } from "../errors";
import { exportCollections } from "../logistics";
import auth from "../middlewares/authentication";
import {
@@ -13,6 +15,7 @@ import {
  Event,
  User,
  Group,
+ Attachment,
} from "../models";
import policy from "../policies";
import {
@@ -100,23 +103,27 @@ router.post("collections.info", auth(), async (ctx) => {
});

router.post("collections.import", auth(), async (ctx) => {
- const { type } = ctx.body;
+ const { type, attachmentId } = ctx.body;
  ctx.assertIn(type, ["outline"], "type must be one of 'outline'");
+ ctx.assertUuid(attachmentId, "attachmentId is required");

- if (!ctx.is("multipart/form-data")) {
-   throw new InvalidRequestError("Request type must be multipart/form-data");
- }
- const file: any = Object.values(ctx.request.files)[0];
- ctx.assertPresent(file, "file is required");
- if (file.type !== "application/zip") {
-   throw new InvalidRequestError("File type must be a zip");
- }
  const user = ctx.state.user;
  authorize(user, "import", Collection);

+ const attachment = await Attachment.findByPk(attachmentId);
+ authorize(user, "read", attachment);
+ const buffer = await attachment.buffer;
+ const tmpDir = os.tmpdir();
+ const tmpFilePath = `${tmpDir}/upload-${attachmentId}`;
+ await fs.promises.writeFile(tmpFilePath, buffer);
+ const file = new File({
+   name: attachment.name,
+   type: attachment.type,
+   path: tmpFilePath,
+ });

  const { documents, attachments, collections } = await collectionImporter({
    file,
    user,

View File

@@ -111,7 +111,7 @@ describe("#collections.list", () => {
});

describe("#collections.import", () => {
- it("should error if no file is passed", async () => {
+ it("should error if no attachmentId is passed", async () => {
    const user = await buildUser();
    const res = await server.post("/api/collections.import", {
      body: {

View File

@@ -9,7 +9,7 @@ import { values, keys } from "lodash";
import uuid from "uuid";
import { parseOutlineExport } from "../../shared/utils/zip";
import { FileImportError } from "../errors";
- import { Attachment, Document, Collection, User } from "../models";
+ import { Attachment, Event, Document, Collection, User } from "../models";
import attachmentCreator from "./attachmentCreator";
import documentCreator from "./documentCreator";
import documentImporter from "./documentImporter";
@@ -66,12 +66,21 @@ export default async function collectionImporter({
    // there is also a "Name (Imported)" but this is a case not worth dealing
    // with right now
    if (!isCreated) {
+     const name = `${item.name} (Imported)`;
      collection = await Collection.create({
        teamId: user.teamId,
        creatorId: user.id,
-       name: `${item.name} (Imported)`,
+       name,
        private: false,
      });
+     await Event.create({
+       name: "collections.create",
+       collectionId: collection.id,
+       teamId: collection.teamId,
+       actorId: user.id,
+       data: { name },
+       ip,
+     });
    }

    collections[item.path] = collection;

View File

@@ -1,7 +1,7 @@
// @flow
import path from "path";
import { DataTypes, sequelize } from "../sequelize";
- import { deleteFromS3 } from "../utils/s3";
+ import { deleteFromS3, getFileByKey } from "../utils/s3";

const Attachment = sequelize.define(
  "attachment",
@@ -47,6 +47,9 @@ const Attachment = sequelize.define(
      isPrivate: function () {
        return this.acl === "private";
      },
+     buffer: function () {
+       return getFileByKey(this.key);
+     },
    },
  }
);

View File

@@ -6,7 +6,7 @@ const { allow } = policy;
allow(User, "create", Attachment);

- allow(User, "delete", Attachment, (actor, attachment) => {
+ allow(User, ["read", "delete"], Attachment, (actor, attachment) => {
  if (!attachment || attachment.teamId !== actor.teamId) return false;
  if (actor.isAdmin) return true;
  if (actor.id === attachment.userId) return true;

View File

@@ -46,7 +46,8 @@ export const makeCredential = () => {
export const makePolicy = (
  credential: string,
  longDate: string,
- acl: string
+ acl: string,
+ contentType: string = "image"
) => {
  const tomorrow = addHours(new Date(), 24);
  const policy = {
@@ -55,7 +56,7 @@ export const makePolicy = (
      ["starts-with", "$key", ""],
      { acl },
      ["content-length-range", 0, +process.env.AWS_S3_UPLOAD_MAX_SIZE],
-     ["starts-with", "$Content-Type", "image"],
+     ["starts-with", "$Content-Type", contentType],
      ["starts-with", "$Cache-Control", ""],
      { "x-amz-algorithm": "AWS4-HMAC-SHA256" },
      { "x-amz-credential": credential },
@@ -177,7 +178,7 @@ export const getSignedImageUrl = async (key: string) => {
    : s3.getSignedUrl("getObject", params);
};

- export const getImageByKey = async (key: string) => {
+ export const getFileByKey = async (key: string) => {
  const params = {
    Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
    Key: key,

View File

@@ -4,7 +4,7 @@ import * as Sentry from "@sentry/node";
import JSZip from "jszip";
import tmp from "tmp";
import { Attachment, Collection, Document } from "../models";
- import { getImageByKey } from "./s3";
+ import { getFileByKey } from "./s3";

async function addToArchive(zip, documents) {
  for (const doc of documents) {
@@ -38,7 +38,7 @@ async function addToArchive(zip, documents) {
async function addImageToArchive(zip, key) {
  try {
-   const img = await getImageByKey(key);
+   const img = await getFileByKey(key);
    zip.file(key, img, { createFolders: true });
  } catch (err) {
    if (process.env.SENTRY_DSN) {

View File

@@ -312,12 +312,12 @@
  "It is possible to import a zip file of folders and Markdown files previously exported from an Outline instance. Support will soon be added for importing from other services.": "It is possible to import a zip file of folders and Markdown files previously exported from an Outline instance. Support will soon be added for importing from other services.",
  "importSuccessful": "Import successful, {{ count }} document was imported into your knowledge base.",
  "importSuccessful_plural": "Import successful, {{ count }} documents were imported into your knowledge base.",
- "Sorry, the file <1>{{fileName}}</1> is missing valid collections or documents.": "Sorry, the file <1>{{fileName}}</1> is missing valid collections or documents.",
- "<0>{{fileName}}</0> looks good, the following collections and their documents will be imported:": "<0>{{fileName}}</0> looks good, the following collections and their documents will be imported:",
+ "Sorry, the file <em>{{ fileName }}</em> is missing valid collections or documents.": "Sorry, the file <em>{{ fileName }}</em> is missing valid collections or documents.",
+ "<em>{{ fileName }}</em> looks good, the following collections and their documents will be imported:": "<em>{{ fileName }}</em> looks good, the following collections and their documents will be imported:",
  "Importing": "Importing",
  "Confirm & Import": "Confirm & Import",
  "Choose File…": "Choose File…",
- "A full export might take some time, consider exporting a single document or collection if possible. We'll put together a zip of all your documents in Markdown format and email it to <2>{{userEmail}}</2>.": "A full export might take some time, consider exporting a single document or collection if possible. We'll put together a zip of all your documents in Markdown format and email it to <2>{{userEmail}}</2>.",
+ "A full export might take some time, consider exporting a single document or collection if possible. We'll put together a zip of all your documents in Markdown format and email it to <em>{{ userEmail }}</em>.": "A full export might take some time, consider exporting a single document or collection if possible. We'll put together a zip of all your documents in Markdown format and email it to <em>{{ userEmail }}</em>.",
  "Export Requested": "Export Requested",
  "Requesting Export": "Requesting Export",
  "Export Data": "Export Data",