stash
This commit is contained in:
parent
5012104a10
commit
b98e4bb1ff
|
@ -2,6 +2,7 @@
|
|||
import Router from "koa-router";
|
||||
import Sequelize from "sequelize";
|
||||
import { subtractDate } from "../../shared/utils/date";
|
||||
import documentBatchImporter from "../commands/documentBatchImporter";
|
||||
import documentCreator from "../commands/documentCreator";
|
||||
import documentImporter from "../commands/documentImporter";
|
||||
import documentMover from "../commands/documentMover";
|
||||
|
@ -1105,13 +1106,38 @@ router.post("documents.unpublish", auth(), async (ctx) => {
|
|||
};
|
||||
});
|
||||
|
||||
router.post("documents.batchImport", auth(), async (ctx) => {
|
||||
const { type } = ctx.body;
|
||||
ctx.assertIn(type, ["outline"], "type must be one of 'outline'");
|
||||
|
||||
if (!ctx.is("multipart/form-data")) {
|
||||
throw new InvalidRequestError("Request type must be multipart/form-data");
|
||||
}
|
||||
|
||||
const file: any = Object.values(ctx.request.files)[0];
|
||||
ctx.assertPresent(file, "file is required");
|
||||
|
||||
if (file.type !== "application/zip") {
|
||||
throw new InvalidRequestError("File type must be a zip");
|
||||
}
|
||||
|
||||
const user = ctx.state.user;
|
||||
authorize(user, "batchImport", Document);
|
||||
|
||||
await documentBatchImporter({
|
||||
file,
|
||||
user,
|
||||
type,
|
||||
ip: ctx.request.ip,
|
||||
});
|
||||
|
||||
ctx.body = {
|
||||
success: true,
|
||||
};
|
||||
});
|
||||
|
||||
router.post("documents.import", auth(), async (ctx) => {
|
||||
const {
|
||||
publish,
|
||||
collectionId,
|
||||
parentDocumentId,
|
||||
index,
|
||||
} = ctx.body;
|
||||
const { publish, collectionId, parentDocumentId, index } = ctx.body;
|
||||
|
||||
if (!ctx.is("multipart/form-data")) {
|
||||
throw new InvalidRequestError("Request type must be multipart/form-data");
|
||||
|
|
|
@ -0,0 +1,99 @@
|
|||
// @flow
|
||||
import fs from "fs";
|
||||
import File from "formidable/lib/file";
|
||||
import JSZip from "jszip";
|
||||
import { Collection, User } from "../models";
|
||||
import documentCreator from "./documentCreator";
|
||||
import documentImporter from "./documentImporter";
|
||||
|
||||
export default async function documentBatchImporter({
|
||||
file,
|
||||
type,
|
||||
user,
|
||||
ip,
|
||||
}: {
|
||||
file: File,
|
||||
user: User,
|
||||
type: "outline",
|
||||
ip: string,
|
||||
}) {
|
||||
const zipData = await fs.promises.readFile(file.path, "utf8");
|
||||
const zip = await JSZip.loadAsync(zipData);
|
||||
|
||||
async function ingestDocuments(
|
||||
zip: JSZip,
|
||||
collectionId: string,
|
||||
parentDocumentId?: string
|
||||
) {
|
||||
const documents = [];
|
||||
|
||||
// TODO: attachments
|
||||
|
||||
// 2 passes, one for documents and then second for their nested documents
|
||||
zip.forEach(async function (filePath, item) {
|
||||
if (item.dir) return;
|
||||
|
||||
const fileData = await item.async("blob");
|
||||
const file = new File([fileData], item.name);
|
||||
|
||||
const { text, title } = await documentImporter({
|
||||
file,
|
||||
user,
|
||||
ip,
|
||||
});
|
||||
|
||||
const document = await documentCreator({
|
||||
title,
|
||||
text,
|
||||
publish: true,
|
||||
collectionId,
|
||||
parentDocumentId,
|
||||
user,
|
||||
ip,
|
||||
});
|
||||
|
||||
// Keep track of which documents have been created
|
||||
documents.push(document);
|
||||
});
|
||||
|
||||
zip.forEach(async function (filePath, item) {
|
||||
// treat items in here as nested documents
|
||||
if (!item.dir) return;
|
||||
if (item.name === "uploads") return;
|
||||
|
||||
const document = documents.find((doc) => doc.title === item.name);
|
||||
if (!document) {
|
||||
console.log(
|
||||
`Couldn't find a matching parent document for folder ${item.name}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// ensure document is created first, get parentDocumentId
|
||||
await ingestDocuments(zip.folder(filePath), collectionId, document.id);
|
||||
});
|
||||
}
|
||||
|
||||
zip.forEach(async function (folderPath, item) {
|
||||
// all top level items must be directories representing collections
|
||||
console.log("iterating over", folderPath);
|
||||
|
||||
// treat this as a collection
|
||||
if (item.dir) {
|
||||
// create collection if a collection with this name doesn't exist
|
||||
const [collection, isCreated] = await Collection.findOrCreate({
|
||||
where: {
|
||||
teamId: user.teamId,
|
||||
name: item.name,
|
||||
},
|
||||
defaults: {
|
||||
private: false,
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`Collection ${item.name} ${isCreated ? "created" : "found"}`);
|
||||
|
||||
await ingestDocuments(zip.folder(folderPath), collection.id);
|
||||
}
|
||||
});
|
||||
}
|
|
@ -0,0 +1,31 @@
|
|||
// @flow
|
||||
import path from "path";
|
||||
import File from "formidable/lib/file";
|
||||
import { buildUser } from "../test/factories";
|
||||
import { flushdb } from "../test/support";
|
||||
import documentBatchImporter from "./documentBatchImporter";
|
||||
|
||||
jest.mock("../utils/s3");
|
||||
|
||||
beforeEach(() => flushdb());
|
||||
|
||||
describe("documentBatchImporter", () => {
  const ip = "127.0.0.1";

  it("should import documents in outline format", async () => {
    const user = await buildUser();

    // Point a formidable File at the zip fixture checked into the repo.
    const name = "outline.zip";
    const zipPath = path.resolve(__dirname, "..", "test", "fixtures", name);
    const file = new File({
      name,
      type: "application/zip",
      path: zipPath,
    });

    // Smoke test: the importer should complete without throwing.
    await documentBatchImporter({ type: "outline", user, file, ip });
  });
});
|
|
@ -12,7 +12,7 @@ export default async function documentCreator({
|
|||
index,
|
||||
user,
|
||||
editorVersion,
|
||||
ip
|
||||
ip,
|
||||
}: {
|
||||
title: string,
|
||||
text: string,
|
||||
|
@ -24,7 +24,7 @@ export default async function documentCreator({
|
|||
index?: number,
|
||||
user: User,
|
||||
editorVersion?: string,
|
||||
ip: string
|
||||
ip: string,
|
||||
}): Document {
|
||||
const templateId = templateDocument ? templateDocument.id : undefined;
|
||||
let document = await Document.create({
|
||||
|
@ -71,4 +71,4 @@ export default async function documentCreator({
|
|||
return Document.findOne({
|
||||
where: { id: document.id, publishedAt: document.publishedAt },
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
// @flow
|
||||
import invariant from "invariant";
|
||||
import { AdminRequiredError } from "../errors";
|
||||
import { Document, Revision, User } from "../models";
|
||||
import policy from "./policy";
|
||||
|
||||
|
@ -7,6 +8,11 @@ const { allow, cannot } = policy;
|
|||
|
||||
allow(User, "create", Document);
|
||||
|
||||
// Batch import is an admin-only capability; non-admins get an explicit
// AdminRequiredError rather than a silent denial.
allow(User, "batchImport", Document, (actor) => {
  if (!actor.isAdmin) throw new AdminRequiredError();
  return true;
});
|
||||
|
||||
allow(User, ["read", "download"], Document, (user, document) => {
|
||||
// existence of collection option is not required here to account for share tokens
|
||||
if (document.collection && cannot(user, "read", document.collection)) {
|
||||
|
|
Reference in New Issue