refactor: documents.batchImport -> collections.import

Tom Moor
2020-12-28 18:51:12 -08:00
parent d79933887d
commit caee7afde2
10 changed files with 72 additions and 72 deletions

View File

@@ -19,7 +19,7 @@ function ImportExport() {
   const { t } = useTranslation();
   const user = useCurrentUser();
   const fileRef = React.useRef();
-  const { ui, collections, documents } = useStores();
+  const { ui, collections } = useStores();
   const { showToast } = ui;
   const [isLoading, setLoading] = React.useState(false);
   const [isImporting, setImporting] = React.useState(false);
@@ -34,7 +34,7 @@ function ImportExport() {
       setImporting(true);
       try {
-        const { documentCount, collectionCount } = await documents.batchImport(
+        const { documentCount, collectionCount } = await collections.import(
           file
         );
         showToast(t("Import completed"));
@@ -50,7 +50,7 @@ function ImportExport() {
         setImportDetails(undefined);
       }
     },
-    [t, file, documents, showToast]
+    [t, file, collections, showToast]
   );
   const handleFilePicked = React.useCallback(async (ev) => {

View File

@@ -1,6 +1,7 @@
 // @flow
+import invariant from "invariant";
 import { concat, filter, last } from "lodash";
-import { computed } from "mobx";
+import { action, computed } from "mobx";
 import naturalSort from "shared/utils/naturalSort";
 import Collection from "models/Collection";
@@ -88,6 +89,21 @@ export default class CollectionsStore extends BaseStore<Collection> {
     });
   }
+  @action
+  import = async (file: File) => {
+    const formData = new FormData();
+    formData.append("type", "outline");
+    formData.append("file", file);
+    const res = await client.post("/collections.import", formData);
+    invariant(res && res.data, "Data should be available");
+    this.addPolicies(res.policies);
+    res.data.collections.forEach(this.add);
+    return res.data;
+  };
   getPathForDocument(documentId: string): ?DocumentPath {
     return this.pathsToDocuments.find((path) => path.id === documentId);
   }

View File

@@ -497,21 +497,6 @@ export default class DocumentsStore extends BaseStore<Document> {
     return this.add(res.data);
   };
-  @action
-  batchImport = async (file: File) => {
-    const formData = new FormData();
-    formData.append("type", "outline");
-    formData.append("file", file);
-    const res = await client.post("/documents.batchImport", formData);
-    invariant(res && res.data, "Data should be available");
-    this.addPolicies(res.policies);
-    res.data.collections.forEach(this.rootStore.collections.add);
-    return res.data;
-  };
   @action
   import = async (
     file: File,

View File

@@ -1,7 +1,8 @@
 // @flow
 import fs from "fs";
 import Router from "koa-router";
-import { ValidationError } from "../errors";
+import collectionImporter from "../commands/collectionImporter";
+import { ValidationError, InvalidRequestError } from "../errors";
 import { exportCollections } from "../logistics";
 import auth from "../middlewares/authentication";
 import {
@@ -89,6 +90,44 @@ router.post("collections.info", auth(), async (ctx) => {
   };
 });
+router.post("collections.import", auth(), async (ctx) => {
+  const { type } = ctx.body;
+  ctx.assertIn(type, ["outline"], "type must be one of 'outline'");
+  if (!ctx.is("multipart/form-data")) {
+    throw new InvalidRequestError("Request type must be multipart/form-data");
+  }
+  const file: any = Object.values(ctx.request.files)[0];
+  ctx.assertPresent(file, "file is required");
+  if (file.type !== "application/zip") {
+    throw new InvalidRequestError("File type must be a zip");
+  }
+  const user = ctx.state.user;
+  authorize(user, "import", Collection);
+  const { documents, attachments, collections } = await collectionImporter({
+    file,
+    user,
+    type,
+    ip: ctx.request.ip,
+  });
+  ctx.body = {
+    data: {
+      attachmentCount: attachments.length,
+      documentCount: documents.length,
+      collectionCount: collections.length,
+      collections: collections.map((collection) =>
+        presentCollection(collection)
+      ),
+    },
+    policies: presentPolicies(user, collections),
+  };
+});
 router.post("collections.add_group", auth(), async (ctx) => {
   const { id, groupId, permission = "read_write" } = ctx.body;
   ctx.assertUuid(id, "id is required");
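
For reference, a minimal sketch of calling the new endpoint directly from JavaScript. It is not part of the diff: the /api prefix, the bearer-token header, and the zipFile/apiToken names are assumptions based on how Outline's other API routes are commonly called.

// Hedged sketch: POST an Outline export zip to collections.import.
const body = new FormData();
body.append("type", "outline");
body.append("file", zipFile); // assumed: a File/Blob containing the export, type application/zip

const res = await fetch("/api/collections.import", {
  method: "POST",
  headers: { Authorization: `Bearer ${apiToken}` }, // assumed auth scheme
  body,
});
const { data } = await res.json();
// data: { attachmentCount, documentCount, collectionCount, collections }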

View File

@@ -2,7 +2,6 @@
 import Router from "koa-router";
 import Sequelize from "sequelize";
 import { subtractDate } from "../../shared/utils/date";
-import documentBatchImporter from "../commands/documentBatchImporter";
 import documentCreator from "../commands/documentCreator";
 import documentImporter from "../commands/documentImporter";
 import documentMover from "../commands/documentMover";
@@ -1106,44 +1105,6 @@ router.post("documents.unpublish", auth(), async (ctx) => {
   };
 });
-router.post("documents.batchImport", auth(), async (ctx) => {
-  const { type } = ctx.body;
-  ctx.assertIn(type, ["outline"], "type must be one of 'outline'");
-  if (!ctx.is("multipart/form-data")) {
-    throw new InvalidRequestError("Request type must be multipart/form-data");
-  }
-  const file: any = Object.values(ctx.request.files)[0];
-  ctx.assertPresent(file, "file is required");
-  if (file.type !== "application/zip") {
-    throw new InvalidRequestError("File type must be a zip");
-  }
-  const user = ctx.state.user;
-  authorize(user, "batchImport", Document);
-  const { documents, attachments, collections } = await documentBatchImporter({
-    file,
-    user,
-    type,
-    ip: ctx.request.ip,
-  });
-  ctx.body = {
-    data: {
-      attachmentCount: attachments.length,
-      documentCount: documents.length,
-      collectionCount: collections.length,
-      collections: collections.map((collection) =>
-        presentCollection(collection)
-      ),
-    },
-    policies: presentPolicies(user, collections),
-  };
-});
 router.post("documents.import", auth(), async (ctx) => {
   const { publish, collectionId, parentDocumentId, index } = ctx.body;

View File

@@ -16,7 +16,7 @@ import documentImporter from "./documentImporter";
 const log = debug("commands");
-export default async function documentBatchImporter({
+export default async function collectionImporter({
   file,
   type,
   user,

View File

@@ -4,13 +4,13 @@ import File from "formidable/lib/file";
 import { Attachment, Document, Collection } from "../models";
 import { buildUser } from "../test/factories";
 import { flushdb } from "../test/support";
-import documentBatchImporter from "./documentBatchImporter";
+import collectionImporter from "./collectionImporter";
 jest.mock("../utils/s3");
 beforeEach(() => flushdb());
-describe("documentBatchImporter", () => {
+describe("collectionImporter", () => {
   const ip = "127.0.0.1";
   it("should import documents in outline format", async () => {
@@ -22,7 +22,7 @@ describe("documentBatchImporter", () => {
       path: path.resolve(__dirname, "..", "test", "fixtures", name),
     });
-    const response = await documentBatchImporter({
+    const response = await collectionImporter({
       type: "outline",
       user,
       file,
@@ -49,7 +49,7 @@ describe("documentBatchImporter", () => {
     let error;
     try {
-      await documentBatchImporter({
+      await collectionImporter({
         type: "outline",
         user,
         file,
@@ -73,7 +73,7 @@ describe("documentBatchImporter", () => {
     let error;
     try {
-      await documentBatchImporter({
+      await collectionImporter({
        type: "outline",
        user,
        file,

View File

@@ -9,6 +9,11 @@ const { allow } = policy;
 allow(User, "create", Collection);
+allow(User, "import", Collection, (actor) => {
+  if (actor.isAdmin) return true;
+  throw new AdminRequiredError();
+});
 allow(User, ["read", "export"], Collection, (user, collection) => {
   if (!collection || user.teamId !== collection.teamId) return false;
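
A short hedged sketch of what the new policy means in practice; it is not part of the diff, and the import paths and the adminUser/memberUser instances are assumptions:

// Sketch: the "import" ability is gated to admins and checked through the same
// authorize helper used in the collections.import route above.
import { Collection } from "../models"; // assumed path
import { authorize } from "../policies"; // assumed export

authorize(adminUser, "import", Collection);  // passes: actor.isAdmin is true
authorize(memberUser, "import", Collection); // throws AdminRequiredError for everyone else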

View File

@@ -1,6 +1,5 @@
 // @flow
 import invariant from "invariant";
-import { AdminRequiredError } from "../errors";
 import { Document, Revision, User } from "../models";
 import policy from "./policy";
@@ -8,11 +7,6 @@ const { allow, cannot } = policy;
 allow(User, "create", Document);
-allow(User, "batchImport", Document, (actor) => {
-  if (actor.isAdmin) return true;
-  throw new AdminRequiredError();
-});
 allow(User, ["read", "download"], Document, (user, document) => {
   // existence of collection option is not required here to account for share tokens
   if (document.collection && cannot(user, "read", document.collection)) {

View File

@@ -311,4 +311,4 @@
   "Suspended": "Suspended",
   "Edit Profile": "Edit Profile",
   "{{ userName }} hasn’t updated any documents yet.": "{{ userName }} hasn’t updated any documents yet."
 }