refactor, add preview
@@ -1,10 +1,14 @@
 // @flow
 import { observer } from "mobx-react";
+import { CollectionIcon } from "outline-icons";
 import * as React from "react";
 import { useTranslation, Trans } from "react-i18next";
+import styled from "styled-components";
+import { parseOutlineExport } from "shared/utils/zip";
 import Button from "components/Button";
 import CenteredContent from "components/CenteredContent";
 import HelpText from "components/HelpText";
+import Notice from "components/Notice";
 import PageTitle from "components/PageTitle";
 import VisuallyHidden from "components/VisuallyHidden";
 import useCurrentUser from "hooks/useCurrentUser";
@@ -20,14 +24,14 @@ function ImportExport() {
   const [isLoading, setLoading] = React.useState(false);
   const [isImporting, setImporting] = React.useState(false);
   const [isExporting, setExporting] = React.useState(false);
+  const [file, setFile] = React.useState();
+  const [importDetails, setImportDetails] = React.useState();
 
-  const handleFilePicked = React.useCallback(
+  const handleImport = React.useCallback(
     async (ev) => {
-      const files = getDataTransferFiles(ev);
       setImporting(true);
 
       try {
-        const file = files[0];
         await documents.batchImport(file);
         showToast(t("Import completed"));
       } catch (err) {
@@ -37,16 +41,37 @@ function ImportExport() {
           fileRef.current.value = "";
         }
         setImporting(false);
+        setFile(undefined);
+        setImportDetails(undefined);
       }
     },
-    [t, documents, showToast]
+    [t, file, documents, showToast]
   );
 
-  const handleImport = React.useCallback(() => {
-    if (fileRef.current) {
-      fileRef.current.click();
+  const handleFilePicked = React.useCallback(async (ev) => {
+    ev.preventDefault();
+
+    const files = getDataTransferFiles(ev);
+    const file = files[0];
+    setFile(file);
+
+    try {
+      setImportDetails(await parseOutlineExport(file));
+    } catch (err) {
+      setImportDetails([]);
     }
-  }, [fileRef]);
+  }, []);
+
+  const handlePickFile = React.useCallback(
+    (ev) => {
+      ev.preventDefault();
+
+      if (fileRef.current) {
+        fileRef.current.click();
+      }
+    },
+    [fileRef]
+  );
 
   const handleExport = React.useCallback(
     async (ev: SyntheticEvent<>) => {
@@ -64,6 +89,14 @@ function ImportExport() {
     [t, collections, showToast]
   );
 
+  const hasCollections = importDetails
+    ? !!importDetails.filter((detail) => detail.type === "collection").length
+    : false;
+  const hasDocuments = importDetails
+    ? !!importDetails.filter((detail) => detail.type === "document").length
+    : false;
+  const isImportable = hasCollections && hasDocuments;
+
   return (
     <CenteredContent>
       <PageTitle title={t("Import / Export")} />
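Aside (editorial, not part of the commit): the gating above only enables the confirm step when the parsed file contains at least one collection and at least one document. A minimal standalone sketch of that check, assuming an items array shaped like the Item type added in shared/utils/zip.js further below:

// Standalone illustration of the isImportable check above (not committed code).
function isImportable(items) {
  if (!items) return false;
  const hasCollections = !!items.filter((detail) => detail.type === "collection").length;
  const hasDocuments = !!items.filter((detail) => detail.type === "document").length;
  return hasCollections && hasDocuments;
}

// Example usage with hand-written items:
console.log(isImportable([{ type: "collection", name: "Engineering" }])); // false – no documents
console.log(
  isImportable([
    { type: "collection", name: "Engineering" },
    { type: "document", name: "Welcome.md" },
  ])
); // true – at least one collection and one document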
@@ -83,14 +116,46 @@ function ImportExport() {
             accept="application/zip"
           />
         </VisuallyHidden>
-        <Button
-          type="submit"
-          onClick={handleImport}
-          disabled={isImporting}
-          primary
-        >
-          {isImporting ? `${t("Importing")}…` : t("Import Data")}
-        </Button>
+        {file && !isImportable && (
+          <ImportPreview>
+            <Trans>
+              Sorry, the file <strong>{{ fileName: file.name }}</strong> is
+              missing valid collections or documents.
+            </Trans>
+          </ImportPreview>
+        )}
+        {file && importDetails && isImportable ? (
+          <>
+            <ImportPreview>
+              <Trans>
+                <strong>{{ fileName: file.name }}</strong> looks good, the
+                following collections and their documents will be imported:
+              </Trans>
+              <List>
+                {importDetails
+                  .filter((detail) => detail.type === "collection")
+                  .map((detail) => (
+                    <ImportPreviewItem key={detail.path}>
+                      <CollectionIcon />
+                      <CollectionName>{detail.name}</CollectionName>
+                    </ImportPreviewItem>
+                  ))}
+              </List>
+            </ImportPreview>
+            <Button
+              type="submit"
+              onClick={handleImport}
+              disabled={isImporting}
+              primary
+            >
+              {isImporting ? `${t("Importing")}…` : t("Confirm & Import")}
+            </Button>
+          </>
+        ) : (
+          <Button type="submit" onClick={handlePickFile} primary>
+            {t("Choose File…")}
+          </Button>
+        )}
 
         <h1>{t("Export")}</h1>
         <HelpText>
@@ -117,4 +182,24 @@ function ImportExport() {
   );
 }
 
+const List = styled.ul`
+  padding: 0;
+  margin: 8px 0 0;
+`;
+
+const ImportPreview = styled(Notice)`
+  margin-bottom: 16px;
+`;
+
+const ImportPreviewItem = styled.li`
+  display: flex;
+  align-items: center;
+  list-style: none;
+`;
+
+const CollectionName = styled.span`
+  font-weight: 500;
+  margin-left: 4px;
+`;
+
 export default observer(ImportExport);
@@ -1,11 +1,13 @@
 // @flow
 import fs from "fs";
+import os from "os";
 import path from "path";
 import debug from "debug";
 import File from "formidable/lib/file";
 import invariant from "invariant";
-import JSZip from "jszip";
 import { values, keys } from "lodash";
+import uuid from "uuid";
+import { parseOutlineExport } from "../../shared/utils/zip";
 import { InvalidRequestError } from "../errors";
 import { Attachment, Document, Collection, User } from "../models";
 import attachmentCreator from "./attachmentCreator";
@@ -27,57 +29,26 @@ export default async function documentBatchImporter({
 }) {
   // load the zip structure into memory
   const zipData = await fs.promises.readFile(file.path);
-  const zip = await JSZip.loadAsync(zipData);
+
+  let items;
+  try {
+    items = await parseOutlineExport(zipData);
+  } catch (err) {
+    throw new InvalidRequestError(err.message);
+  }
 
   // store progress and pointers
   let collections: { string: Collection } = {};
   let documents: { string: Document } = {};
   let attachments: { string: Attachment } = {};
 
-  // this is so we can use async / await a little easier
-  let folders = [];
-  zip.forEach(async function (path, item) {
-    // known skippable items
-    if (path.startsWith("__MACOSX") || path.endsWith(".DS_Store")) {
-      return;
-    }
-
-    folders.push([path, item]);
-  });
-
-  for (const [rawPath, item] of folders) {
-    const itemPath = rawPath.replace(/\/$/, "");
-    const depth = itemPath.split("/").length - 1;
-
-    if (depth === 0 && !item.dir) {
-      throw new InvalidRequestError(
-        "Root of zip file must only contain folders representing collections"
-      );
-    }
-  }
-
-  for (const [rawPath, item] of folders) {
-    const itemPath = rawPath.replace(/\/$/, "");
-    const itemDir = path.dirname(itemPath);
-    const name = path.basename(item.name);
-    const depth = itemPath.split("/").length - 1;
-
-    // metadata
-    let metadata = {};
-    try {
-      metadata = item.comment ? JSON.parse(item.comment) : {};
-    } catch (err) {
-      log(
-        `ZIP comment found for ${item.name}, but could not be parsed as metadata: ${item.comment}`
-      );
-    }
-
-    if (depth === 0 && item.dir && name) {
+  for (const item of items) {
+    if (item.type === "collection") {
       // check if collection with name exists
       let [collection, isCreated] = await Collection.findOrCreate({
         where: {
           teamId: user.teamId,
-          name,
+          name: item.name,
         },
         defaults: {
           creatorId: user.id,
@@ -92,28 +63,31 @@ export default async function documentBatchImporter({
         collection = await Collection.create({
           teamId: user.teamId,
           creatorId: user.id,
-          name: `${name} (Imported)`,
+          name: `${item.name} (Imported)`,
           private: false,
         });
       }
 
-      collections[itemPath] = collection;
+      collections[item.path] = collection;
       continue;
     }
 
-    if (depth > 0 && !item.dir && item.name.endsWith(".md")) {
-      const collectionDir = itemDir.split("/")[0];
+    if (item.type === "document") {
+      const collectionDir = item.dir.split("/")[0];
       const collection = collections[collectionDir];
-      invariant(collection, `Collection must exist for document ${itemDir}`);
+      invariant(collection, `Collection must exist for document ${item.dir}`);
 
       // we have a document
-      const content = await item.async("string");
+      const content = await item.item.async("string");
       const name = path.basename(item.name);
-      await fs.promises.writeFile(`/tmp/${name}`, content);
+      const tmpDir = os.tmpdir();
+      const tmpFilePath = `${tmpDir}/upload-${uuid.v4()}`;
+
+      await fs.promises.writeFile(tmpFilePath, content);
       const file = new File({
         name,
         type: "text/markdown",
-        path: `/tmp/${name}`,
+        path: tmpFilePath,
       });
 
       const { text, title } = await documentImporter({
@@ -124,9 +98,10 @@ export default async function documentBatchImporter({
 
       // must be a nested document, find and reference the parent document
       let parentDocumentId;
-      if (depth > 1) {
-        const parentDocument = documents[`${itemDir}.md`] || documents[itemDir];
-        invariant(parentDocument, `Document must exist for parent ${itemDir}`);
+      if (item.depth > 1) {
+        const parentDocument =
+          documents[`${item.dir}.md`] || documents[item.dir];
+        invariant(parentDocument, `Document must exist for parent ${item.dir}`);
         parentDocumentId = parentDocument.id;
       }
 
@@ -135,8 +110,8 @@ export default async function documentBatchImporter({
         text,
         publish: true,
         collectionId: collection.id,
-        createdAt: metadata.createdAt
-          ? new Date(metadata.createdAt)
+        createdAt: item.metadata.createdAt
+          ? new Date(item.metadata.createdAt)
           : item.date,
         updatedAt: item.date,
         parentDocumentId,
@@ -144,25 +119,24 @@ export default async function documentBatchImporter({
         ip,
       });
 
-      documents[itemPath] = document;
+      documents[item.path] = document;
       continue;
     }
 
-    if (depth > 0 && !item.dir && itemPath.includes("uploads")) {
-      // we have an attachment
-      const buffer = await item.async("nodebuffer");
+    if (item.type === "attachment") {
+      const buffer = await item.item.async("nodebuffer");
       const attachment = await attachmentCreator({
-        name,
+        name: item.name,
         type,
         buffer,
         user,
         ip,
       });
-      attachments[itemPath] = attachment;
+      attachments[item.path] = attachment;
       continue;
     }
 
-    log(`Skipped importing ${itemPath}`);
+    log(`Skipped importing ${item.path}`);
   }
 
   // All collections, documents, and attachments have been created – time to
@@ -280,7 +280,8 @@
   "Import": "Import",
   "It is possible to import a zip file of folders and Markdown files previously exported from an Outline instance. Support will soon be added for importing from other services.": "It is possible to import a zip file of folders and Markdown files previously exported from an Outline instance. Support will soon be added for importing from other services.",
   "Importing": "Importing",
-  "Import Data": "Import Data",
+  "Confirm & Import": "Confirm & Import",
+  "Choose File…": "Choose File…",
   "A full export might take some time, consider exporting a single document or collection if possible. We’ll put together a zip of all your documents in Markdown format and email it to <2>{{userEmail}}</2>.": "A full export might take some time, consider exporting a single document or collection if possible. We’ll put together a zip of all your documents in Markdown format and email it to <2>{{userEmail}}</2>.",
   "Export Requested": "Export Requested",
   "Requesting Export": "Requesting Export",
shared/utils/zip.js (new file, 76 lines)
@@ -0,0 +1,76 @@
+// @flow
+import path from "path";
+import JSZip, { ZipObject } from "jszip";
+
+export type Item = {|
+  path: string,
+  dir: string,
+  name: string,
+  depth: number,
+  metadata: Object,
+  type: "collection" | "document" | "attachment",
+  item: ZipObject,
+|};
+
+export async function parseOutlineExport(
+  input: File | Buffer
+): Promise<Item[]> {
+  const zip = await JSZip.loadAsync(input);
+
+  // this is so we can use async / await a little easier
+  let items: Item[] = [];
+  zip.forEach(async function (rawPath, item) {
+    const itemPath = rawPath.replace(/\/$/, "");
+    const dir = path.dirname(itemPath);
+    const name = path.basename(item.name);
+    const depth = itemPath.split("/").length - 1;
+
+    // known skippable items
+    if (itemPath.startsWith("__MACOSX") || itemPath.endsWith(".DS_Store")) {
+      return;
+    }
+
+    // attempt to parse extra metadata from zip comment
+    let metadata = {};
+    try {
+      metadata = item.comment ? JSON.parse(item.comment) : {};
+    } catch (err) {
+      console.log(
+        `ZIP comment found for ${item.name}, but could not be parsed as metadata: ${item.comment}`
+      );
+    }
+
+    if (depth === 0 && !item.dir) {
+      throw new Error(
+        "Root of zip file must only contain folders representing collections"
+      );
+    }
+
+    let type;
+    if (depth === 0 && item.dir && name) {
+      type = "collection";
+    }
+    if (depth > 0 && !item.dir && item.name.endsWith(".md")) {
+      type = "document";
+    }
+    if (depth > 0 && !item.dir && itemPath.includes("uploads")) {
+      type = "attachment";
+    }
+
+    if (!type) {
+      return;
+    }
+
+    items.push({
+      path: itemPath,
+      dir,
+      name,
+      depth,
+      type,
+      metadata,
+      item,
+    });
+  });
+
+  return items;
+}