feat: private content (#1137)

* save images as private and serve via signed url from images.info api

* download private images to directory on export

* fix lint errors

* make private the default S3 ACL, move the AWS.S3 client to module-level scope, set a default S3 URL expiry

* combine regexes into one, and only replace when there are matches

* fix lint

* remove code that is no longer needed

* updates after pulling master

* revert the uploadToS3FromUrl url return

* use model getter to compact code, rename to attachments API

* basic checking of document read permission to allow attachment viewing

* fix: Continue to upload avatars as public
fix: Allow redirect for non-private attachments

* add support for publicly shared documents

* catch errors which crash the app during zip export and user creation

* add tests

* enable AWS signature v4 for s3

* switch to use factories to build models for testing

* add isDocker flag for local serving of attachment redirect url

* fix redirect tests

Co-authored-by: Tom Moor <tom.moor@gmail.com>
Authored by Huss on 2020-02-13 03:40:44 +00:00, committed by GitHub
parent 064d8cea44
commit 8e2b19dc7a
15 changed files with 316 additions and 39 deletions


@ -49,6 +49,9 @@ AWS_REGION=xx-xxxx-x
AWS_S3_UPLOAD_BUCKET_URL=http://s3:4569
AWS_S3_UPLOAD_BUCKET_NAME=bucket_name_here
AWS_S3_UPLOAD_MAX_SIZE=26214400
# uploaded s3 objects permission level, default is private
# set to "public-read" to allow public access
AWS_S3_ACL=private
# Emails configuration (optional)
SMTP_HOST=
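
For context, the server resolves the effective ACL from this variable; a minimal sketch of that precedence, mirroring the users.s3Upload change further down (resolveAcl is an illustrative helper name):

// Sketch: how the configured default interacts with a per-request `public` flag.
const AWS_S3_ACL = process.env.AWS_S3_ACL || 'private';

function resolveAcl(requestedPublic) {
  if (requestedPublic === undefined) return AWS_S3_ACL; // fall back to the env default
  return requestedPublic ? 'public-read' : 'private';
}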


@ -49,10 +49,11 @@ class DropToImport extends React.Component<Props> {
const canvas = this.avatarEditorRef.getImage();
const imageBlob = dataUrlToBlob(canvas.toDataURL());
try {
const asset = await uploadFile(imageBlob, {
const attachment = await uploadFile(imageBlob, {
name: this.file.name,
public: true,
});
this.props.onSuccess(asset.url);
this.props.onSuccess(attachment.url);
} catch (err) {
this.props.onError(err.message);
} finally {


@ -5,6 +5,7 @@ import invariant from 'invariant';
type Options = {
name?: string,
documentId?: string,
public?: boolean,
};
export const uploadFile = async (
@ -13,6 +14,7 @@ export const uploadFile = async (
) => {
const name = file instanceof File ? file.name : options.name;
const response = await client.post('/users.s3Upload', {
public: options.public,
documentId: options.documentId,
contentType: file.type,
size: file.size,
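
A brief usage sketch of the extended helper, inside an async handler; it mirrors the avatar call above, and call sites that omit the flag inherit the server-side default:

// Sketch: uploading a blob that should stay publicly readable (e.g. an avatar).
const attachment = await uploadFile(imageBlob, {
  name: 'avatar.png', // illustrative name
  public: true,       // omit to fall back to the AWS_S3_ACL default
});
// attachment.url now points at the attachments.redirect API route rather than a raw S3 URL
console.log(attachment.url);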

server/api/attachments.js (new file)

@ -0,0 +1,32 @@
// @flow
import Router from 'koa-router';
import auth from '../middlewares/authentication';
import { Attachment, Document } from '../models';
import { getSignedImageUrl } from '../utils/s3';
import policy from '../policies';
const { authorize } = policy;
const router = new Router();
router.post('attachments.redirect', auth(), async ctx => {
const { id } = ctx.body;
ctx.assertPresent(id, 'id is required');
const user = ctx.state.user;
const attachment = await Attachment.findByPk(id);
if (attachment.isPrivate) {
const document = await Document.findByPk(attachment.documentId, {
userId: user.id,
});
authorize(user, 'read', document);
const accessUrl = await getSignedImageUrl(attachment.key);
ctx.redirect(accessUrl);
} else {
ctx.redirect(attachment.url);
}
});
export default router;
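
A brief usage sketch of the route, as the tests that follow exercise it; names come from the test factories:

// Sketch: posting an attachment id and inspecting the redirect instead of following it.
const res = await server.post('/api/attachments.redirect', {
  body: { token: user.getJwtToken(), id: attachment.id },
  redirect: 'manual',
});
// 302 with a signed S3 URL for private attachments the user may read,
// 302 with the stored public URL otherwise, and 403 when access is denied.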


@ -0,0 +1,86 @@
/* eslint-disable flowtype/require-valid-file-annotation */
import TestServer from 'fetch-test-server';
import app from '../app';
import { flushdb } from '../test/support';
import {
buildUser,
buildCollection,
buildAttachment,
buildDocument,
} from '../test/factories';
const server = new TestServer(app.callback());
beforeEach(flushdb);
afterAll(server.close);
describe('#attachments.redirect', async () => {
it('should require authentication', async () => {
const res = await server.post('/api/attachments.redirect');
expect(res.status).toEqual(401);
});
it('should return a redirect for an attachment belonging to a document user has access to', async () => {
const user = await buildUser();
const attachment = await buildAttachment({
teamId: user.teamId,
userId: user.id,
});
const res = await server.post('/api/attachments.redirect', {
body: { token: user.getJwtToken(), id: attachment.id },
redirect: 'manual',
});
expect(res.status).toEqual(302);
});
it('should always return a redirect for a public attachment', async () => {
const user = await buildUser();
const collection = await buildCollection({
teamId: user.teamId,
userId: user.id,
private: true,
});
const document = await buildDocument({
teamId: user.teamId,
userId: user.id,
collectionId: collection.id,
});
const attachment = await buildAttachment({
teamId: user.teamId,
userId: user.id,
documentId: document.id,
});
const res = await server.post('/api/attachments.redirect', {
body: { token: user.getJwtToken(), id: attachment.id },
redirect: 'manual',
});
expect(res.status).toEqual(302);
});
it('should not return a redirect for a private attachment belonging to a document user does not have access to', async () => {
const user = await buildUser();
const collection = await buildCollection({
private: true,
});
const document = await buildDocument({
teamId: collection.teamId,
userId: collection.userId,
collectionId: collection.id,
});
const attachment = await buildAttachment({
teamId: document.teamId,
userId: document.userId,
documentId: document.id,
acl: 'private',
});
const res = await server.post('/api/attachments.redirect', {
body: { token: user.getJwtToken(), id: attachment.id },
});
expect(res.status).toEqual(403);
});
});


@ -16,6 +16,7 @@ import team from './team';
import integrations from './integrations';
import notificationSettings from './notificationSettings';
import utils from './utils';
import attachments from './attachments';
import { NotFoundError } from '../errors';
import errorHandling from '../middlewares/errorHandling';
@ -48,6 +49,7 @@ router.use('/', shares.routes());
router.use('/', team.routes());
router.use('/', integrations.routes());
router.use('/', notificationSettings.routes());
router.use('/', attachments.routes());
router.use('/', utils.routes());
router.post('*', ctx => {
ctx.throw(new NotFoundError('Endpoint not found'));


@ -1,7 +1,6 @@
// @flow
import Router from 'koa-router';
import { Team } from '../models';
import { publicS3Endpoint } from '../utils/s3';
import auth from '../middlewares/authentication';
import { presentTeam, presentPolicies } from '../presenters';
@ -19,8 +18,6 @@ router.post('team.update', auth(), async ctx => {
guestSignin,
documentEmbeds,
} = ctx.body;
const endpoint = publicS3Endpoint();
const user = ctx.state.user;
const team = await Team.findByPk(user.teamId);
authorize(user, 'update', team);
@ -33,9 +30,7 @@ router.post('team.update', auth(), async ctx => {
if (sharing !== undefined) team.sharing = sharing;
if (documentEmbeds !== undefined) team.documentEmbeds = documentEmbeds;
if (guestSignin !== undefined) team.guestSignin = guestSignin;
if (avatarUrl && avatarUrl.startsWith(`${endpoint}/uploads/${user.id}`)) {
team.avatarUrl = avatarUrl;
}
if (avatarUrl !== undefined) team.avatarUrl = avatarUrl;
await team.save();
ctx.body = {


@ -17,6 +17,7 @@ import userInviter from '../commands/userInviter';
import { presentUser } from '../presenters';
import policy from '../policies';
const AWS_S3_ACL = process.env.AWS_S3_ACL || 'private';
const { authorize } = policy;
const router = new Router();
@ -61,12 +62,9 @@ router.post('users.info', auth(), async ctx => {
router.post('users.update', auth(), async ctx => {
const { user } = ctx.state;
const { name, avatarUrl } = ctx.body;
const endpoint = publicS3Endpoint();
if (name) user.name = name;
if (avatarUrl && avatarUrl.startsWith(`${endpoint}/uploads/${user.id}`)) {
user.avatarUrl = avatarUrl;
}
if (avatarUrl) user.avatarUrl = avatarUrl;
await user.save();
@ -89,14 +87,19 @@ router.post('users.s3Upload', auth(), async ctx => {
const { user } = ctx.state;
const s3Key = uuid.v4();
const key = `uploads/${user.id}/${s3Key}/${name}`;
const acl =
ctx.body.public === undefined
? AWS_S3_ACL
: ctx.body.public ? 'public-read' : 'private';
const credential = makeCredential();
const longDate = format(new Date(), 'YYYYMMDDTHHmmss\\Z');
const policy = makePolicy(credential, longDate);
const policy = makePolicy(credential, longDate, acl);
const endpoint = publicS3Endpoint();
const url = `${endpoint}/${key}`;
await Attachment.create({
const attachment = await Attachment.create({
key,
acl,
size,
url,
contentType,
@ -120,7 +123,7 @@ router.post('users.s3Upload', auth(), async ctx => {
form: {
'Cache-Control': 'max-age=31557600',
'Content-Type': contentType,
acl: 'public-read',
acl,
key,
policy,
'x-amz-algorithm': 'AWS4-HMAC-SHA256',
@ -131,7 +134,7 @@ router.post('users.s3Upload', auth(), async ctx => {
asset: {
contentType,
name,
url,
url: attachment.redirectUrl,
size,
},
},
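
For orientation, a hedged sketch of how a browser client would consume the presigned-POST response; the form and asset fields appear in the diff above, while uploadUrl and the exact response shape are assumptions:

// Sketch (assumed response shape): stream the file straight to S3 with the signed policy.
const { form, uploadUrl, asset } = response.data; // uploadUrl is an assumed field name
const body = new FormData();
Object.keys(form).forEach(field => body.append(field, form[field]));
body.append('file', file); // the file itself must be the last field in an S3 POST policy upload
await fetch(uploadUrl, { method: 'POST', body });
// afterwards the app references asset.url, i.e. the attachments.redirect route, not a raw S3 URL.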


@ -40,6 +40,12 @@ const Attachment = sequelize.define(
name: function() {
return path.parse(this.key).base;
},
redirectUrl: function() {
return `/api/attachments.redirect?id=${this.id}`;
},
isPrivate: function() {
return this.acl === 'private';
},
},
}
);
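
A brief sketch of the new getters in use; the markdown embed is exactly the shape the document presenter's regex matches later in this commit:

// Sketch: embedding an upload in document text via the stable API URL.
const markdown = `![${attachment.name}](${attachment.redirectUrl})`;
// e.g. ![photo.png](/api/attachments.redirect?id=<attachment id>)
// attachment.isPrivate is true when acl === 'private', so routes know whether
// to sign the S3 key or redirect to the public URL.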


@ -91,12 +91,18 @@ Team.associate = models => {
const uploadAvatar = async model => {
const endpoint = publicS3Endpoint();
const { avatarUrl } = model;
if (model.avatarUrl && !model.avatarUrl.startsWith(endpoint)) {
if (
avatarUrl &&
!avatarUrl.startsWith('/api') &&
!avatarUrl.startsWith(endpoint)
) {
try {
const newUrl = await uploadToS3FromUrl(
model.avatarUrl,
`avatars/${model.id}/${uuid.v4()}`
avatarUrl,
`avatars/${model.id}/${uuid.v4()}`,
'public-read'
);
if (newUrl) model.avatarUrl = newUrl;
} catch (err) {


@ -128,14 +128,21 @@ const uploadAvatar = async model => {
if (
avatarUrl &&
!avatarUrl.startsWith('/api') &&
!avatarUrl.startsWith(endpoint) &&
!avatarUrl.startsWith(DEFAULT_AVATAR_HOST)
) {
const newUrl = await uploadToS3FromUrl(
avatarUrl,
`avatars/${model.id}/${uuid.v4()}`
);
if (newUrl) model.avatarUrl = newUrl;
try {
const newUrl = await uploadToS3FromUrl(
avatarUrl,
`avatars/${model.id}/${uuid.v4()}`,
'public-read'
);
if (newUrl) model.avatarUrl = newUrl;
} catch (err) {
// we can try again next time
console.error(err);
}
}
};


@ -1,24 +1,46 @@
// @flow
import { takeRight } from 'lodash';
import { User, Document } from '../models';
import { User, Document, Attachment } from '../models';
import { getSignedImageUrl } from '../utils/s3';
import presentUser from './user';
type Options = {
isPublic?: boolean,
};
const attachmentRegex = /!\[.*\]\(\/api\/attachments\.redirect\?id=(?<id>.*)\)/gi;
// replaces attachments.redirect urls with signed/authenticated url equivalents
async function replaceImageAttachments(text) {
const attachmentIds = [...text.matchAll(attachmentRegex)].map(
match => match.groups && match.groups.id
);
for (const id of attachmentIds) {
const attachment = await Attachment.findByPk(id);
const accessUrl = await getSignedImageUrl(attachment.key);
text = text.replace(attachment.redirectUrl, accessUrl);
}
return text;
}
export default async function present(document: Document, options: ?Options) {
options = {
isPublic: false,
...options,
};
const text = options.isPublic
? await replaceImageAttachments(document.text)
: document.text;
const data = {
id: document.id,
url: document.url,
urlId: document.urlId,
title: document.title,
text: document.text,
text,
emoji: document.emoji,
createdAt: document.createdAt,
createdBy: undefined,
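
A hedged illustration of what replaceImageAttachments does for public shares; the document text and id are made up:

// Sketch: extracting attachment ids from the markdown image embeds.
const text = 'Intro\n\n![diagram](/api/attachments.redirect?id=2b8c0000-0000-4000-8000-000000000000)';
const ids = [...text.matchAll(attachmentRegex)].map(m => m.groups && m.groups.id);
// ids => ['2b8c0000-0000-4000-8000-000000000000']
// each id is then looked up and its redirectUrl replaced with a signed S3 URL,
// so anonymous viewers of a shared document never need to call the API.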


@ -1,5 +1,13 @@
// @flow
import { Share, Team, User, Event, Document, Collection } from '../models';
import {
Share,
Team,
User,
Event,
Document,
Collection,
Attachment,
} from '../models';
import uuid from 'uuid';
let count = 0;
@ -104,3 +112,38 @@ export async function buildDocument(overrides: Object = {}) {
...overrides,
});
}
export async function buildAttachment(overrides: Object = {}) {
count++;
if (!overrides.teamId) {
const team = await buildTeam();
overrides.teamId = team.id;
}
if (!overrides.userId) {
const user = await buildUser();
overrides.userId = user.id;
}
if (!overrides.collectionId) {
const collection = await buildCollection(overrides);
overrides.collectionId = collection.id;
}
if (!overrides.documentId) {
const document = await buildDocument(overrides);
overrides.documentId = document.id;
}
return Attachment.create({
key: `uploads/key/to/file ${count}.png`,
url: `https://redirect.url.com/uploads/key/to/file ${count}.png`,
contentType: 'image/png',
size: 100,
acl: 'public-read',
createdAt: new Date('2018-01-02T00:00:00.000Z'),
updatedAt: new Date('2018-01-02T00:00:00.000Z'),
...overrides,
});
}


@ -12,6 +12,14 @@ const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
const AWS_REGION = process.env.AWS_REGION;
const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME;
const s3 = new AWS.S3({
s3ForcePathStyle: true,
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
endpoint: new AWS.Endpoint(process.env.AWS_S3_UPLOAD_BUCKET_URL),
signatureVersion: 'v4',
});
const hmac = (key: string, message: string, encoding: any) => {
return crypto
.createHmac('sha256', key)
@ -30,13 +38,17 @@ export const makeCredential = () => {
return credential;
};
export const makePolicy = (credential: string, longDate: string) => {
export const makePolicy = (
credential: string,
longDate: string,
acl: string
) => {
const tomorrow = addHours(new Date(), 24);
const policy = {
conditions: [
{ bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME },
['starts-with', '$key', ''],
{ acl: 'public-read' },
{ acl },
['content-length-range', 0, +process.env.AWS_S3_UPLOAD_MAX_SIZE],
['starts-with', '$Content-Type', 'image'],
['starts-with', '$Cache-Control', ''],
@ -77,13 +89,11 @@ export const publicS3Endpoint = (isServerUpload?: boolean) => {
}`;
};
export const uploadToS3FromUrl = async (url: string, key: string) => {
const s3 = new AWS.S3({
s3ForcePathStyle: true,
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
endpoint: new AWS.Endpoint(process.env.AWS_S3_UPLOAD_BUCKET_URL),
});
export const uploadToS3FromUrl = async (
url: string,
key: string,
acl: string
) => {
invariant(AWS_S3_UPLOAD_BUCKET_NAME, 'AWS_S3_UPLOAD_BUCKET_NAME not set');
try {
@ -92,7 +102,7 @@ export const uploadToS3FromUrl = async (url: string, key: string) => {
const buffer = await res.buffer();
await s3
.putObject({
ACL: 'public-read',
ACL: acl,
Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
ContentType: res.headers['content-type'],
@ -112,3 +122,36 @@ export const uploadToS3FromUrl = async (url: string, key: string) => {
}
}
};
export const getSignedImageUrl = async (key: string) => {
invariant(AWS_S3_UPLOAD_BUCKET_NAME, 'AWS_S3_UPLOAD_BUCKET_NAME not set');
const isDocker = process.env.AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/);
const params = {
Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
Expires: 60,
};
return isDocker
? `${publicS3Endpoint()}/${key}`
: s3.getSignedUrl('getObject', params);
};
export const getImageByKey = async (key: string) => {
const params = {
Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
};
try {
const data = await s3.getObject(params).promise();
return data.Body;
} catch (err) {
if (process.env.NODE_ENV === 'production') {
bugsnag.notify(err);
} else {
throw err;
}
}
};
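
A brief usage sketch of the two new helpers; the key is illustrative:

// Sketch: signed, short-lived access to a private object (60 seconds, per Expires above)...
const signedUrl = await getSignedImageUrl('uploads/user-id/file-id/photo.png');
// ...and raw retrieval for server-side work such as the zip export below.
const buffer = await getImageByKey('uploads/user-id/file-id/photo.png');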


@ -3,13 +3,25 @@ import fs from 'fs';
import JSZip from 'jszip';
import tmp from 'tmp';
import unescape from '../../shared/utils/unescape';
import { Collection, Document } from '../models';
import { Attachment, Collection, Document } from '../models';
import { getImageByKey } from './s3';
import bugsnag from 'bugsnag';
async function addToArchive(zip, documents) {
for (const doc of documents) {
const document = await Document.findByPk(doc.id);
let text = unescape(document.text);
zip.file(`${document.title}.md`, unescape(document.text));
const attachments = await Attachment.findAll({
where: { documentId: document.id },
});
for (const attachment of attachments) {
await addImageToArchive(zip, attachment.key);
text = text.replace(attachment.redirectUrl, encodeURI(attachment.key));
}
zip.file(`${document.title}.md`, text);
if (doc.children && doc.children.length) {
const folder = zip.folder(document.title);
@ -18,6 +30,20 @@ async function addToArchive(zip, documents) {
}
}
async function addImageToArchive(zip, key) {
try {
const img = await getImageByKey(key);
zip.file(key, img, { createFolders: true });
} catch (err) {
if (process.env.NODE_ENV === 'production') {
bugsnag.notify(err);
} else {
// error during file retrieval
console.error(err);
}
}
}
async function archiveToPath(zip) {
return new Promise((resolve, reject) => {
tmp.file({ prefix: 'export-', postfix: '.zip' }, (err, path) => {
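
To make the rewrite in addToArchive concrete, a hedged before/after of a single attachment link during export; the key follows the uploads pattern used elsewhere in this commit:

// Sketch: the per-attachment link rewrite performed while building the zip.
const before = `![photo](${attachment.redirectUrl})`; // ![photo](/api/attachments.redirect?id=...)
const after = before.replace(attachment.redirectUrl, encodeURI(attachment.key));
// after => '![photo](uploads/user-id/file-id/my%20photo.png)', matching the path
// at which addImageToArchive stores the binary inside the archive.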