FakeS3 support

Tom Moor
2017-12-10 22:58:52 -08:00
parent ec86b9fe8c
commit ced80b6723
5 changed files with 38 additions and 20 deletions


@@ -16,7 +16,7 @@ GOOGLE_ANALYTICS_ID=
 AWS_ACCESS_KEY_ID=notcheckedindev
 AWS_SECRET_ACCESS_KEY=notcheckedindev
-AWS_S3_UPLOAD_BUCKET_URL=http://localhost:4569
+AWS_S3_UPLOAD_BUCKET_URL=http://s3:4569
 AWS_S3_UPLOAD_BUCKET_NAME=outline-dev
 AWS_S3_UPLOAD_MAX_SIZE=26214400
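Note on this value (an illustrative aside, not part of the diff): inside the compose network the hostname s3 resolves to the fake-s3 container, while the browser reaches the same service through the port published on localhost. A minimal sketch of the rewrite performed by the publicS3Endpoint helper added further down, assuming the sample values above:

    // Sketch only: the browser-facing endpoint expected for the sample values above.
    const bucketUrl = 'http://s3:4569';   // AWS_S3_UPLOAD_BUCKET_URL
    const bucketName = 'outline-dev';     // AWS_S3_UPLOAD_BUCKET_NAME
    const publicEndpoint =
      bucketUrl.replace('s3:', 'localhost:').replace(/\/$/, '') + '/' + bucketName;
    console.log(publicEndpoint);          // http://localhost:4569/outline-dev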

.gitignore

@@ -3,3 +3,4 @@ node_modules/*
 .env
 npm-debug.log
 .DS_Store
+fakes3/*


@@ -16,6 +16,8 @@ services:
     image: lphoward/fake-s3
     ports:
       - "4569:4569"
+    volumes:
+      - ./fakes3:/fakes3_root
   outline:
     image: outline:v001
     command: yarn dev


@@ -1,8 +1,7 @@
 // @flow
 import uuid from 'uuid';
 import Router from 'koa-router';
-import { makePolicy, signPolicy } from '../utils/s3';
+import { makePolicy, signPolicy, publicS3Endpoint } from '../utils/s3';
 import auth from './middlewares/authentication';
 import { presentUser } from '../presenters';
@@ -21,11 +20,12 @@ router.post('user.s3Upload', auth(), async ctx => {
   const s3Key = uuid.v4();
   const key = `uploads/${ctx.state.user.id}/${s3Key}/${filename}`;
   const policy = makePolicy();
+  const endpoint = publicS3Endpoint();
   ctx.body = {
     data: {
       maxUploadSize: process.env.AWS_S3_UPLOAD_MAX_SIZE,
-      uploadUrl: process.env.AWS_S3_UPLOAD_BUCKET_URL,
+      uploadUrl: endpoint,
       form: {
         AWSAccessKeyId: process.env.AWS_ACCESS_KEY_ID,
         'Cache-Control': 'max-age=31557600',
@@ -37,7 +37,7 @@ router.post('user.s3Upload', auth(), async ctx => {
       },
       asset: {
         contentType: kind,
-        url: `${process.env.AWS_S3_UPLOAD_BUCKET_URL}${key}`,
+        url: `${endpoint}/${key}`,
         name: filename,
         size,
       },
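The handler above now returns everything a browser needs to POST a file directly to the (fake) S3 endpoint. A rough client-side sketch of that flow; the /api/user.s3Upload path and the request body fields (filename, kind, size) are assumptions read off the visible handler code, not confirmed by this diff:

    // Illustrative only: posting the signed form fields, then the file, to uploadUrl.
    async function uploadToS3(file) {
      const res = await fetch('/api/user.s3Upload', {
        method: 'POST',
        credentials: 'same-origin',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ filename: file.name, kind: file.type, size: file.size }),
      });
      const { data } = await res.json();

      // S3 POST policy uploads expect the signed fields first and the file last.
      const formData = new FormData();
      Object.keys(data.form).forEach(field => formData.append(field, data.form[field]));
      formData.append('file', file);

      await fetch(data.uploadUrl, { method: 'POST', body: formData });
      return data.asset.url;
    }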


@@ -1,21 +1,15 @@
 // @flow
 import crypto from 'crypto';
 import moment from 'moment';
-import path from 'path';
 import AWS from 'aws-sdk';
 import invariant from 'invariant';
 import fetch from 'isomorphic-fetch';
 import bugsnag from 'bugsnag';
-AWS.config.update({
-  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
-  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
-});
 const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
 const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME;
-const makePolicy = () => {
+export const makePolicy = () => {
   const policy = {
     conditions: [
       { bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME },
@@ -33,7 +27,7 @@ const makePolicy = () => {
   return new Buffer(JSON.stringify(policy)).toString('base64');
 };
-const signPolicy = (policy: any) => {
+export const signPolicy = (policy: any) => {
   invariant(AWS_SECRET_ACCESS_KEY, 'AWS_SECRET_ACCESS_KEY not set');
   const signature = crypto
     .createHmac('sha1', AWS_SECRET_ACCESS_KEY)
@@ -43,8 +37,24 @@ const signPolicy = (policy: any) => {
   return signature;
 };
-const uploadToS3FromUrl = async (url: string, key: string) => {
-  const s3 = new AWS.S3();
+export const publicS3Endpoint = () => {
+  // lose trailing slash if there is one and convert fake-s3 url to localhost
+  // for access outside of docker containers in local development
+  const host = process.env.AWS_S3_UPLOAD_BUCKET_URL.replace(
+    's3:',
+    'localhost:'
+  ).replace(/\/$/, '');
+  return `${host}/${process.env.AWS_S3_UPLOAD_BUCKET_NAME}`;
+};
+export const uploadToS3FromUrl = async (url: string, key: string) => {
+  const s3 = new AWS.S3({
+    s3ForcePathStyle: true,
+    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
+    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
+    endpoint: new AWS.Endpoint(process.env.AWS_S3_UPLOAD_BUCKET_URL),
+  });
   invariant(AWS_S3_UPLOAD_BUCKET_NAME, 'AWS_S3_UPLOAD_BUCKET_NAME not set');
   try {
@@ -53,6 +63,7 @@ const uploadToS3FromUrl = async (url: string, key: string) => {
     const buffer = await res.buffer();
     await s3
       .putObject({
+        ACL: 'public-read',
         Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
         Key: key,
         ContentType: res.headers['content-type'],
@@ -60,10 +71,14 @@ const uploadToS3FromUrl = async (url: string, key: string) => {
         Body: buffer,
       })
       .promise();
-    return path.join(process.env.AWS_S3_UPLOAD_BUCKET_URL, key);
-  } catch (e) {
-    bugsnag.notify(e);
+    const endpoint = publicS3Endpoint();
+    return `${endpoint}/${key}`;
+  } catch (err) {
+    if (process.env.NODE_ENV === 'production') {
+      bugsnag.notify(err);
+    } else {
+      throw err;
+    }
   }
 };
-export { makePolicy, signPolicy, uploadToS3FromUrl };
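
Taken together, the changes mean an object uploaded in local development gets a path-style URL under the fake-s3 bucket; s3ForcePathStyle keeps the bucket name in the path rather than in a virtual-hosted subdomain, which matches how a local fake-s3 container is typically reached. A quick sketch of the expected URL shape, assuming the sample env values and a hypothetical key:

    // Sketch only, assuming AWS_S3_UPLOAD_BUCKET_URL=http://s3:4569 and
    // AWS_S3_UPLOAD_BUCKET_NAME=outline-dev; the key layout mirrors user.s3Upload above.
    const endpoint = 'http://localhost:4569/outline-dev'; // what publicS3Endpoint() returns here
    const key = 'uploads/some-user-id/some-uuid/avatar.png';
    const url = `${endpoint}/${key}`;
    // => http://localhost:4569/outline-dev/uploads/some-user-id/some-uuid/avatar.png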