fix: Cleanup S3 Attachments (#1217)

* fix: Server error if attempting to load an unknown attachment
* fix: Migration should cascade delete to document attachments
* fix: Delete S3 attachments along with documents

parent d3773dc943
commit 09dea295a2
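The diff below touches several files; as a reading aid, here is a minimal sketch of the cleanup flow the commit adds. It is an illustration only, not the committed code: the imports mirror the server modules used in the real files, and the `cleanupDeletedDocuments` function name is purely illustrative.

// Illustration of the pattern added by this commit — see the diff below for
// the real implementation. Imports are assumed to exist at the same paths as
// in the server code.
import subDays from 'date-fns/sub_days';
import { Op } from '../sequelize';
import { Attachment, Document } from '../models';
import { deleteFromS3 } from '../utils/s3';

// Destroying an attachment also removes the object it points to in S3.
Attachment.beforeDestroy(async attachment => {
  await deleteFromS3(attachment.key);
});

// Garbage collection: find documents soft-deleted more than 30 days ago,
// delete their attachment rows, then permanently delete the documents.
async function cleanupDeletedDocuments() {
  const where = { deletedAt: { [Op.lt]: subDays(new Date(), 30) } };

  const documents = await Document.scope('withUnpublished').findAll({
    attributes: ['id'],
    where,
  });
  const documentIds = documents.map(d => d.id);

  await Attachment.destroy({ where: { documentId: documentIds } });
  await Document.scope('withUnpublished').destroy({ where, force: true });
}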
@@ -3,7 +3,7 @@ import Router from 'koa-router';
 import auth from '../middlewares/authentication';
 import { Attachment, Document } from '../models';
 import { getSignedImageUrl } from '../utils/s3';
-
+import { NotFoundError } from '../errors';
 import policy from '../policies';
 
 const { authorize } = policy;
@@ -15,6 +15,9 @@ router.post('attachments.redirect', auth(), async ctx => {
 
   const user = ctx.state.user;
   const attachment = await Attachment.findByPk(id);
+  if (!attachment) {
+    throw new NotFoundError();
+  }
 
   if (attachment.isPrivate) {
     if (attachment.documentId) {
@@ -9,7 +9,7 @@ import {
   publicS3Endpoint,
   makeCredential,
 } from '../utils/s3';
-import { Attachment, Event, User, Team } from '../models';
+import { Document, Attachment, Event, User, Team } from '../models';
 import auth from '../middlewares/authentication';
 import pagination from './middlewares/pagination';
 import userInviter from '../commands/userInviter';
@@ -96,6 +96,11 @@ router.post('users.s3Upload', auth(), async ctx => {
   const endpoint = publicS3Endpoint();
   const url = `${endpoint}/${key}`;
 
+  if (documentId) {
+    const document = await Document.findByPk(documentId, { userId: user.id });
+    authorize(user, 'update', document);
+  }
+
   const attachment = await Attachment.create({
     key,
     acl,
@@ -1,11 +1,13 @@
 // @flow
+import debug from 'debug';
 import Router from 'koa-router';
 import subDays from 'date-fns/sub_days';
 import { AuthenticationError } from '../errors';
-import { Document } from '../models';
+import { Document, Attachment } from '../models';
 import { Op } from '../sequelize';
 
 const router = new Router();
+const log = debug('utils');
 
 router.post('utils.gc', async ctx => {
   const { token } = ctx.body;
@@ -14,15 +16,33 @@ router.post('utils.gc', async ctx => {
     throw new AuthenticationError('Invalid secret token');
   }
 
-  await Document.scope('withUnpublished').destroy({
-    where: {
-      deletedAt: {
-        [Op.lt]: subDays(new Date(), 30),
-      },
-    },
-    force: true,
-  });
+  log('Permanently deleting documents older than 30 days…');
+
+  const where = {
+    deletedAt: {
+      [Op.lt]: subDays(new Date(), 30),
+    },
+  };
+
+  const documents = await Document.scope('withUnpublished').findAll({
+    attributes: ['id'],
+    where,
+  });
+  const documentIds = documents.map(d => d.id);
+
+  await Attachment.destroy({
+    where: {
+      documentId: documentIds,
+    },
+  });
+
+  await Document.scope('withUnpublished').destroy({
+    where,
+    force: true,
+  });
+
+  log(`Deleted ${documentIds.length} documents`);
 
   ctx.body = {
     success: true,
   };
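For completeness, the gc route above is designed to be hit by an external scheduler with a shared secret. A hypothetical invocation could look like the sketch below; the `/api/utils.gc` path, the example hostname, and the `UTILS_SECRET` variable name are assumptions here, not something this diff shows.

// Hypothetical caller for the gc endpoint (Node 18+ provides a global fetch).
// Host, path, and secret name are assumptions — adjust for your deployment.
async function runGc() {
  const res = await fetch('https://docs.example.com/api/utils.gc', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ token: process.env.UTILS_SECRET }),
  });
  console.log(await res.json()); // expected: { success: true }
}

runGc();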
server/migrations/20200328175012-cascade-delete.js (new file)
@@ -0,0 +1,43 @@
+const tableName = 'attachments';
+
+// because of this issue in Sequelize the foreign key constraint may be named differently depending
+// on when the previous migrations were run https://github.com/sequelize/sequelize/pull/9890
+const constraintNames = ['attachments_documentId_fkey', 'attachments_foreign_idx'];
+
+module.exports = {
+  up: async (queryInterface, Sequelize) => {
+    let error;
+    for (const constraintName of constraintNames) {
+      try {
+        await queryInterface.sequelize.query(`alter table "${tableName}" drop constraint "${constraintName}"`)
+        await queryInterface.sequelize.query(
+          `alter table "${tableName}"
+           add constraint "${constraintName}" foreign key("documentId") references "documents" ("id")
+           on delete cascade`
+        );
+        return;
+      } catch (err) {
+        error = err;
+      }
+    }
+    throw error;
+  },
+
+  down: async (queryInterface, Sequelize) => {
+    let error;
+    for (const constraintName of constraintNames) {
+      try {
+        await queryInterface.sequelize.query(`alter table "${tableName}" drop constraint "${constraintName}"`)
+        await queryInterface.sequelize.query(
+          `alter table "${tableName}"
+           add constraint "${constraintName}" foreign key("documentId") references "documents" ("id")
+           on delete no action`
+        );
+        return;
+      } catch (err) {
+        error = err;
+      }
+    }
+    throw error;
+  },
+};
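A note on the migration above: as its comment explains, the name of the "documentId" foreign key depends on which Sequelize version originally created the attachments table, so the migration simply tries both known names. Purely as an illustration (not part of this commit), the actual constraint name could instead be read from Postgres metadata:

// Hypothetical helper, not in this commit: resolve the real name of the
// attachments."documentId" foreign key from information_schema rather than
// guessing. `sequelize` is any connected Sequelize instance (Postgres).
async function findDocumentIdConstraint(sequelize) {
  const [rows] = await sequelize.query(
    `select tc.constraint_name
       from information_schema.table_constraints tc
       join information_schema.key_column_usage kcu
         on kcu.constraint_name = tc.constraint_name
        and kcu.table_name = tc.table_name
      where tc.table_name = 'attachments'
        and tc.constraint_type = 'FOREIGN KEY'
        and kcu.column_name = 'documentId'`
  );
  return rows.length > 0 ? rows[0].constraint_name : null;
}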
@@ -1,6 +1,7 @@
 // @flow
 import path from 'path';
 import { DataTypes, sequelize } from '../sequelize';
+import { deleteFromS3 } from '../utils/s3';
 
 const Attachment = sequelize.define(
   'attachment',
@@ -50,6 +51,10 @@ const Attachment = sequelize.define(
   }
 );
 
+Attachment.beforeDestroy(async model => {
+  await deleteFromS3(model.key);
+});
+
 Attachment.associate = models => {
   Attachment.belongsTo(models.Team);
   Attachment.belongsTo(models.Document);
@@ -123,6 +123,15 @@ export const uploadToS3FromUrl = async (
   }
 };
 
+export const deleteFromS3 = (key: string) => {
+  return s3
+    .deleteObject({
+      Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
+      Key: key,
+    })
+    .promise();
+};
+
 export const getSignedImageUrl = async (key: string) => {
   invariant(AWS_S3_UPLOAD_BUCKET_NAME, 'AWS_S3_UPLOAD_BUCKET_NAME not set');
   const isDocker = process.env.AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/);