refactor: Upload file to storage, and then pass attachmentId to collections.import
This avoids sending large file uploads directly to the server and allows us to fetch the file asynchronously in a worker process
This commit is contained in:
@ -38,7 +38,7 @@ router.post("attachments.create", auth(), async (ctx) => {
|
||||
const key = `${bucket}/${user.id}/${s3Key}/${name}`;
|
||||
const credential = makeCredential();
|
||||
const longDate = format(new Date(), "YYYYMMDDTHHmmss\\Z");
|
||||
const policy = makePolicy(credential, longDate, acl);
|
||||
const policy = makePolicy(credential, longDate, acl, contentType);
|
||||
const endpoint = publicS3Endpoint();
|
||||
const url = `${endpoint}/${key}`;
|
||||
|
||||
@ -85,6 +85,7 @@ router.post("attachments.create", auth(), async (ctx) => {
|
||||
documentId,
|
||||
contentType,
|
||||
name,
|
||||
id: attachment.id,
|
||||
url: attachment.redirectUrl,
|
||||
size,
|
||||
},
|
||||
|
Reference in New Issue
Block a user