From bde6bfc5faa75fd9122714dc91cf528fd1d8416e Mon Sep 17 00:00:00 2001
From: mattwr18
Date: Thu, 26 Mar 2020 14:03:21 +0100
Subject: [PATCH] fix(migration): fix errors after dry run

- avoid 'content-type is not a string' errors by checking whether a mimeType
  could be determined and falling back to 'image/jpeg' otherwise
- check that the file exists before trying to upload it (maybe this is only an
  issue locally, but about half of the files were missing from the file system)
---
 .../20200312140328-bulk_upload_to_s3.js       | 47 +++++++++----------
 backend/src/schema/resolvers/images/images.js |  4 +-
 2 files changed, 25 insertions(+), 26 deletions(-)

diff --git a/backend/src/db/migrations/20200312140328-bulk_upload_to_s3.js b/backend/src/db/migrations/20200312140328-bulk_upload_to_s3.js
index 89351c3b1..908c97b42 100644
--- a/backend/src/db/migrations/20200312140328-bulk_upload_to_s3.js
+++ b/backend/src/db/migrations/20200312140328-bulk_upload_to_s3.js
@@ -1,5 +1,5 @@
 import { getDriver } from '../../db/neo4j'
-import { createReadStream } from 'fs'
+import { existsSync, createReadStream } from 'fs'
 import path from 'path'
 import { S3 } from 'aws-sdk'
 import mime from 'mime-types'
@@ -32,39 +32,38 @@ export async function up(next) {
   try {
     // Implement your migration here.
     const { records } = await transaction.run('MATCH (image:Image) RETURN image.url as url')
-    let urls = records.map(r => r.get('url'))
-    urls = urls.filter(url => url.startsWith('/uploads'))
+    let urls = records.map((r) => r.get('url'))
+    urls = urls.filter((url) => url.startsWith('/uploads'))
     const locations = await Promise.all(
       urls
-        .map(url => {
+        .map((url) => {
           return async () => {
             const { pathname } = new URL(url, 'http://example.org')
             const fileLocation = path.join(__dirname, `../../../public/${pathname}`)
             const s3Location = `original${pathname}`
-            const mimeType = mime.lookup(fileLocation)
+            if (existsSync(fileLocation)) {
+              const mimeType = mime.lookup(fileLocation)
+              const params = {
+                Bucket,
+                Key: s3Location,
+                ACL: 'public-read',
+                ContentType: mimeType || 'image/jpeg',
+                Body: createReadStream(fileLocation),
+              }
 
-            const params = {
-              Bucket,
-              Key: s3Location,
-              ACL: 'public-read',
-              ContentType: mimeType,
-              Body: createReadStream(fileLocation),
+              const data = await s3.upload(params).promise()
+              const { Location: spacesUrl } = data
+
+              const updatedRecord = await transaction.run(
+                'MATCH (image:Image {url: $url}) SET image.url = $spacesUrl RETURN image.url as url',
+                { url, spacesUrl },
+              )
+              const [updatedUrl] = updatedRecord.records.map((record) => record.get('url'))
+              return updatedUrl
             }
-
-            const data = await s3.upload(params).promise()
-            const { Location: spacesUrl } = data
-
-            const updatedRecord = await transaction.run(
-              'MATCH (image:Image {url: $url}) SET image.url = $spacesUrl RETURN image.url as url',
-              { url, spacesUrl },
-            )
-            const [updatedUrl] = updatedRecord.records.map(record => record.get('url'))
-            // eslint-disable-next-line no-console
-            // https://image-upload.fra1.digitaloceanspaces.com/original/uploads/05b6cb85-deec-45f2-8e34-44111dceb743-avatar.png
-            return updatedUrl
           }
         })
-        .map(p => p()),
+        .map((p) => p()),
     )
     // eslint-disable-next-line no-console
     console.log('this is locations', locations)
diff --git a/backend/src/schema/resolvers/images/images.js b/backend/src/schema/resolvers/images/images.js
index 3e25fc2a5..18a3569b6 100644
--- a/backend/src/schema/resolvers/images/images.js
+++ b/backend/src/schema/resolvers/images/images.js
@@ -136,12 +136,12 @@ const s3Upload = async ({ createReadStream, uniqueFilename, mimetype }) => {
   return Location
 }
 
-const localFileDelete = async url => {
+const localFileDelete = async (url) => {
   const location = `public${url}`
   if (existsSync(location)) unlinkSync(location)
 }
 
-const s3Delete = async url => {
+const s3Delete = async (url) => {
   const s3 = new S3({ region, endpoint })
   let { pathname } = new URL(url, 'http://example.org') // dummy domain to avoid invalid URL error
   pathname = pathname.substring(1) // remove first character '/'
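
For reference, the guard this patch introduces boils down to the pattern below. This is a minimal standalone sketch, not part of the patch itself: uploadIfPresent is a hypothetical helper, and s3, Bucket, fileLocation, and s3Location stand in for the values the migration computes above.

import { existsSync, createReadStream } from 'fs'
import mime from 'mime-types'

// Skip files that are missing on disk, and fall back to 'image/jpeg'
// when mime.lookup() cannot determine a content type (it returns false).
const uploadIfPresent = async (s3, Bucket, fileLocation, s3Location) => {
  if (!existsSync(fileLocation)) return null // roughly half the files were missing locally
  const params = {
    Bucket,
    Key: s3Location,
    ACL: 'public-read',
    ContentType: mime.lookup(fileLocation) || 'image/jpeg',
    Body: createReadStream(fileLocation),
  }
  const { Location } = await s3.upload(params).promise()
  return Location
}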