From 76279902f989b7f4716332b6d79fe2cdb78ca1f1 Mon Sep 17 00:00:00 2001 From: Tom Moor Date: Mon, 24 Aug 2020 23:27:10 -0700 Subject: [PATCH] chore: Introduce AWS_S3_FORCE_PATH_STYLE option to maintain compatibility with Minio et al (#1443) - Make AWS_S3_UPLOAD_BUCKET_NAME optional --- .env.sample | 1 + app.json | 5 +++++ index.js | 3 +-- server/utils/s3.js | 27 ++++++++++++--------------- 4 files changed, 19 insertions(+), 17 deletions(-) diff --git a/.env.sample b/.env.sample index b1ee6045b43b..da9204d2ed43 100644 --- a/.env.sample +++ b/.env.sample @@ -45,6 +45,7 @@ AWS_REGION=xx-xxxx-x AWS_S3_UPLOAD_BUCKET_URL=http://s3:4569 AWS_S3_UPLOAD_BUCKET_NAME=bucket_name_here AWS_S3_UPLOAD_MAX_SIZE=26214400 +AWS_S3_FORCE_PATH_STYLE=true # uploaded s3 objects permission level, default is private # set to "public-read" to allow public access AWS_S3_ACL=private diff --git a/app.json b/app.json index 09221239a431..5aa7e990979d 100644 --- a/app.json +++ b/app.json @@ -92,6 +92,11 @@ "value": "26214400", "required": false }, + "AWS_S3_FORCE_PATH_STYLE": { + "description": "Use path-style URLs for connecting to S3 instead of subdomain. 
This is useful for S3-compatible storage.", + "value": "true", + "required": false + }, "AWS_REGION": { "value": "us-east-1", "description": "Region in which the above S3 bucket exists", diff --git a/index.js b/index.js index 5f8f76aa969d..43a19c802cab 100644 --- a/index.js +++ b/index.js @@ -17,9 +17,8 @@ if (process.env.AWS_ACCESS_KEY_ID) { "AWS_REGION", "AWS_SECRET_ACCESS_KEY", "AWS_S3_UPLOAD_BUCKET_URL", - "AWS_S3_UPLOAD_BUCKET_NAME", "AWS_S3_UPLOAD_MAX_SIZE", - ].forEach(key => { + ].forEach((key) => { if (!process.env[key]) { console.error(`The ${key} env variable must be set when using AWS`); // $FlowFixMe diff --git a/server/utils/s3.js b/server/utils/s3.js index 4e72a12b6b29..05ed6d9e04ac 100644 --- a/server/utils/s3.js +++ b/server/utils/s3.js @@ -4,18 +4,18 @@ import * as Sentry from "@sentry/node"; import AWS from "aws-sdk"; import addHours from "date-fns/add_hours"; import format from "date-fns/format"; -import invariant from "invariant"; import fetch from "isomorphic-fetch"; const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY; const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID; const AWS_REGION = process.env.AWS_REGION; -const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME; +const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME || ""; +const AWS_S3_FORCE_PATH_STYLE = process.env.AWS_S3_FORCE_PATH_STYLE !== "false"; const s3 = new AWS.S3({ - s3ForcePathStyle: true, - accessKeyId: process.env.AWS_ACCESS_KEY_ID, - secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY, + s3ForcePathStyle: AWS_S3_FORCE_PATH_STYLE, + accessKeyId: AWS_ACCESS_KEY_ID, + secretAccessKey: AWS_SECRET_ACCESS_KEY, endpoint: new AWS.Endpoint(process.env.AWS_S3_UPLOAD_BUCKET_URL), signatureVersion: "v4", }); @@ -84,9 +84,9 @@ export const publicS3Endpoint = (isServerUpload?: boolean) => { "localhost:" ).replace(/\/$/, ""); - return `${host}/${isServerUpload && isDocker ? 
"s3/" : ""}${ - process.env.AWS_S3_UPLOAD_BUCKET_NAME - }`; + return `${host}/${ + isServerUpload && isDocker ? "s3/" : "" + }${AWS_S3_UPLOAD_BUCKET_NAME}`; }; export const uploadToS3FromUrl = async ( @@ -94,8 +94,6 @@ export const uploadToS3FromUrl = async ( key: string, acl: string ) => { - invariant(AWS_S3_UPLOAD_BUCKET_NAME, "AWS_S3_UPLOAD_BUCKET_NAME not set"); - try { // $FlowIssue https://github.com/facebook/flow/issues/2171 const res = await fetch(url); @@ -103,7 +101,7 @@ export const uploadToS3FromUrl = async ( await s3 .putObject({ ACL: acl, - Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME, + Bucket: AWS_S3_UPLOAD_BUCKET_NAME, Key: key, ContentType: res.headers["content-type"], ContentLength: res.headers["content-length"], @@ -126,18 +124,17 @@ export const uploadToS3FromUrl = async ( export const deleteFromS3 = (key: string) => { return s3 .deleteObject({ - Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME, + Bucket: AWS_S3_UPLOAD_BUCKET_NAME, Key: key, }) .promise(); }; export const getSignedImageUrl = async (key: string) => { - invariant(AWS_S3_UPLOAD_BUCKET_NAME, "AWS_S3_UPLOAD_BUCKET_NAME not set"); const isDocker = process.env.AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/); const params = { - Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME, + Bucket: AWS_S3_UPLOAD_BUCKET_NAME, Key: key, Expires: 60, }; @@ -149,7 +146,7 @@ export const getSignedImageUrl = async (key: string) => { export const getImageByKey = async (key: string) => { const params = { - Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME, + Bucket: AWS_S3_UPLOAD_BUCKET_NAME, Key: key, };