Skip to content

Commit

Permalink
chore: Introduce AWS_S3_FORCE_PATH_STYLE option to maintain compatibi…
Browse files Browse the repository at this point in the history
…lity with Minio et al (outline#1443)

- Make AWS_S3_UPLOAD_BUCKET_NAME optional
  • Loading branch information
tommoor authored Aug 25, 2020
1 parent 9b5573c commit 7627990
Show file tree
Hide file tree
Showing 4 changed files with 19 additions and 17 deletions.
1 change: 1 addition & 0 deletions .env.sample
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ AWS_REGION=xx-xxxx-x
AWS_S3_UPLOAD_BUCKET_URL=http://s3:4569
AWS_S3_UPLOAD_BUCKET_NAME=bucket_name_here
AWS_S3_UPLOAD_MAX_SIZE=26214400
AWS_S3_FORCE_PATH_STYLE=true
# uploaded s3 objects permission level, default is private
# set to "public-read" to allow public access
AWS_S3_ACL=private
Expand Down
5 changes: 5 additions & 0 deletions app.json
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,11 @@
"value": "26214400",
"required": false
},
"AWS_S3_FORCE_PATH_STYLE": {
"description": "Use path-style URL's for connecting to S3 instead of subdomain. This is useful for S3-compatible storage.",
"value": "true",
"required": false
},
"AWS_REGION": {
"value": "us-east-1",
"description": "Region in which the above S3 bucket exists",
Expand Down
3 changes: 1 addition & 2 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,8 @@ if (process.env.AWS_ACCESS_KEY_ID) {
"AWS_REGION",
"AWS_SECRET_ACCESS_KEY",
"AWS_S3_UPLOAD_BUCKET_URL",
"AWS_S3_UPLOAD_BUCKET_NAME",
"AWS_S3_UPLOAD_MAX_SIZE",
].forEach(key => {
].forEach((key) => {
if (!process.env[key]) {
console.error(`The ${key} env variable must be set when using AWS`);
// $FlowFixMe
Expand Down
27 changes: 12 additions & 15 deletions server/utils/s3.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,18 +4,18 @@ import * as Sentry from "@sentry/node";
import AWS from "aws-sdk";
import addHours from "date-fns/add_hours";
import format from "date-fns/format";
import invariant from "invariant";
import fetch from "isomorphic-fetch";

const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
const AWS_REGION = process.env.AWS_REGION;
const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME;
const AWS_S3_UPLOAD_BUCKET_NAME = process.env.AWS_S3_UPLOAD_BUCKET_NAME || "";
const AWS_S3_FORCE_PATH_STYLE = process.env.AWS_S3_FORCE_PATH_STYLE !== "false";

const s3 = new AWS.S3({
s3ForcePathStyle: true,
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
s3ForcePathStyle: AWS_S3_FORCE_PATH_STYLE,
accessKeyId: AWS_ACCESS_KEY_ID,
secretAccessKey: AWS_SECRET_ACCESS_KEY,
endpoint: new AWS.Endpoint(process.env.AWS_S3_UPLOAD_BUCKET_URL),
signatureVersion: "v4",
});
Expand Down Expand Up @@ -84,26 +84,24 @@ export const publicS3Endpoint = (isServerUpload?: boolean) => {
"localhost:"
).replace(/\/$/, "");

return `${host}/${isServerUpload && isDocker ? "s3/" : ""}${
process.env.AWS_S3_UPLOAD_BUCKET_NAME
}`;
return `${host}/${
isServerUpload && isDocker ? "s3/" : ""
}${AWS_S3_UPLOAD_BUCKET_NAME}`;
};

export const uploadToS3FromUrl = async (
url: string,
key: string,
acl: string
) => {
invariant(AWS_S3_UPLOAD_BUCKET_NAME, "AWS_S3_UPLOAD_BUCKET_NAME not set");

try {
// $FlowIssue https://github.com/facebook/flow/issues/2171
const res = await fetch(url);
const buffer = await res.buffer();
await s3
.putObject({
ACL: acl,
Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
ContentType: res.headers["content-type"],
ContentLength: res.headers["content-length"],
Expand All @@ -126,18 +124,17 @@ export const uploadToS3FromUrl = async (
export const deleteFromS3 = (key: string) => {
return s3
.deleteObject({
Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
})
.promise();
};

export const getSignedImageUrl = async (key: string) => {
invariant(AWS_S3_UPLOAD_BUCKET_NAME, "AWS_S3_UPLOAD_BUCKET_NAME not set");
const isDocker = process.env.AWS_S3_UPLOAD_BUCKET_URL.match(/http:\/\/s3:/);

const params = {
Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
Expires: 60,
};
Expand All @@ -149,7 +146,7 @@ export const getSignedImageUrl = async (key: string) => {

export const getImageByKey = async (key: string) => {
const params = {
Bucket: process.env.AWS_S3_UPLOAD_BUCKET_NAME,
Bucket: AWS_S3_UPLOAD_BUCKET_NAME,
Key: key,
};

Expand Down

0 comments on commit 7627990

Please sign in to comment.