feat: allow s3 file to be copied to specific URI (#85)
alexeagle authored Nov 13, 2024
1 parent 4a40fbb commit 55f3827
Showing 5 changed files with 75 additions and 26 deletions.
3 changes: 2 additions & 1 deletion MODULE.bazel
@@ -8,7 +8,8 @@ module(

# Lower-bound dependency versions.
# Do not change unless the rules no longer work with the current version.
bazel_dep(name = "aspect_bazel_lib", version = "2.5.3")
# Needed for #804 Use statically-linked bsdtar on all platforms
bazel_dep(name = "aspect_bazel_lib", version = "2.6.1")
bazel_dep(name = "bazel_skylib", version = "1.5.0")
bazel_dep(name = "platforms", version = "0.0.8")
bazel_dep(name = "rules_oci", version = "1.7.4")
18 changes: 13 additions & 5 deletions aws/private/s3_sync.bzl
@@ -19,6 +19,11 @@ _ATTRS = {
doc = "file containing a single line: the S3 path to copy to. Useful because the file content may be stamped.",
allow_single_file = True,
),
"destination_uri_file": attr.label(
doc = """Only permitted when copying a single src file. A file containing a single line:
the full [S3Uri](https://docs.aws.amazon.com/cli/latest/reference/s3/#path-argument-type) to copy the file to.""",
allow_single_file = True,
),
"role": attr.string(
doc = "Assume this role before copying files, using `aws sts assume-role`",
),
@@ -36,15 +41,18 @@ def _s3_sync_impl(ctx):
executable = ctx.actions.declare_file("{}/s3_sync.sh".format(ctx.label.name))
runfiles = [executable] + ctx.files.srcs
vars = []
if not ctx.attr.bucket and not ctx.attr.bucket_file:
fail("Either 'bucket' or 'bucket_file' must be set")
if ctx.attr.bucket and ctx.attr.bucket_file:
fail("At most one of 'bucket' or 'bucket_file' may be set")
if int(bool(ctx.attr.bucket)) + int(bool(ctx.attr.bucket_file)) + int(bool(ctx.attr.destination_uri_file)) != 1:
fail("Exactly one of 'bucket', 'bucket_file', or 'destination_uri_file' must be set")
if ctx.attr.bucket_file:
vars.append("bucket_file=\"{}\"".format(ctx.file.bucket_file.short_path))
runfiles.append(ctx.file.bucket_file)
else:
elif ctx.attr.bucket:
vars.append("bucket=\"{}\"".format(ctx.attr.bucket))
else:
if len(ctx.files.srcs) > 1:
fail("Only one source file may be copied using destination_uri_file")
vars.append("destination_uri_file=\"{}\"".format(ctx.file.destination_uri_file.short_path))
runfiles.append(ctx.file.destination_uri_file)
if ctx.attr.role:
vars.append("role=\"{}\"".format(ctx.attr.role))
ctx.actions.expand_template(
55 changes: 36 additions & 19 deletions aws/private/s3_sync.sh
@@ -76,11 +76,23 @@ Options:
Default: false
Arguments:
<artifact> The path to a file which will be copied to the S3 bucket.
<artifact> The path to a file or directory which will be copied to the S3 bucket.
One or more artifacts can be specified.
EOF
}

s3_cp() {
local src="${1}"
local dst="${2}"

if [[ "${dry_run}" == "false" ]]; then
warn "Copying ${src} to ${dst}"
"$aws" s3 cp "${src}" "${dst}"
else
warn "[DRY RUN] Would copy ${src} to ${dst}"
fi
}

cp_artifact() {
local artifact="${1}"
local bucket="${2}"
@@ -91,14 +103,7 @@ cp_artifact() {
cp_artifact "${f}" "${bucket}"
done
else
local dst
dst="${bucket}/$(basename "${artifact}")"
if [[ "${dry_run}" == "false" ]]; then
warn "Copying ${artifact} to ${dst}"
"$aws" s3 cp "${artifact}" "${dst}"
else
warn "[DRY RUN] Would copy ${artifact} to ${dst}"
fi
s3_cp "${artifact}" "${bucket}/$(basename "${artifact}")"
fi
}

@@ -120,6 +125,10 @@ while (("$#")); do
bucket_file="${2}"
shift 2
;;
"--destination_uri_file")
destination_uri_file="${2}"
shift 2
;;
"--dry_run")
dry_run="true"
shift 1
@@ -148,15 +157,20 @@ done

# Process Arguments

[[ -n "${bucket_file:-}" ]] && bucket="$(<"${bucket_file}")"
[[ ${#artifacts[@]} -gt 0 ]] || usage_error "No artifacts were specified."

[[ -n "${bucket:-}" ]] || usage_error "Missing value for 'bucket'."
if [[ ! -z "${destination_uri_file}" ]]; then
[[ ${#artifacts[@]} -eq 1 ]] || usage_error "destination_uri_file may be used only with a single artifact to copy"
else
[[ -n "${bucket_file:-}" ]] && bucket="$(<"${bucket_file}")"

protocol="s3"
[[ -n "${bucket:-}" ]] || usage_error "Missing value for 'bucket'."

[[ "${bucket}" =~ ^${protocol}:// ]] || bucket="${protocol}://${bucket}"
# Syntax sugar: prefix the bucket URI with the s3:// protocol if absent
protocol="s3"

[[ ${#artifacts[@]} -gt 0 ]] || usage_error "No artifacts were specified."
[[ "${bucket}" =~ ^${protocol}:// ]] || bucket="${protocol}://${bucket}"
fi

[[ "${dry_run}" == "true" ]] &&
warn <<-'EOF'
@@ -190,11 +204,14 @@ fi

# Copy artifacts

msg "Copying the following artifacts to ${bucket}:" "${artifacts[@]}" ""

for artifact in "${artifacts[@]}"; do
cp_artifact "${artifact}" "${bucket}"
done
if [[ ! -z "${destination_uri_file}" ]]; then
s3_cp "${artifacts[0]}" "$(<"${destination_uri_file}")"
else
msg "Copying the following artifacts to ${bucket}:" "${artifacts[@]}" ""
for artifact in "${artifacts[@]}"; do
cp_artifact "${artifact}" "${bucket}"
done
fi

# shellcheck disable=SC2236
if [[ ! -z "${role:-}" && "${dry_run}" == "false" ]]; then
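Taken together, when destination_uri_file is set the generated s3_sync.sh skips the bucket handling entirely and copies the single permitted artifact to the URI read from that file. A minimal sketch of the effective behavior, ignoring the dry-run and role-assumption paths (file names are illustrative, borrowed from the example below):

# Sketch only: roughly what s3_sync.sh ends up doing in the destination_uri_file case.
destination_uri_file="dst.txt"                    # single line: the full S3 URI
artifacts=("my_file.txt")                         # exactly one artifact is allowed in this mode
destination_uri="$(<"${destination_uri_file}")"   # e.g. s3://myorg-bucket/prod123.txt
aws s3 cp "${artifacts[0]}" "${destination_uri}"
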
3 changes: 2 additions & 1 deletion docs/rules.md

Some generated files are not rendered by default.

22 changes: 22 additions & 0 deletions examples/release_to_s3/BUILD.bazel
@@ -30,3 +30,25 @@ s3_sync(
srcs = ["my_file.txt"],
bucket = "my-bucket-name/sub-folder",
)

##############
# Use case: Copy one file to an exact S3 URI that varies depending on stamping
# See https://github.com/aspect-build/rules_aws/issues/83
destination_uri_file = "dst.txt"

expand_template(
name = "destination_uri_file",
out = destination_uri_file,
# as an example, use the --embed_label flag to choose a destination file, e.g.
# bazel run --stamp --embed_label=prod123 //my:s3_sync
# will sync my_file.txt to myorg-bucket/prod123.txt
stamp_substitutions = {"default": "{{BUILD_EMBED_LABEL}}"},
# unstamped builds will release my_file.txt to myorg-bucket/default.txt
template = ["s3://myorg-bucket/default.txt"],
)

s3_sync(
name = "release_to_stamped_filename",
srcs = ["my_file.txt"],
destination_uri_file = destination_uri_file,
)
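
For reference, a stamped run of this example, e.g. bazel run --stamp --embed_label=prod123 on the release_to_stamped_filename target (label spelled out here only for illustration), should expand dst.txt to s3://myorg-bucket/prod123.txt, so the copy is roughly equivalent to:

# Illustrative outcome only, not part of the commit:
aws s3 cp my_file.txt s3://myorg-bucket/prod123.txt   # stamped with --embed_label=prod123
aws s3 cp my_file.txt s3://myorg-bucket/default.txt   # unstamped: template's default line is kept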
