From 5ee337e4fd7b17608c8bfe9196cd127115e9fb3a Mon Sep 17 00:00:00 2001
From: Emily Rockman
Date: Fri, 29 Mar 2024 11:52:36 -0500
Subject: [PATCH 1/4] strip out AWS backup

---
 .github/workflows/build.yml | 163 ++----------------------------------
 1 file changed, 6 insertions(+), 157 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index bb3f192d..95db68bc 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -1,5 +1,5 @@
 # **what?**
-# Build release artifacts and store them to S3 bucket if they do not already exist.
+# Build release artifacts
 #
 # Expected build artifact layout:
 #
 # ├── dist/
 # │   ├── dbt-*.whl
 # └── .md
 #
-# Build artifacts get stored in S3 to a bucket with the following directory structure:
-# "s3:////////"
-#
-# Notes:
-# - resolves based on `test_run` and `nightly_release` inputs.
-#     nightly_release == true will use "nightly-releases"
-#     nightly_release == false resolves based on `test_run` input
-#       test_run == true will use "artifacts_testing"
-#       test_run == false will use "artifacts"
-#
-# Examples:
-# nightly_release == true: "s3://core-team-artifacts/dbt-labs/dbt-core/nightly-releases/1.4.0a1.dev01112023+nightly/aaa410f17d300f1bde2cd67c03e48df135ab347b"
-# test_run == true : "s3://core-team-artifacts/dbt-labs/dbt-core/artifacts_testing/1.2.3/ce98e6f067d9fa63a9b213bf99ebaf0c29d2b7eb/"
-# test_run == false : "s3://core-team-artifacts/dbt-labs/dbt-core/artifacts/1.2.3/ce98e6f067d9fa63a9b213bf99ebaf0c29d2b7eb/"
-#
-# Inputs:
 # sha: The commit to attach to this release
 # version_number: The release version number (i.e. 1.0.0b1, 1.2.3rc2, 1.0.0)
 # changelog_path: Path to the changelog file for release notes
 # build_script_path: Path to the build script
-# s3_bucket_name: AWS S3 bucket name
 # package_test_command: Command to use to check package runs
 # test_run: Test run (Bucket to upload the artifact)
 # nightly_release: Identifier that this is nightly release
@@ -43,9 +26,7 @@
 # Validation Checks
 #
 # 1. Make sure the sha has a changelog entry for this version and the version bump has been completed.
-# 2. Check if build already exists in AWS s3 bucket. It will live in a bucket following the env.s3 naming convention below.
-#    If it does exist, upload it to the GitHub artifacts and skip the rest of the workflow.
-# 3. Only upload artifacts and changelog to S3 if tests pass
+# 2. Build the artifacts and check the integrity of the artifacts.

 name: Build

@@ -66,7 +47,8 @@ on:
         default: "scripts/build-dist.sh"
         type: string
       s3_bucket_name:
-        required: true
+        description: deprecated - still here to not break backwards compatibility
+        required: false
         default: "core-team-artifacts"
         type: string
       package_test_command:
@@ -82,23 +64,11 @@
         default: false
         type: boolean

-    # pass through secrets so every repo can have their own and won't depend on a name
-    secrets:
-      AWS_ACCESS_KEY_ID:
-        description: AWS Access Key ID
-        required: true
-      AWS_SECRET_ACCESS_KEY:
-        description: AWS Access Key
-        required: true
-
 permissions:
   contents: write
-  # this will be needed if we go with OIDC for auth instead of managing secrets in github for AWS
-  # id-token: write # https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers#adding-permissions-settings

 env:
-  ARTIFACT_RETENTION_DAYS: 2
-  AWS_REGION: "us-east-1"
+  ARTIFACT_RETENTION_DAYS: 3
   PYTHON_TARGET_VERSION: 3.8
   NOTIFICATION_PREFIX: "[Build]"

@@ -119,37 +89,9 @@ jobs:
           echo Nightly release: ${{ inputs.nightly_release }}
           # ENVIRONMENT VARIABLES
           echo GitHub artifact retention days: ${{ env.ARTIFACT_RETENTION_DAYS }}
-          echo Amazon Web Services region: ${{ env.AWS_REGION }}
           echo Python target version: ${{ env.PYTHON_TARGET_VERSION }}
           echo Notification prefix: ${{ env.NOTIFICATION_PREFIX }}

-  resolve-aws-bucket:
-    runs-on: ubuntu-latest
-    outputs:
-      aws-s3-bucket: ${{ steps.bucket_path.outputs.path }}
-
-    steps:
-      - name: "Resolve S3 Bucket Path"
-        id: bucket_path
-        run: |
-          # Resolve folder to upload/check build artifact
-          artifact_folder="artifacts"
-          if [[ ${{ inputs.nightly_release }} == true ]]
-          then
-            artifact_folder="nightly-releases"
-          elif [[ ${{ inputs.test_run }} == true ]]
-          then
-            artifact_folder="artifacts_testing"
-          fi
-          # Generate path for build artifact.
-          # Include commit in path in case release commit gets updates on subsequent runs
-          bucket_path="s3://${{ inputs.s3_bucket_name }}/${{ github.repository }}/$artifact_folder/${{ inputs.version_number }}/${{ inputs.sha }}"
-          echo "path=$bucket_path" >> $GITHUB_OUTPUT
-          # Send notification
-          title="S3 Bucket Path"
-          echo "$title: $bucket_path"
-          echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$bucket_path"
-
   audit-version-changelog:
     # Make sure the changelog has been generated and the version is up to date
     runs-on: ubuntu-latest
@@ -195,75 +137,10 @@
             exit 1
           fi

-  check-build-exists:
-    runs-on: ubuntu-latest
-    needs: [audit-version-changelog, resolve-aws-bucket]
-
-    outputs:
-      is_exists: ${{ steps.artifact_exists.outputs.is_exists }}
-
-    steps:
-      - name: "Configure Aws Credentials"
-        uses: aws-actions/configure-aws-credentials@v2
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: ${{ env.AWS_REGION }}
-
-      - name: "Copy Artifact From S3 Via CLI"
-        run: |
-          aws s3 cp ${{ needs.resolve-aws-bucket.outputs.aws-s3-bucket }} . --recursive # since it's an entire directory
-
-      - name: "[DEBUG] Display Structure Of All Downloaded Files"
-        run: ls -R
-
-      - name: "Check Artifact Integrity"
-        id: artifact_integrity
-        uses: andstor/file-existence-action@v3
-        with:
-          files: "${{ inputs.changelog_path }}, dist/*.tar.gz, dist/*.whl"
-
-      # upload the files downloaded from S3 to artifacts so we don't have to keep
-      # downloading from S3
-      - name: "Upload Artifact From S3 To GitHub"
-        if: ${{ steps.artifact_integrity.outputs.files_exists == 'true' }}
-        uses: actions/upload-artifact@v4
-        with:
-          name: ${{ inputs.version_number }}
-          path: |
-            ${{ inputs.changelog_path }}
-            dist/
-          if-no-files-found: error
-          retention-days: ${{ env.ARTIFACT_RETENTION_DAYS }}
-
-      - name: "[Notification] Upload Artifact From S3 To GitHub"
-        if: ${{ steps.artifact_integrity.outputs.files_exists == 'true' }}
-        run: |
-          title="Artifact ${{ inputs.version_number }} uploaded from S3 To GitHub"
-          message="The build artifact is pulled from the S3 bucket and uploaded to the GitHub artifact storage."
-          echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
-
-      - name: "Set Artifact Existence For Subsequent Jobs"
-        id: artifact_exists
-        run: echo "is_exists=${{ steps.artifact_integrity.outputs.files_exists }}" >> $GITHUB_OUTPUT
-
-  skip-build:
-    runs-on: ubuntu-latest
-    needs: [check-build-exists]
-    if: ${{ needs.check-build-exists.outputs.is_exists == 'true' }}
-
-    steps:
-      - name: "Build Exists, Skip To Test"
-        run: |
-          title="Build Exists in AWS S3 bucket"
-          message="A build already exists for version ${{ inputs.version_number }}, skipping build job."
-          echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
-
   unit:
     name: Unit Test
     runs-on: ubuntu-latest
-    needs: [audit-version-changelog, check-build-exists]
-    if: ${{ needs.check-build-exists.outputs.is_exists == 'false' }}
+    needs: [audit-version-changelog]

     env:
       TOXENV: "unit"
@@ -388,31 +265,3 @@
       - name: "[DEBUG] Check Source Distributions"
         run: |
           ${{ inputs.package_test_command }}
-
-  upload-artifacts-aws:
-    runs-on: ubuntu-latest
-    needs: [test-build, resolve-aws-bucket]
-
-    steps:
-      - name: "Download Artifact ${{ inputs.version_number }}"
-        uses: actions/download-artifact@v4
-        with:
-          name: ${{ inputs.version_number }}
-          path: .
-
-      - name: "Display Structure Of All Downloaded Files"
-        run: ls -R
-
-      - name: "Configure Aws Credentials"
-        uses: aws-actions/configure-aws-credentials@v2
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: ${{ env.AWS_REGION }}
-
-      - name: "Upload Artifact To S3 Via CLI"
-        run: |
-          aws s3 cp . ${{ needs.resolve-aws-bucket.outputs.aws-s3-bucket }} --recursive # since it's an entire directory
-          title="Artifact ${{ inputs.version_number }} uploaded to AWS S3 bucket"
-          message="S3 path: ${{ needs.resolve-aws-bucket.outputs.aws-s3-bucket }}"
-          echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
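Note on the first patch: with the S3 jobs removed, a caller of this reusable workflow only needs the build-related inputs. A minimal invocation might look like the sketch below; the `uses:` reference and every input value are illustrative, not taken from any particular repository. Also note that this patch drops the `secrets:` declarations entirely, so a caller that still forwards AWS credentials can fail validation (a reusable workflow only accepts secrets it declares); the fourth patch in this series restores the declarations for exactly that reason.

    # hypothetical caller of this reusable build workflow (all values illustrative)
    jobs:
      build:
        uses: dbt-labs/actions/.github/workflows/build.yml@main  # assumed location of this workflow
        with:
          sha: ${{ github.sha }}
          version_number: "1.2.3rc2"                  # format taken from the header comment above
          changelog_path: ".changes/1.2.3-rc2.md"     # illustrative path
          build_script_path: "scripts/build-dist.sh"
          package_test_command: "dbt --version"       # illustrative check command
          test_run: true
          nightly_release: false
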
From 7dfb577fbd64fcfedb3c0c6f6a9e40ecea6f9441 Mon Sep 17 00:00:00 2001
From: Emily Rockman
Date: Fri, 29 Mar 2024 11:56:41 -0500
Subject: [PATCH 2/4] remove last ref to s3 bucket

---
 .github/workflows/build.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 95db68bc..5304cdac 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -83,7 +83,6 @@ jobs:
           echo The release version number: ${{ inputs.version_number }}
           echo The changelog path: ${{ inputs.changelog_path }}
           echo The build script path: ${{ inputs.build_script_path }}
-          echo The s3 bucket name: ${{ inputs.s3_bucket_name }}
           echo The package test command: ${{ inputs.package_test_command }}
           echo Test run: ${{ inputs.test_run }}
           echo Nightly release: ${{ inputs.nightly_release }}

From dd3fa7850ace17a3488f78955416e47f302ece40 Mon Sep 17 00:00:00 2001
From: Emily Rockman
Date: Fri, 29 Mar 2024 11:59:46 -0500
Subject: [PATCH 3/4] bit more cleanup

---
 .github/workflows/build.yml | 44 ++-----------------------------------
 1 file changed, 2 insertions(+), 42 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 5304cdac..27314610 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -49,7 +49,6 @@ on:
       s3_bucket_name:
         description: deprecated - still here to not break backwards compatibility
         required: false
-        default: "core-team-artifacts"
         type: string
       package_test_command:
         required: true
@@ -69,7 +68,7 @@ permissions:
   contents: write

 env:
   ARTIFACT_RETENTION_DAYS: 3
-  PYTHON_TARGET_VERSION: 3.8
+  PYTHON_TARGET_VERSION: 3.11
   NOTIFICATION_PREFIX: "[Build]"

@@ -136,48 +135,9 @@ jobs:
             exit 1
           fi

-  unit:
-    name: Unit Test
-    runs-on: ubuntu-latest
-    needs: [audit-version-changelog]
-
-    env:
-      TOXENV: "unit"
-
-    steps:
-      - name: "Checkout ${{ github.repository }} Commit ${{ inputs.sha }}"
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-          ref: ${{ inputs.sha }}
-
-      - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ env.PYTHON_TARGET_VERSION }}
-
-      - name: "Install Spark Dependencies"
-        if: ${{ contains(github.repository, 'dbt-labs/dbt-spark') }}
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsasl2-dev
-
-      - name: "Install Python Dependencies"
-        run: |
-          python -m pip install --user --upgrade pip
-          python -m pip install tox
-          python -m pip --version
-          python -m tox --version
-
-      - name: "Run Tox"
-        run: tox
-
   build-packages:
     runs-on: ubuntu-latest
-    needs: [unit]
-
-    outputs:
-      finished: ${{ steps.set_success.outputs.finished }}
+    needs: [audit-version-changelog]

     steps:
       - name: "Checkout Commit - ${{ inputs.sha }}"
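The fourth patch below re-adds the AWS secret declarations even though nothing in the workflow consumes them any more. The motivation stated in the added comment is backwards compatibility: an existing caller that still forwards these secrets, as sketched below, would otherwise break, since a reusable workflow rejects secrets it does not declare. The caller layout here is illustrative, not taken from a real repository.

    # existing caller that still forwards the now-unused AWS credentials (illustrative)
    jobs:
      build:
        uses: dbt-labs/actions/.github/workflows/build.yml@main  # assumed location of this workflow
        with:
          sha: ${{ github.sha }}
          version_number: "1.0.0b1"
          changelog_path: ".changes/1.0.0-b1.md"   # illustrative path
          package_test_command: "dbt --version"    # illustrative check command
        secrets:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

Keeping the secrets declared while ignoring them lets such callers continue to work unchanged and drop the `secrets:` block at their own pace.
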
From 394c72d332448941b9b7c76192eabc3bf5615d7e Mon Sep 17 00:00:00 2001
From: Emily Rockman
Date: Wed, 10 Apr 2024 09:40:27 -0500
Subject: [PATCH 4/4] replace secret definitions

---
 .github/workflows/build.yml | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 27314610..13bd0333 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -62,6 +62,15 @@
         required: false
         default: false
         type: boolean
+    # these secrets are no longer used but must remain defined to avoid breaking changes
+    secrets:
+      AWS_ACCESS_KEY_ID:
+        description: AWS Access Key ID
+        required: true
+      AWS_SECRET_ACCESS_KEY:
+        description: AWS Access Key
+        required: true
+
 permissions:
   contents: write
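After the series, the workflow's only job around artifacts is the one described in the updated header comment: build them and check their integrity before publishing to GitHub artifact storage. The exact steps of the surviving build/test jobs are not shown in the hunks above; a sketch of that integrity-and-upload path, assembled from the same actions the removed S3 jobs used (andstor/file-existence-action and actions/upload-artifact), might look like this:

    # sketch only, assuming the build job checks its own dist/ output the same way
    # the removed check-build-exists job checked the S3 copy
      - name: "Check Artifact Integrity"
        id: artifact_integrity
        uses: andstor/file-existence-action@v3
        with:
          files: "${{ inputs.changelog_path }}, dist/*.tar.gz, dist/*.whl"

      - name: "Upload Build Artifact - ${{ inputs.version_number }}"
        if: ${{ steps.artifact_integrity.outputs.files_exists == 'true' }}
        uses: actions/upload-artifact@v4
        with:
          name: ${{ inputs.version_number }}
          path: |
            ${{ inputs.changelog_path }}
            dist/
          if-no-files-found: error
          retention-days: ${{ env.ARTIFACT_RETENTION_DAYS }}

With the S3 copy gone, the GitHub artifact (retained for ARTIFACT_RETENTION_DAYS, now 3 days) is the only place downstream release steps can pull the build from.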