Merge branch 'main' into main
AlexanderSabirov authored Jan 30, 2025
2 parents e460d14 + ec443bd commit 889b72c
Showing 7 changed files with 235 additions and 35 deletions.
16 changes: 16 additions & 0 deletions .github/filters/changelog-entry-check.yml
@@ -0,0 +1,16 @@
dbt-adapters:
- 'dbt-adapters/**'
dbt-tests-adapter:
- 'dbt-tests-adapter/**'
dbt-athena:
- 'dbt-athena/**'
dbt-bigquery:
- 'dbt-bigquery/**'
dbt-postgres:
- 'dbt-postgres/**'
dbt-redshift:
- 'dbt-redshift/**'
dbt-snowflake:
- 'dbt-snowflake/**'
dbt-spark:
- 'dbt-spark/**'
30 changes: 30 additions & 0 deletions .github/filters/integration-tests.yml
@@ -0,0 +1,30 @@
dbt-athena:
- 'dbt-athena/**'
- 'dbt-adapters/**'
- 'dbt-tests-adapter/**'
dbt-athena-community:
- 'dbt-athena/**'
- 'dbt-athena-community/**'
- 'dbt-adapters/**'
- 'dbt-tests-adapter/**'
dbt-bigquery:
- 'dbt-bigquery/**'
- 'dbt-adapters/**'
- 'dbt-tests-adapter/**'
dbt-postgres:
- 'dbt-postgres/**'
- 'dbt-adapters/**'
- 'dbt-tests-adapter/**'
dbt-redshift:
- 'dbt-redshift/**'
- 'dbt-postgres/src/dbt/include/postgres/**'
- 'dbt-adapters/**'
- 'dbt-tests-adapter/**'
dbt-snowflake:
- 'dbt-snowflake/**'
- 'dbt-adapters/**'
- 'dbt-tests-adapter/**'
dbt-spark:
- 'dbt-spark/**'
- 'dbt-adapters/**'
- 'dbt-tests-adapter/**'
25 changes: 25 additions & 0 deletions .github/filters/unit-tests.yml
@@ -0,0 +1,25 @@
dbt-adapters:
- 'dbt-adapters/**'
dbt-athena:
- 'dbt-athena/**'
- 'dbt-adapters/**'
dbt-athena-community:
- 'dbt-athena/**'
- 'dbt-athena-community/**'
- 'dbt-adapters/**'
dbt-bigquery:
- 'dbt-bigquery/**'
- 'dbt-adapters/**'
dbt-postgres:
- 'dbt-postgres/**'
- 'dbt-adapters/**'
dbt-redshift:
- 'dbt-redshift/**'
- 'dbt-postgres/src/dbt/include/postgres/**'
- 'dbt-adapters/**'
dbt-snowflake:
- 'dbt-snowflake/**'
- 'dbt-adapters/**'
dbt-spark:
- 'dbt-spark/**'
- 'dbt-adapters/**'
28 changes: 28 additions & 0 deletions .github/filters/verify-build.yml
@@ -0,0 +1,28 @@
dbt-adapters:
- 'dbt-adapters/**'
dbt-tests-adapter:
- 'dbt-tests-adapter/**'
- 'dbt-adapters/**'
dbt-athena:
- 'dbt-athena/**'
- 'dbt-adapters/**'
dbt-athena-community:
- 'dbt-athena/**'
- 'dbt-athena-community/**'
- 'dbt-adapters/**'
dbt-bigquery:
- 'dbt-bigquery/**'
- 'dbt-adapters/**'
dbt-postgres:
- 'dbt-postgres/**'
- 'dbt-adapters/**'
dbt-redshift:
- 'dbt-redshift/**'
- 'dbt-postgres/**'
- 'dbt-adapters/**'
dbt-snowflake:
- 'dbt-snowflake/**'
- 'dbt-adapters/**'
dbt-spark:
- 'dbt-spark/**'
- 'dbt-adapters/**'
83 changes: 79 additions & 4 deletions .github/workflows/_integration-tests.yml
@@ -126,8 +126,33 @@ jobs:
      - uses: pypa/hatch@install
      - run: hatch run integration-tests tests/functional -k "not TestPython"
        working-directory: ./${{ inputs.package }}

  integration-tests-bigquery-flaky:
    # we only run this for one python version to avoid running in parallel
    if: ${{ inputs.package == 'dbt-bigquery' && inputs.python-version == '3.9' }}
    runs-on: ${{ inputs.os }}
    environment:
      name: "dbt-bigquery"
    env:
      BIGQUERY_TEST_SERVICE_ACCOUNT_JSON: ${{ secrets.BIGQUERY_TEST_SERVICE_ACCOUNT_JSON }}
      BIGQUERY_TEST_ALT_DATABASE: ${{ vars.BIGQUERY_TEST_ALT_DATABASE }}
      BIGQUERY_TEST_NO_ACCESS_DATABASE: ${{ vars.BIGQUERY_TEST_NO_ACCESS_DATABASE }}
      DBT_TEST_USER_1: ${{ vars.DBT_TEST_USER_1 }}
      DBT_TEST_USER_2: ${{ vars.DBT_TEST_USER_2 }}
      DBT_TEST_USER_3: ${{ vars.DBT_TEST_USER_3 }}
      DATAPROC_REGION: ${{ vars.DATAPROC_REGION }}
      DATAPROC_CLUSTER_NAME: ${{ vars.DATAPROC_CLUSTER_NAME }}
      GCS_BUCKET: ${{ vars.GCS_BUCKET }}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.branch }}
          repository: ${{ inputs.repository }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ inputs.python-version }}
      - uses: pypa/hatch@install
      - run: hatch run integration-tests tests/functional -n1 -k "TestPython"
        if: ${{ inputs.python-version == '3.9' }} # we only run this for one version to run in series
        working-directory: ./${{ inputs.package }}

  integration-tests-postgres:
@@ -235,9 +260,6 @@ jobs:
      REDSHIFT_TEST_IAM_USER_ACCESS_KEY_ID: ${{ vars.AWS_USER_ACCESS_KEY_ID }}
      REDSHIFT_TEST_IAM_USER_SECRET_ACCESS_KEY: ${{ secrets.AWS_USER_SECRET_ACCESS_KEY }}
      REDSHIFT_TEST_IAM_ROLE_PROFILE: ${{ vars.AWS_ROLE_PROFILE }}
      DBT_TEST_USER_1: ${{ vars.DBT_TEST_USER_1 }}
      DBT_TEST_USER_2: ${{ vars.DBT_TEST_USER_2 }}
      DBT_TEST_USER_3: ${{ vars.DBT_TEST_USER_3 }}
    steps:
      - uses: actions/checkout@v4
        with:
@@ -265,6 +287,59 @@ jobs:
          aws configure --profile $AWS_ROLE_PROFILE set output json
      - run: hatch run integration-tests tests/functional -m "not flaky" --ddtrace
        working-directory: ./${{ inputs.package }}

  integration-tests-redshift-flaky:
    # we only run this for one python version to avoid running in parallel
    if: ${{ inputs.package == 'dbt-redshift' && inputs.python-version == '3.9' }}
    runs-on: ${{ inputs.os }}
    environment:
      name: "dbt-redshift"
    env:
      AWS_USER_PROFILE: ${{ vars.AWS_USER_PROFILE }}
      AWS_USER_ACCESS_KEY_ID: ${{ vars.AWS_USER_ACCESS_KEY_ID }}
      AWS_USER_SECRET_ACCESS_KEY: ${{ secrets.AWS_USER_SECRET_ACCESS_KEY }}
      AWS_SOURCE_PROFILE: ${{ vars.AWS_SOURCE_PROFILE }}
      AWS_ROLE_PROFILE: ${{ vars.AWS_ROLE_PROFILE }}
      AWS_ROLE_ACCESS_KEY_ID: ${{ vars.AWS_ROLE_ACCESS_KEY_ID }}
      AWS_ROLE_SECRET_ACCESS_KEY: ${{ secrets.AWS_ROLE_SECRET_ACCESS_KEY }}
      AWS_ROLE_ARN: ${{ secrets.AWS_ROLE_ARN }}
      AWS_REGION: ${{ vars.AWS_REGION }}
      REDSHIFT_TEST_DBNAME: ${{ vars.REDSHIFT_TEST_DBNAME }}
      REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
      REDSHIFT_TEST_USER: ${{ vars.REDSHIFT_TEST_USER }}
      REDSHIFT_TEST_PORT: ${{ vars.REDSHIFT_TEST_PORT }}
      REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
      REDSHIFT_TEST_CLUSTER_ID: ${{ vars.REDSHIFT_TEST_CLUSTER_ID }}
      REDSHIFT_TEST_REGION: ${{ vars.AWS_REGION }}
      REDSHIFT_TEST_IAM_USER_PROFILE: ${{ vars.AWS_USER_PROFILE }}
      REDSHIFT_TEST_IAM_USER_ACCESS_KEY_ID: ${{ vars.AWS_USER_ACCESS_KEY_ID }}
      REDSHIFT_TEST_IAM_USER_SECRET_ACCESS_KEY: ${{ secrets.AWS_USER_SECRET_ACCESS_KEY }}
      REDSHIFT_TEST_IAM_ROLE_PROFILE: ${{ vars.AWS_ROLE_PROFILE }}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.branch }}
          repository: ${{ inputs.repository }}
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ inputs.python-version }}
      - uses: pypa/hatch@install
      - name: Create AWS IAM profiles
        run: |
          aws configure --profile $AWS_USER_PROFILE set aws_access_key_id $AWS_USER_ACCESS_KEY_ID
          aws configure --profile $AWS_USER_PROFILE set aws_secret_access_key $AWS_USER_SECRET_ACCESS_KEY
          aws configure --profile $AWS_USER_PROFILE set region $AWS_REGION
          aws configure --profile $AWS_USER_PROFILE set output json
          aws configure --profile $AWS_SOURCE_PROFILE set aws_access_key_id $AWS_ROLE_ACCESS_KEY_ID
          aws configure --profile $AWS_SOURCE_PROFILE set aws_secret_access_key $AWS_ROLE_SECRET_ACCESS_KEY
          aws configure --profile $AWS_SOURCE_PROFILE set region $AWS_REGION
          aws configure --profile $AWS_SOURCE_PROFILE set output json
          aws configure --profile $AWS_ROLE_PROFILE set source_profile $AWS_SOURCE_PROFILE
          aws configure --profile $AWS_ROLE_PROFILE set role_arn $AWS_ROLE_ARN
          aws configure --profile $AWS_ROLE_PROFILE set region $AWS_REGION
          aws configure --profile $AWS_ROLE_PROFILE set output json
      - run: hatch run integration-tests tests/functional -m flaky -n1 --ddtrace
        working-directory: ./${{ inputs.package }}

85 changes: 54 additions & 31 deletions .github/workflows/pull-request-checks.yml
@@ -11,8 +11,49 @@ concurrency:
  cancel-in-progress: true

jobs:
  changelog-entry:
  affected-packages:
    runs-on: ubuntu-latest
    outputs:
      changelog-entry-check: ${{ steps.changelog-entry-check.outputs.changes }}
      verify-build: ${{ steps.verify-build.outputs.changes }}
      unit-tests: ${{ steps.unit-tests.outputs.changes }}
      integration-tests: ${{ steps.integration-tests.outputs.changes }}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.ref }}
          repository: ${{ github.event.pull_request.head.repo.full_name }}
      - id: changelog-entry-check
        uses: dorny/paths-filter@v3
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          filters: .github/filters/changelog-entry-check.yml
      - id: verify-build
        uses: dorny/paths-filter@v3
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          filters: .github/filters/verify-build.yml
      - id: unit-tests
        uses: dorny/paths-filter@v3
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          filters: .github/filters/unit-tests.yml
      - id: integration-tests
        uses: dorny/paths-filter@v3
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          filters: .github/filters/integration-tests.yml

  changelog-entry-check:
    uses: ./.github/workflows/_changelog-entry-check.yml
    needs: affected-packages
    if: ${{ toJson(fromJson(needs.affected-packages.outputs.changelog-entry-check)) != '[]' }}
    strategy:
      fail-fast: false
      matrix:
        package: ${{ fromJSON(needs.affected-packages.outputs.changelog-entry-check) }}
        os: [ubuntu-22.04]
        python-version: ["3.9", "3.10", "3.11", "3.12"]
    with:
      package: "dbt-athena"
      pull-request: ${{ github.event.pull_request.number }}
@@ -24,21 +65,14 @@ jobs:
      branch: ${{ github.event.pull_request.head.ref }}
      repository: ${{ github.event.pull_request.head.repo.full_name }}

  verify-builds:
  verify-build:
    uses: ./.github/workflows/_verify-build.yml
    needs: affected-packages
    if: ${{ toJson(fromJson(needs.affected-packages.outputs.verify-build)) != '[]' }}
    strategy:
      fail-fast: false
      matrix:
        package:
          - "dbt-adapters"
          - "dbt-tests-adapter"
          - "dbt-athena"
          - "dbt-athena-community"
          - "dbt-bigquery"
          - "dbt-postgres"
          - "dbt-redshift"
          - "dbt-snowflake"
          - "dbt-spark"
        package: ${{ fromJSON(needs.affected-packages.outputs.verify-build) }}
        os: [ubuntu-22.04]
        python-version: ["3.9", "3.10", "3.11", "3.12"]
    with:
@@ -50,18 +84,12 @@ jobs:

  unit-tests:
    uses: ./.github/workflows/_unit-tests.yml
    needs: affected-packages
    if: ${{ toJson(fromJson(needs.affected-packages.outputs.unit-tests)) != '[]' }}
    strategy:
      fail-fast: false
      matrix:
        package:
          - "dbt-adapters"
          - "dbt-athena"
          - "dbt-athena-community"
          - "dbt-bigquery"
          - "dbt-postgres"
          - "dbt-redshift"
          - "dbt-snowflake"
          - "dbt-spark"
        package: ${{ fromJSON(needs.affected-packages.outputs.unit-tests) }}
        os: [ ubuntu-22.04 ]
        python-version: ["3.9", "3.10", "3.11", "3.12"]
    with:
@@ -73,17 +101,12 @@ jobs:

  integration-tests:
    uses: ./.github/workflows/_integration-tests.yml
    needs: affected-packages
    if: ${{ toJson(fromJson(needs.affected-packages.outputs.integration-tests)) != '[]' }}
    strategy:
      fail-fast: false
      matrix:
        package:
          - "dbt-athena"
          - "dbt-athena-community"
          - "dbt-bigquery"
          - "dbt-postgres"
          - "dbt-redshift"
          - "dbt-snowflake"
          - "dbt-spark"
        package: ${{ fromJSON(needs.affected-packages.outputs.integration-tests) }}
        os: [ubuntu-22.04]
        python-version: ["3.9", "3.10", "3.11", "3.12"]
    with:
@@ -98,10 +121,10 @@ jobs:
  results:
    name: "Pull request checks" # keep this name, branch protection references it
    if: always()
    needs: [changelog-entry, code-quality, verify-builds, unit-tests, integration-tests]
    needs: [changelog-entry-check, code-quality, verify-build, unit-tests, integration-tests]
    runs-on: ${{ vars.DEFAULT_RUNNER }}
    steps:
      - uses: re-actors/alls-green@release/v1
        with:
          jobs: ${{ toJSON(needs) }}
          allowed-skips: 'changelog-entry'
          allowed-skips: "changelog-entry-check,verify-build,unit-tests,integration-tests"
3 changes: 3 additions & 0 deletions dbt-redshift/pyproject.toml
@@ -55,3 +55,6 @@ filterwarnings = [
    "ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning",
    "ignore:unclosed file .*:ResourceWarning",
]
markers = [
    'flaky: marks tests as flaky so they run one at a time (de-select with `-m "not flaky"`)'
]
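
Note: the new `flaky` marker ties the dbt-redshift test suite to the workflow split above — the main Redshift job deselects these tests with `-m "not flaky"`, while the dedicated integration-tests-redshift-flaky job runs only them, serially, with `-m flaky -n1`. Below is a minimal sketch of how a test would opt into the marker; the test name and body are hypothetical, only the `flaky` marker itself comes from this commit.

import pytest


@pytest.mark.flaky  # marker registered in pyproject.toml above; selected by `-m flaky`, skipped by `-m "not flaky"`
def test_shared_cluster_behavior():  # hypothetical example, not part of this commit
    # Flaky tests typically contend for shared warehouse state, so CI runs
    # them one at a time (-n1) instead of in the parallel run.
    assert True

Locally, the same split can presumably be reproduced with `hatch run integration-tests tests/functional -m "not flaky"` followed by `hatch run integration-tests tests/functional -m flaky -n1`, mirroring the two CI jobs.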
