Merge pull request #4065 from consideRatio/pr/deployer-skip-deploy
automation/deployer: respect PR label `deployer:skip-deploy`
consideRatio authored May 16, 2024
2 parents 296d855 + 5e11ed4 commit 74b3c4c
Showing 4 changed files with 112 additions and 8 deletions.
37 changes: 36 additions & 1 deletion .github/workflows/deploy-hubs.yaml
@@ -99,6 +99,41 @@ jobs:
pip install --editable .
pip list
- name: Get merged/open PR labels
uses: actions/github-script@v7
id: pr-labels
with:
# Both pull_request and push events can have triggered this job to run. A
# context with PR info (including its labels) is available when this
# job is triggered via pull_request, but not when triggered via push -
# a push can be triggered by things other than merged PRs. Because of
# this, we check whether a pushed commit matches some PR's merge commit,
# and if so, get its labels.
script: |
let labels = null;
if (context.eventName === 'pull_request') {
labels = context.payload.pull_request.labels;
}
else if (context.eventName === 'push') {
// api ref: https://octokit.github.io/rest.js/v20#pulls-list
const resp = await github.rest.pulls.list(
{
owner: context.repo.owner,
repo: context.repo.repo,
state: 'closed',
sort: 'updated',
direction: 'desc',
per_page: 100,
}
);
const merged_pr = resp.data.find(pr => pr.merge_commit_sha === context.sha);
labels = merged_pr?.labels;
}
const label_names = (labels || []).map(l => l.name);
return label_names;
- name: Identify files that have been added or modified
# Action repo: https://github.com/dorny/paths-filter
uses: dorny/paths-filter@v3
@@ -121,7 +156,7 @@ jobs:
# by one
- name: Generate matrix jobs
run: |
deployer generate helm-upgrade-jobs "${{ steps.changed-files.outputs.changed_files }}"
deployer generate helm-upgrade-jobs "${{ steps.changed-files.outputs.changed_files }}" '${{ steps.pr-labels.outputs.result }}'
# The comment-deployment-plan-pr.yaml workflow won't have the correct context to
# know the PR number, so we save it to a file to pass to that workflow
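For orientation, the pr-labels step returns its result as a JSON-encoded list of label name strings (exposed to later steps as steps.pr-labels.outputs.result), which the workflow passes as a single quoted argument to the deployer. A minimal sketch of how such a value is parsed on the Python side, using a hypothetical example string:

import json

# Hypothetical value of steps.pr-labels.outputs.result for a labeled PR.
raw_result = '["unrelated1", "deployer:skip-deploy"]'

# The deployer parses the argument with json.loads (see jobs.py further down
# in this diff).
pr_labels = json.loads(raw_result)

print("deployer:skip-deploy" in pr_labels)  # True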
10 changes: 10 additions & 0 deletions deployer/commands/generate/helm_upgrade/decision.py
@@ -68,6 +68,7 @@ def generate_hub_matrix_jobs(
cluster_config,
cluster_info,
added_or_modified_files,
pr_labels=None,
upgrade_all_hubs_on_this_cluster=False,
upgrade_all_hubs_on_all_clusters=False,
):
@@ -88,6 +89,7 @@ def generate_hub_matrix_jobs(
redeployed.
added_or_modified_files (set[str]): A set of all added or modified files
provided in a GitHub Pull Request
pr_labels (list, optional): A list of the PR's label names; if it contains 'deployer:skip-deploy', no matrix jobs are generated. Defaults to None.
upgrade_all_hubs_on_this_cluster (bool, optional): If True, generates jobs to
upgrade all hubs on the given cluster. This is triggered when the
cluster.yaml file itself has been modified. Defaults to False.
@@ -100,6 +102,9 @@
cluster, the cloud provider that cluster runs on, the name of a hub
deployed to that cluster, and the reason that hub needs to be redeployed.
"""
if pr_labels and "deployer:skip-deploy" in pr_labels:
return []

# Empty list to store all the matrix job definitions in
matrix_jobs = []

@@ -148,6 +153,7 @@ def generate_support_matrix_jobs(
cluster_config,
cluster_info,
added_or_modified_files,
pr_labels=None,
upgrade_support_on_this_cluster=False,
upgrade_support_on_all_clusters=False,
):
@@ -168,6 +174,7 @@
cluster to be redeployed.
added_or_modified_files (set[str]): A set of all added or modified files
provided in a GitHub Pull Request
pr_labels (list, optional): A list of the PR's label names; if it contains 'deployer:skip-deploy', no matrix jobs are generated. Defaults to None.
upgrade_support_on_this_cluster (bool, optional): If True, generates jobs to
update the support chart on the given cluster. This is triggered when the
cluster.yaml file itself is modified. Defaults to False.
@@ -192,6 +199,9 @@
},
]
"""
if pr_labels and "deployer:skip-deploy" in pr_labels:
return []

# Rename dictionary key
cluster_info["reason_for_support_redeploy"] = cluster_info.pop(
"reason_for_redeploy"
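The guard added to both generator functions is a plain early return on the label list. A minimal standalone sketch of the pattern, where generate_jobs_sketch is a hypothetical stand-in for the real functions (which also take cluster config and changed files):

def generate_jobs_sketch(pr_labels=None):
    # Any PR carrying the deployer:skip-deploy label short-circuits to an
    # empty job matrix before any file analysis happens.
    if pr_labels and "deployer:skip-deploy" in pr_labels:
        return []
    return [{"cluster_name": "example", "hub_name": "example-hub"}]

assert generate_jobs_sketch(["deployer:skip-deploy"]) == []
assert generate_jobs_sketch(None) != []
assert generate_jobs_sketch([]) != []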
21 changes: 14 additions & 7 deletions deployer/commands/generate/helm_upgrade/jobs.py
@@ -26,13 +26,19 @@
def helm_upgrade_jobs(
changed_filepaths: str = typer.Argument(
..., help="Comma delimited list of files that have changed"
)
),
pr_labels: str = typer.Argument(
"[]",
help="JSON formatted list of PR labels, where 'deployer:skip-deploy' is respected.",
),
):
"""
Analyze added or modified files from a GitHub Pull Request and decide which
clusters and/or hubs require helm upgrades to be performed for their *hub helm
charts or the support helm chart.
Analyze added or modified files and labels from a GitHub Pull Request and
decide which clusters and/or hubs require helm upgrades to be performed for
their *hub helm charts or the support helm chart.
"""
pr_labels = json.loads(pr_labels)

changed_filepaths = changed_filepaths.split(",")
(
upgrade_support_on_all_clusters,
Expand Down Expand Up @@ -87,6 +93,7 @@ def helm_upgrade_jobs(
cluster_config,
cluster_info,
set(changed_filepaths),
pr_labels,
upgrade_all_hubs_on_this_cluster=upgrade_all_hubs_on_this_cluster,
upgrade_all_hubs_on_all_clusters=upgrade_all_hubs_on_all_clusters,
)
@@ -99,6 +106,7 @@ def helm_upgrade_jobs(
cluster_config,
cluster_info,
set(changed_filepaths),
pr_labels,
upgrade_support_on_this_cluster=upgrade_support_on_this_cluster,
upgrade_support_on_all_clusters=upgrade_support_on_all_clusters,
)
@@ -135,10 +143,9 @@ def helm_upgrade_jobs(
if ci_env:
# Add these matrix jobs as environment variables for use in another job
with open(env_file, "a") as f:
f.write(f"prod-hub-matrix-jobs={json.dumps(prod_hub_matrix_jobs)}")
f.write("\n")
f.write(f"prod-hub-matrix-jobs={json.dumps(prod_hub_matrix_jobs)}\n")
f.write(
f"support-and-staging-matrix-jobs={json.dumps(support_and_staging_matrix_jobs)}"
f"support-and-staging-matrix-jobs={json.dumps(support_and_staging_matrix_jobs)}\n"
)

# Don't bother generating a comment if both of the matrices are empty
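With the new optional argument in place, the command can also be exercised outside CI. A sketch of a local invocation, assuming the deployer CLI is installed; the changed-file path and label list are illustrative:

import subprocess

subprocess.run(
    [
        "deployer", "generate", "helm-upgrade-jobs",
        # Comma-delimited list of changed files (illustrative path).
        "config/clusters/cluster1/hub1.values.yaml",
        # JSON-formatted list of PR labels; defaults to "[]" when omitted.
        '["deployer:skip-deploy"]',
    ],
    check=True,
)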
52 changes: 52 additions & 0 deletions tests/test_helm_upgrade_decision.py
@@ -171,6 +171,32 @@ def test_generate_hub_matrix_jobs_all_hubs():
assert isinstance(result_matrix_jobs[0], dict)


def test_generate_hub_matrix_jobs_skip_deploy_label():
cluster_file = root_path.joinpath("tests/test-clusters/cluster1/cluster.yaml")
with open(cluster_file) as f:
cluster_config = yaml.load(f)

cluster_info = {
"cluster_name": cluster_config.get("name", {}),
"provider": cluster_config.get("provider", {}),
"reason_for_redeploy": "",
}

modified_file = {
root_path.joinpath("tests/test-clusters/cluster1/hub1.values.yaml"),
}

pr_labels = ["unrelated1", "deployer:skip-deploy", "unrelated2"]

expected_matrix_jobs = []

result_matrix_jobs = generate_hub_matrix_jobs(
cluster_file, cluster_config, cluster_info, modified_file, pr_labels
)

case.assertCountEqual(result_matrix_jobs, expected_matrix_jobs)


def test_generate_support_matrix_jobs_one_cluster():
cluster_file = root_path.joinpath("tests/test-clusters/cluster1/cluster.yaml")
with open(cluster_file) as f:
@@ -246,6 +272,32 @@ def test_generate_support_matrix_jobs_all_clusters():
assert isinstance(result_matrix_jobs[0], dict)


def test_generate_support_matrix_jobs_skip_deploy_label():
cluster_file = root_path.joinpath("tests/test-clusters/cluster1/cluster.yaml")
with open(cluster_file) as f:
cluster_config = yaml.load(f)

cluster_info = {
"cluster_name": cluster_config.get("name", {}),
"provider": cluster_config.get("provider", {}),
"reason_for_redeploy": "",
}

modified_file = {
root_path.joinpath("tests/test-clusters/cluster1/support.values.yaml"),
}

pr_labels = ["unrelated1", "deployer:skip-deploy", "unrelated2"]

expected_matrix_jobs = []

result_matrix_jobs = generate_support_matrix_jobs(
cluster_file, cluster_config, cluster_info, modified_file, pr_labels
)

case.assertCountEqual(result_matrix_jobs, expected_matrix_jobs)


def test_discover_modified_common_files_hub_helm_charts():
input_path_basehub = [os.path.join("helm-charts", "basehub", "Chart.yaml")]
input_path_daskhub = [os.path.join("helm-charts", "daskhub", "Chart.yaml")]
