automation/deployer: respect PR label deployer:skip-deploy #4065

Merged (4 commits) on May 16, 2024
Changes from 3 commits
32 changes: 31 additions & 1 deletion .github/workflows/deploy-hubs.yaml
@@ -99,6 +99,36 @@ jobs:
          pip install --editable .
          pip list

      - name: Get merged/open PR labels
        uses: actions/github-script@v7
        id: pr-labels
        with:
          script: |
            let labels = [];
            if (context.eventName === 'pull_request') {
              labels = context.payload.pull_request.labels;
            }

            // Determine if the pushed commit triggering the workflow is
            // associated with a merged pull request
            else if (context.eventName === 'push') {
              // api ref: https://octokit.github.io/rest.js/v20#pulls-list
              const resp = await github.rest.pulls.list(
                {
                  owner: context.repo.owner,
                  repo: context.repo.repo,
                  state: 'closed',
                  sort: 'updated',
                  direction: 'desc',
                  per_page: 100,
                }
              );
              const merged_pr = resp.data.find(pr => pr.merge_commit_sha === context.sha);
              labels = merged_pr?.labels;
            }
            labels = (labels || []).map(l => l.name);
            return labels

      - name: Identify files that have been added or modified
        # Action repo: https://github.com/dorny/paths-filter
        uses: dorny/paths-filter@v3
@@ -121,7 +151,7 @@
      # by one
      - name: Generate matrix jobs
        run: |
          deployer generate helm-upgrade-jobs "${{ steps.changed-files.outputs.changed_files }}"
          deployer generate helm-upgrade-jobs "${{ steps.changed-files.outputs.changed_files }}" '${{ steps.pr-labels.outputs.result }}'

      # The comment-deployment-plan-pr.yaml workflow won't have the correct context to
      # know the PR number, so we save it to a file to pass to that workflow
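For orientation, the step above resolves labels differently for the two trigger types: on pull_request events it reads them straight off the event payload, while on push events it looks up the merged PR whose merge commit matches the pushed SHA. The Python sketch below mirrors that push-event lookup outside of Actions; the resolve_push_labels helper and the use of the requests library are illustrative assumptions, not part of this PR or the deployer.

import requests


def resolve_push_labels(owner: str, repo: str, sha: str, token: str) -> list[str]:
    """Return label names of the merged PR that produced `sha`, or [] if none is found.

    Hypothetical helper mirroring the workflow step above; not part of the deployer.
    """
    resp = requests.get(
        f"https://api.github.com/repos/{owner}/{repo}/pulls",
        params={"state": "closed", "sort": "updated", "direction": "desc", "per_page": 100},
        headers={"Authorization": f"Bearer {token}"},
        timeout=30,
    )
    resp.raise_for_status()
    merged_pr = next((pr for pr in resp.json() if pr["merge_commit_sha"] == sha), None)
    return [label["name"] for label in (merged_pr or {}).get("labels", [])]

As in the workflow step, the result is an empty list when none of the 100 most recently updated closed PRs matches the pushed commit.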
10 changes: 10 additions & 0 deletions deployer/commands/generate/helm_upgrade/decision.py
@@ -68,6 +68,7 @@ def generate_hub_matrix_jobs(
    cluster_config,
    cluster_info,
    added_or_modified_files,
    pr_labels=None,
    upgrade_all_hubs_on_this_cluster=False,
    upgrade_all_hubs_on_all_clusters=False,
):
@@ -88,6 +89,7 @@
            redeployed.
        added_or_modified_files (set[str]): A set of all added or modified files
            provided in a GitHub Pull Requests
        pr_labels (list, optional): A list of PR labels
        upgrade_all_hubs_on_this_cluster (bool, optional): If True, generates jobs to
            upgrade all hubs on the given cluster. This is triggered when the
            cluster.yaml file itself has been modified. Defaults to False.
@@ -100,6 +102,9 @@
            cluster, the cloud provider that cluster runs on, the name of a hub
            deployed to that cluster, and the reason that hub needs to be redeployed.
    """
    if pr_labels and "deployer:skip-deploy" in pr_labels:
        return []

    # Empty list to store all the matrix job definitions in
    matrix_jobs = []

@@ -148,6 +153,7 @@ def generate_support_matrix_jobs(
    cluster_config,
    cluster_info,
    added_or_modified_files,
    pr_labels=None,
    upgrade_support_on_this_cluster=False,
    upgrade_support_on_all_clusters=False,
):
@@ -168,6 +174,7 @@
            cluster to be redeployed.
        added_or_modified_files (set[str]): A set of all added or modified files
            provided in a GitHub Pull Requests
        pr_labels (list, optional): A list of PR labels
        upgrade_support_on_this_cluster (bool, optional): If True, generates jobs to
            update the support chart on the given cluster. This is triggered when the
            cluster.yaml file itself is modified. Defaults to False.
@@ -192,6 +199,9 @@
            },
        ]
    """
    if pr_labels and "deployer:skip-deploy" in pr_labels:
        return []

    # Rename dictionary key
    cluster_info["reason_for_support_redeploy"] = cluster_info.pop(
        "reason_for_redeploy"
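Both generator functions now short-circuit before any cluster or hub filtering happens: a PR carrying the deployer:skip-deploy label yields an empty job matrix no matter which files changed. A minimal sketch of that guard follows, using a simplified stand-in rather than the deployer's real signatures.

def generate_matrix_jobs(added_or_modified_files, pr_labels=None):
    # Same early return as generate_hub_matrix_jobs / generate_support_matrix_jobs:
    # a labelled PR produces an empty job matrix regardless of which files changed.
    if pr_labels and "deployer:skip-deploy" in pr_labels:
        return []
    return [{"reason": f"{path} was modified"} for path in added_or_modified_files]


assert generate_matrix_jobs({"hub1.values.yaml"}, ["deployer:skip-deploy"]) == []
assert generate_matrix_jobs({"hub1.values.yaml"}, ["unrelated"]) != []
assert generate_matrix_jobs({"hub1.values.yaml"}) != []  # pr_labels=None: nothing skipped

Keeping pr_labels as an optional argument that defaults to None leaves existing callers, and push-triggered runs with no matching merged PR, working unchanged.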
21 changes: 14 additions & 7 deletions deployer/commands/generate/helm_upgrade/jobs.py
@@ -26,13 +26,19 @@
def helm_upgrade_jobs(
    changed_filepaths: str = typer.Argument(
        ..., help="Comma delimited list of files that have changed"
    )
    ),
    pr_labels: str = typer.Argument(
        "[]",
        help="JSON formatted list of PR labels, where 'deployer:skip-deploy' is respected.",
    ),
):
"""
Analyze added or modified files from a GitHub Pull Request and decide which
clusters and/or hubs require helm upgrades to be performed for their *hub helm
charts or the support helm chart.
Analyze added or modified files and labels from a GitHub Pull Request and
decide which clusters and/or hubs require helm upgrades to be performed for
their *hub helm charts or the support helm chart.
"""
pr_labels = json.loads(pr_labels)

changed_filepaths = changed_filepaths.split(",")
(
upgrade_support_on_all_clusters,
@@ -87,6 +93,7 @@ def helm_upgrade_jobs(
                cluster_config,
                cluster_info,
                set(changed_filepaths),
                pr_labels,
                upgrade_all_hubs_on_this_cluster=upgrade_all_hubs_on_this_cluster,
                upgrade_all_hubs_on_all_clusters=upgrade_all_hubs_on_all_clusters,
            )
@@ -99,6 +106,7 @@ def helm_upgrade_jobs(
                cluster_config,
                cluster_info,
                set(changed_filepaths),
                pr_labels,
                upgrade_support_on_this_cluster=upgrade_support_on_this_cluster,
                upgrade_support_on_all_clusters=upgrade_support_on_all_clusters,
            )
@@ -135,10 +143,9 @@ def helm_upgrade_jobs(
    if ci_env:
        # Add these matrix jobs as environment variables for use in another job
        with open(env_file, "a") as f:
            f.write(f"prod-hub-matrix-jobs={json.dumps(prod_hub_matrix_jobs)}")
            f.write("\n")
            f.write(f"prod-hub-matrix-jobs={json.dumps(prod_hub_matrix_jobs)}\n")
            f.write(
                f"support-and-staging-matrix-jobs={json.dumps(support_and_staging_matrix_jobs)}"
                f"support-and-staging-matrix-jobs={json.dumps(support_and_staging_matrix_jobs)}\n"
Review comment from the contributor author on lines -141 to +148:
This is a PR-unrelated fix: when we write to the file path $GITHUB_ENV, it should always get full lines written to it. Otherwise we could get lines like MY_ENV=testSOMEOTHER_ENV=value.
            )

    # Don't bother generating a comment if both of the matrices are empty
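Two details in this file are worth spelling out. First, the new pr_labels CLI argument arrives as a JSON string (defaulting to "[]"), which json.loads turns back into a Python list. Second, as the review comment above notes, every entry appended to the $GITHUB_ENV file must end with a newline or consecutive entries fuse into one line. A small illustrative sketch of both points, using throwaway file names rather than the deployer's code:

import json
import os
import tempfile

# 1. The labels argument is a JSON-encoded list, exactly as emitted by the
#    pr-labels workflow step ('[]' when no labels apply).
assert json.loads("[]") == []
assert json.loads('["deployer:skip-deploy", "unrelated"]') == ["deployer:skip-deploy", "unrelated"]

# 2. GITHUB_ENV-style files are read line by line, so every entry appended to
#    them needs its own trailing newline.
with tempfile.TemporaryDirectory() as tmp:
    env_file = os.path.join(tmp, "github_env")
    with open(env_file, "a") as f:
        f.write("MY_ENV=test")            # no newline here ...
        f.write("SOMEOTHER_ENV=value\n")  # ... so the two entries fuse
    with open(env_file) as f:
        print(f.read())  # MY_ENV=testSOMEOTHER_ENV=value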
52 changes: 52 additions & 0 deletions tests/test_helm_upgrade_decision.py
@@ -171,6 +171,32 @@ def test_generate_hub_matrix_jobs_all_hubs():
    assert isinstance(result_matrix_jobs[0], dict)


def test_generate_hub_matrix_jobs_skip_deploy_label():
    cluster_file = root_path.joinpath("tests/test-clusters/cluster1/cluster.yaml")
    with open(cluster_file) as f:
        cluster_config = yaml.load(f)

    cluster_info = {
        "cluster_name": cluster_config.get("name", {}),
        "provider": cluster_config.get("provider", {}),
        "reason_for_redeploy": "",
    }

    modified_file = {
        root_path.joinpath("tests/test-clusters/cluster1/hub1.values.yaml"),
    }

    pr_labels = ["unrelated1", "deployer:skip-deploy", "unrelated2"]

    expected_matrix_jobs = []

    result_matrix_jobs = generate_hub_matrix_jobs(
        cluster_file, cluster_config, cluster_info, modified_file, pr_labels
    )

    case.assertCountEqual(result_matrix_jobs, expected_matrix_jobs)


def test_generate_support_matrix_jobs_one_cluster():
    cluster_file = root_path.joinpath("tests/test-clusters/cluster1/cluster.yaml")
    with open(cluster_file) as f:
@@ -246,6 +272,32 @@ def test_generate_support_matrix_jobs_all_clusters():
    assert isinstance(result_matrix_jobs[0], dict)


def test_generate_support_matrix_jobs_skip_deploy_label():
    cluster_file = root_path.joinpath("tests/test-clusters/cluster1/cluster.yaml")
    with open(cluster_file) as f:
        cluster_config = yaml.load(f)

    cluster_info = {
        "cluster_name": cluster_config.get("name", {}),
        "provider": cluster_config.get("provider", {}),
        "reason_for_redeploy": "",
    }

    modified_file = {
        root_path.joinpath("tests/test-clusters/cluster1/support.values.yaml"),
    }

    pr_labels = ["unrelated1", "deployer:skip-deploy", "unrelated2"]

    expected_matrix_jobs = []

    result_matrix_jobs = generate_support_matrix_jobs(
        cluster_file, cluster_config, cluster_info, modified_file, pr_labels
    )

    case.assertCountEqual(result_matrix_jobs, expected_matrix_jobs)


def test_discover_modified_common_files_hub_helm_charts():
    input_path_basehub = [os.path.join("helm-charts", "basehub", "Chart.yaml")]
    input_path_daskhub = [os.path.join("helm-charts", "daskhub", "Chart.yaml")]
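Assuming the repository's usual pytest setup, the two new tests could be exercised on their own along these lines (the invocation is a sketch, not taken from this PR):

import pytest

# Select only the new skip-deploy tests by keyword; assumes pytest and the
# deployer's test dependencies are installed in the current environment.
raise SystemExit(pytest.main(["tests/test_helm_upgrade_decision.py", "-k", "skip_deploy_label"]))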