Tests #277

Workflow file for this run

name: "Tests"
on:
workflow_call:
workflow_dispatch:
inputs:
commit_report:
description: 'Commit generated report files: None, Docs, All'
required: false
default: 'None'
merge_group:
permissions:
actions: read
contents: write
pages: write
id-token: write
pull-requests: write
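
# Hardware jobs below run on self-hosted "in-service" n150 runners;
# validate-pr runs on ubuntu-latest.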
jobs:
  tools-tests:
    env:
      pytest_verbosity: 2
      pytest_report_title: "⭐️ Tools Tests"
    runs-on: ["in-service", "n150"]
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/common_repo_setup
      - name: Run Tools Tests
        run: |
          python3 -m pytest --github-report tests/tools/ -s
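
  # Aten → TTNN lowering tests, split into two parallel groups (--splits/--group).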
  lowering-tests:
    runs-on: ["in-service", "n150"]
    env:
      pytest_verbosity: 2
      pytest_report_title: "⭐️ Aten → TTNN Lowering Tests"
    strategy:
      matrix: # Need to find a way to replace this with a generator
        group: [1, 2]
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/common_repo_setup
      - name: Run Lowering Tests
        run: |
          python3 -m pytest --github-report tests/lowering/ --splits 2 --group ${{ matrix.group }} -s
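
  # Model tests, split into 40 parallel groups. pytest exit code 5
  # ("no tests collected") is tolerated for every group except the first.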
  model-tests:
    needs: lowering-tests
    runs-on: ["in-service", "n150"]
    strategy:
      matrix: # Need to find a way to replace this with a generator
        group: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40]
    env:
      pytest_verbosity: 0
      pytest_report_title: "⭐️ Model Tests - Group ${{ matrix.group }}"
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/common_repo_setup
      - name: Run Model Tests
        run: |
          set +e
          python3 -m pytest --github-report tests/models/ --splits 40 --group ${{ matrix.group }} -s
          exit_code=$? # Capture the pytest exit code
          if [ $exit_code -eq 5 ]; then
            if [ ${{ matrix.group }} -eq 1 ]; then
              echo "Error: pytest returned exit code 5 (No tests to run) in the first test group."
              exit 1 # Fail the workflow
            else
              echo "Success: pytest returned exit code 5 (No tests to run). This is acceptable for groups other than the first."
              exit 0 # Success
            fi
          elif [ $exit_code -ne 0 ]; then
            echo "Failure: Tests failed with exit code $exit_code."
            exit 1 # Fail the workflow for other errors
          fi
          echo "Success: Tests passed!"
          exit 0
      - name: Upload Metrics Artifact
        if: success() # Only run if tests passed
        uses: actions/upload-artifact@v3
        with:
          name: model-tests-metrics-group-${{ matrix.group }}
          path: tests/metrics/
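
  # Single pass/fail status summarizing the model-tests matrix;
  # passes when model-tests succeeded or was skipped.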
  validate-pr:
    if: ${{ always() }}
    runs-on: ubuntu-latest
    name: Final Model Test Results
    needs: [model-tests]
    steps:
      - run: |
          result="${{ needs.model-tests.result }}"
          if [[ $result == "success" || $result == "skipped" ]]; then
            exit 0
          else
            exit 1
          fi
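
  # Downloads the per-group metrics artifacts, rebuilds the metrics report,
  # and optionally commits it. Only runs on manual dispatch when
  # commit_report is not 'None'.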
  collect-metrics:
    needs: model-tests
    if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.commit_report != 'None' }}
    runs-on: ["in-service", "n150"]
    env:
      PYTHONPATH: ${{ github.workspace }}
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/common_repo_setup
      - name: Download Metrics Artifacts
        env:
          GH_TOKEN: ${{ secrets.GH_TOKEN }}
          GH_REPO: ${{ github.repository }}
        run: |
          mkdir -p tests/metrics
          for i in $(seq 1 40); do
            echo "Downloading metrics for Group $i"
            gh run download ${{ github.run_id }} --name "model-tests-metrics-group-$i" --dir tests/metrics/
          done
      - name: Collect Metrics Report
        run: |
          python3 tools/collect_metrics.py
          git config user.name "GitHub Actions"
          git config user.email "[email protected]"
          if [ "${{ github.event.inputs.commit_report }}" == "All" ]; then
            git add --all
          elif [ "${{ github.event.inputs.commit_report }}" == "Docs" ]; then
            git add README.md
            git add docs/
          elif [ "${{ github.event.inputs.commit_report }}" == "None" ]; then
            echo "No files will be committed"
            exit 0
          fi
          if git diff-index --quiet HEAD; then
            echo "No changes to commit"
          else
            git commit -m '[auto][on-merge-queue] Update metrics report in README.md'
            git push
          fi
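
# A possible follow-up to the "replace this with a generator" comments above:
# generate the group list in a setup job and feed it to the test jobs with
# fromJson. This is only a sketch; the generate-matrix job name and its
# `groups` output are hypothetical and not part of this workflow.
#
#   generate-matrix:
#     runs-on: ubuntu-latest
#     outputs:
#       groups: ${{ steps.gen.outputs.groups }}
#     steps:
#       - id: gen
#         run: |
#           echo "groups=$(python3 -c 'import json; print(json.dumps(list(range(1, 41))))')" >> "$GITHUB_OUTPUT"
#
#   model-tests:
#     needs: [generate-matrix, lowering-tests]
#     strategy:
#       matrix:
#         group: ${{ fromJson(needs.generate-matrix.outputs.groups) }}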