name: "Tests"
on:
workflow_call:
workflow_dispatch:
inputs:
      commit_report:
        description: 'Commit generated report files: None, Docs, Tests, All'
        required: false
        default: 'None'
  merge_group:
permissions:
  actions: read
  contents: write
  pages: write
  id-token: write
  pull-requests: write
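
# Plugin note (an inference from the flags and env vars, not stated in this
# file): the --github-report flag and the pytest_verbosity /
# pytest_report_title environment variables used below appear to belong to the
# pytest-github-report plugin, and the --splits / --group flags to the
# pytest-split plugin.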
jobs:
  tools-tests:
    env:
      pytest_verbosity: 2
      pytest_report_title: "⭐️ Tools Tests"
    runs-on: ["in-service"]
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/common_repo_setup
      - name: Run Tools Tests
        run: |
          python3 -m pytest --github-report tests/tools/ -s
  lowering-tests:
    runs-on: ["in-service"]
    env:
      pytest_verbosity: 2
      pytest_report_title: "⭐️ Aten → TTNN Lowering Tests"
    strategy:
      matrix: # Need to find a way to replace this with a generator (see the commented sketch after this job)
        group: [1, 2]
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/common_repo_setup
      - name: Run Lowering Tests
        run: |
          python3 -m pytest --github-report tests/lowering/ --splits 2 --group ${{ matrix.group }} -s
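
  # A possible replacement for the hard-coded matrices flagged above: a small
  # generator job emits the group list as JSON, and consuming jobs read it via
  # fromJSON. This is an untested sketch, commented out on purpose; the job
  # name "generate-matrix", its "groups" output, and the group count are
  # illustrative assumptions, not part of the current workflow.
  #
  # generate-matrix:
  #   runs-on: ubuntu-latest
  #   outputs:
  #     groups: ${{ steps.gen.outputs.groups }}
  #   steps:
  #     - id: gen
  #       # Emit [1..40] as a JSON array so downstream jobs can split 40 ways.
  #       run: echo "groups=$(python3 -c 'import json; print(json.dumps(list(range(1, 41))))')" >> "$GITHUB_OUTPUT"
  #
  # A consuming job would then declare:
  #
  #   needs: [lowering-tests, generate-matrix]
  #   strategy:
  #     matrix:
  #       group: ${{ fromJSON(needs.generate-matrix.outputs.groups) }}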
  model-tests:
    needs: lowering-tests
    runs-on: ["in-service"]
    strategy:
      matrix: # Need to find a way to replace this with a generator (see the sketch after lowering-tests)
        group: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40]
    env:
      pytest_verbosity: 0
      pytest_report_title: "⭐️ Model Tests - Group ${{ matrix.group }}"
    steps:
      - uses: actions/checkout@v4
        with:
          lfs: true
      - uses: ./.github/actions/common_repo_setup
      - name: Run Model Tests
        run: |
          set +e
          python3 -m pytest --github-report tests/models/ --splits 40 --group ${{ matrix.group }} -s
          exit_code=$? # Capture the exit code
          if [ $exit_code -eq 5 ]; then
            if [ ${{ matrix.group }} -eq 1 ]; then
              echo "Error: pytest returned exit code 5 (No tests to run) in the first test group.";
              exit 1; # Fail the workflow
            else
              echo "Success: pytest returned exit code 5 (No tests to run). This is acceptable for groups other than the first.";
              exit 0; # Success
            fi
          elif [ $exit_code -ne 0 ]; then
            echo "Failure: Tests failed with exit code $exit_code.";
            exit 1; # Fail the workflow for other errors
          fi
          echo "Success: Tests passed!";
          ls -l # List the workspace and the metrics output for debugging
          ls -l metrics
          exit 0;
      - name: Upload Metrics Artifact
        if: success() # Only run if tests passed
        uses: actions/upload-artifact@v4
        with:
          name: model-tests-metrics-group-${{ matrix.group }}
          path: metrics/
  model-autogen-op-tests:
    needs: lowering-tests
    if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.commit_report != 'None' }}
    runs-on: ["in-service"]
    strategy:
      matrix: # Need to find a way to replace this with a generator (see the sketch after lowering-tests)
        group: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40]
    env:
      pytest_verbosity: 0
      pytest_report_title: "⭐️ Model Input Variations Tests - Group ${{ matrix.group }}"
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/common_repo_setup
      - name: Run Model Input Variations Tests
        run: |
          set +e
          rm -rf tests/autogen_op/ALL
          python3 -m pytest --github-report tests/autogen_op --splits 40 --group ${{ matrix.group }} -s
          exit_code=$? # Capture the exit code, but ignore any errors for now
          ls -l # List the workspace and the metrics output for debugging
          ls -l metrics-autogen-op
          exit 0;
      - name: Upload Input Variations Metrics Artifact
        if: success() # Only run if tests passed
        uses: actions/upload-artifact@v4
        with:
          name: model-autogen-op-tests-metrics-group-${{ matrix.group }}
          path: metrics-autogen-op/
  collect-model-artifacts-from-matrix-jobs:
    needs: model-tests
    runs-on: ["in-service"]
    env:
      pytest_verbosity: 0
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/common_repo_setup
      - name: Download All Metrics Artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: model-tests-metrics-group-*
          merge-multiple: true
          path: metrics/
      - name: Upload Metrics Artifact
        if: success() # Only run if the download succeeded
        uses: actions/upload-artifact@v4
        with:
          name: model-tests-metrics
          path: metrics/
  collect-op-artifacts-from-matrix-jobs:
    needs: model-autogen-op-tests
    runs-on: ["in-service"]
    env:
      pytest_verbosity: 0
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/common_repo_setup
      - name: Download All Input Variations Metrics Artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: model-autogen-op-tests-metrics-group-*
          merge-multiple: true
          path: metrics-autogen-op/
      - name: Upload Input Variations Metrics Artifact
        uses: actions/upload-artifact@v4
        with:
          name: model-autogen-op-tests-metrics
          path: metrics-autogen-op/
  collect-metrics:
    needs: [model-tests, model-autogen-op-tests]
    if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.commit_report != 'None' }}
    runs-on: ["in-service"]
    env:
      PYTHONPATH: ${{ github.workspace }}
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/common_repo_setup
      - name: Download All Metrics Artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: model-tests-metrics-group-*
          merge-multiple: true
          path: metrics/
      - name: Download All Input Variations Metrics Artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: model-autogen-op-tests-metrics-group-*
          merge-multiple: true
          path: metrics-autogen-op/
      # Kept for reference: an alternative download step using the gh CLI.
      # - name: Download Metrics Artifacts
      #   env:
      #     GH_TOKEN: ${{ secrets.GH_TOKEN }}
      #     GH_REPO: ${{ github.repository }}
      #   run: |
      #     mkdir -p metrics
      #     for i in $(seq 1 40); do
      #       echo "Downloading metrics for Group $i"
      #       gh run download ${{ github.run_id }} --name "model-tests-metrics-group-$i" --dir metrics/
      #       ls -l metrics
      #     done
      - name: Collect Metrics Report
        run: |
          python3 tools/collect_metrics.py
          git config user.name "GitHub Actions"
          git config user.email "actions@github.com"
          git pull
          if [ "${{ github.event.inputs.commit_report }}" == "All" ]; then
            git add --all
          elif [ "${{ github.event.inputs.commit_report }}" == "Docs" ]; then
            git add README.md
            git add docs/
          elif [ "${{ github.event.inputs.commit_report }}" == "None" ]; then
            echo "No files will be committed"
            exit 0
          fi
          if git diff-index --quiet HEAD; then
            echo "No changes to commit"
          else
            git commit -m '[auto][on-merge-queue] Update metrics report in README.md'
            git push
          fi
  push-autogen-op-tests:
    needs: [model-tests, collect-metrics]
    if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.commit_report != 'None' }}
    runs-on: ["in-service"]
    env:
      PYTHONPATH: ${{ github.workspace }}
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/common_repo_setup
      - name: Download All Metrics Artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: model-tests-metrics-group-*
          merge-multiple: true
          path: metrics/
      - name: Push Autogen Op Tests
        run: |
          git config user.name "GitHub Actions"
          git config user.email "actions@github.com"
          git pull
          rm -rf tests/autogen_op/ # Remove the old generated tests
          python3 tools/generate_input_variation_test_from_models.py
          python3 tools/generate_input_variation_test_from_models.py --merge=True
          echo "Removing this test because it is too heavy to run"
          rm -f tests/autogen_op/Stable_Diffusion_V2/test_Stable_Diffusion_V2_aten_convolution_default.py
          if [ "${{ github.event.inputs.commit_report }}" == "All" ]; then
            git add --all
          elif [ "${{ github.event.inputs.commit_report }}" == "Tests" ]; then
            git add tests/
          elif [ "${{ github.event.inputs.commit_report }}" == "None" ]; then
            echo "No files will be committed"
            exit 0
          fi
          if git diff-index --quiet HEAD; then
            echo "No changes to commit"
          else
            git commit -m '[auto][on-merge-queue] Update input variations tests'
            git push
          fi
  validate-pr:
    if: ${{ always() }}
    runs-on: ubuntu-latest
    needs: [model-tests, tools-tests, lowering-tests, collect-metrics]
    steps:
      - run: |
          model_result="${{ needs.model-tests.result }}"
          tools_result="${{ needs.tools-tests.result }}"
          lowering_result="${{ needs.lowering-tests.result }}"
          if [[ ($tools_result == "success" || $tools_result == "skipped") &&
                ($lowering_result == "success" || $lowering_result == "skipped") &&
                ($model_result == "success" || $model_result == "skipped") ]]; then
            exit 0
          else
            exit 1
          fi