Add M+theta arg error and test #8153

Workflow file for this run

name: ASPIRE Python Pip CI
on:
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
push:
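# The check job gates every other job in this workflow (see the needs: fields below).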
jobs:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.9'
- name: Install dependencies
run: |
pip install tox tox-gh-actions
- name: Run Tox Check
run: tox -e check
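# Download the shared test data once and cache it for the downstream jobs.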
data-cache:
needs: check
runs-on: ubuntu-latest
outputs:
cache_hash: ${{ steps.compute-cache-hash.outputs.cache_hash }}
steps:
- uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: '3.9'
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -e ".[dev]"
- name: Restore Cache
uses: actions/cache@v4
with:
key: cached-data-${{ hashFiles('**/registry.py', '**/*workflow.yml') }}
restore-keys: |
cached-data-
path: .github_cache/ASPIRE-data
enableCrossOsArchive: true
- name: Create Cache Directory
run: |
# Create the cache directory and write a config.yaml pointing at it
mkdir -p .github_cache/ASPIRE-data
chmod -R 777 .github_cache/ASPIRE-data
echo "cache:" > config.yaml
echo " cache_dir: .github_cache/ASPIRE-data" >> config.yaml
echo "Logging config.yaml for verification:"
cat config.yaml
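# With ASPIREDIR=. aspire reads the config.yaml written above, so the downloads land in the cache directory.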
- name: Download Cache Files
run: |
export ASPIREDIR=.
python -c "
from aspire.downloader import emdb_2660, simulated_channelspin
emdb_2660()
simulated_channelspin()
"
- name: Compute Cache Directory Hash
id: compute-cache-hash
run: |
echo "Computing hash for .github_cache/ASPIRE-data..."
# Compute a hash on the sorted file listing.
cache_hash=$(ls -1 .github_cache/ASPIRE-data/** | md5sum)
echo "Computed cache hash: $cache_hash"
# Expose the computed hash to subsequent steps/jobs.
echo "cache_hash=${cache_hash}" >> $GITHUB_OUTPUT
- name: Verify Cache Directory Before Saving
run: |
ls -lhR .github_cache/ASPIRE-data
[ -f config.yaml ]
- name: Save Cache
uses: actions/cache@v4
with:
key: cached-data-${{ hashFiles('**/registry.py', '**/*workflow.yml') }}
path: .github_cache/ASPIRE-data
enableCrossOsArchive: true
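# Main pip test matrix across the supported Python versions.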
build:
needs: [check, data-cache]
runs-on: ubuntu-latest
# Run on every code push, but only on review-ready PRs
if: ${{ github.event_name == 'push' || github.event.pull_request.draft == false }}
strategy:
matrix:
python-version: ['3.9', '3.10', '3.11', '3.12']
pyenv: [pip]
exclude:
# Exclude 3.9-pip so we can add pre/post tasks to that environment.
- python-version: '3.9'
pyenv: pip
include:
# Re-include 3.9 with additional tox tasks.
- python-version: '3.9'
pyenv: pip,docs
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }} ${{ matrix.pyenv }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
pip install tox tox-gh-actions
# Optional packages
pip install pyfftw # `test_fft` also runs against pyfftw when it is installed
- name: Restore Cache
uses: actions/cache@v4
with:
key: cached-data-${{ hashFiles('**/registry.py', '**/*workflow.yml') }}
restore-keys: |
cached-data-
path: .github_cache/ASPIRE-data
enableCrossOsArchive: true
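# Recreate config.yaml so aspire resolves its cache_dir to the restored cache.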
- name: Set Cache Directory
run: |
echo "cache:" > config.yaml
echo " cache_dir: .github_cache/ASPIRE-data" >> config.yaml
- name: Verify Restored Cache Directory
run: |
ls -lhR .github_cache/ASPIRE-data
[ -f config.yaml ]
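# The override propagates ASPIREDIR into the tox environment so the workspace config.yaml is picked up.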
- name: Test with tox
run: |
tox --override testenv.set_env=ASPIREDIR=${{ github.workspace }} \
--skip-missing-interpreters false \
-e py${{ matrix.python-version }}-${{ matrix.pyenv }}
- name: Validate Cache Directory Hash
run: |
echo "Computing hash for .github_cache/ASPIRE-data..."
new_hash=$(ls -1 .github_cache/ASPIRE-data/** | md5sum)
echo "Hash from data-cache job: ${{ needs.data-cache.outputs.cache_hash }}"
echo "Computed hash now: $new_hash"
if [ "${{ needs.data-cache.outputs.cache_hash }}" != "$new_hash" ]; then
echo "Error: Cache directory hash has changed!"
exit 1
fi
- name: Upload Coverage to CodeCov
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
conda-build:
needs: [check, data-cache]
runs-on: ${{ matrix.os }}
# Only run on review-ready pull requests
if: ${{ github.event_name == 'pull_request' && github.event.pull_request.draft == false }}
defaults:
run:
shell: bash -el {0}
strategy:
matrix:
os: [ubuntu-latest, ubuntu-22.04, macOS-latest, macOS-14]
backend: [default, openblas]
python-version: ['3.9']
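# Extra OS/backend combinations; each backend selects its environment-<backend>.yml conda file.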
include:
- os: ubuntu-latest
backend: intel
- os: macOS-latest
backend: accelerate
- os: windows-2022
backend: win64
steps:
- uses: actions/checkout@v4
- name: Set up Conda ${{ matrix.os }} Python ${{ matrix.python-version }}
uses: conda-incubator/setup-miniconda@v2.3.0
with:
miniconda-version: "latest"
auto-update-conda: true
python-version: ${{ matrix.python-version }}
activate-environment: aspire
environment-file: environment-${{ matrix.backend }}.yml
auto-activate-base: false
- name: Complete Install and Log Environment ${{ matrix.os }} Python ${{ matrix.python-version }}
run: |
conda info
conda list
pip install -e ".[dev]"
pip freeze
python -c "import numpy; numpy.show_config()"
- name: Restore Cache
uses: actions/cache@v4
with:
key: cached-data-${{ hashFiles('**/registry.py', '**/*workflow.yml') }}
restore-keys: |
cached-data-
path: .github_cache/ASPIRE-data
enableCrossOsArchive: true
- name: Set Cache Directory
run: |
echo "cache:" > config.yaml
echo " cache_dir: .github_cache/ASPIRE-data" >> config.yaml
- name: Execute Pytest Conda ${{ matrix.os }} Python ${{ matrix.python-version }}
run: |
export ASPIREDIR=.
export OMP_NUM_THREADS=2
# -n runs tests in parallel using pytest-xdist
pytest -n2 --durations=50 -s
# Build and Deploy production (main) docs.
docs_deploy:
if: github.ref == 'refs/heads/main'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: '3.9'
- name: Install Dependencies
run: |
pip install -e ".[dev]"
- name: Run Sphinx doc build script
env:
GITHUB_ACTOR: ${{ github.actor }}
GITHUB_REPOSITORY: ${{ github.repository }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: "docs/buildsite.sh"
shell: bash
ampere_gpu:
needs: check
runs-on: self-hosted
# Run on every code push, but only on review-ready PRs
if: ${{ github.event_name == 'push' || github.event.pull_request.draft == false }}
steps:
- uses: actions/checkout@v4
- name: Install dependencies
run: |
pip install -e ".[dev,gpu-12x]"
- name: Customize config
run: |
echo "Setup tmp dirs and chmod so others can cleanup."
CI_TMP_DIR=/var/ci/tmp
mkdir -p ${CI_TMP_DIR}
chmod g+rwx ${CI_TMP_DIR}
CI_CACHE_DIR=${CI_TMP_DIR}/cache
mkdir -p ${CI_CACHE_DIR}
chmod g+rwx ${CI_CACHE_DIR}
echo "Create and assign a unique temp dir to hold our config."
WORK_DIR=$(mktemp -d -p "${CI_TMP_DIR}")
echo "WORK_DIR=${WORK_DIR}"
echo "Stash the WORK_DIR to GitHub env so we can clean it up later."
echo "WORK_DIR=${WORK_DIR}" >> $GITHUB_ENV
echo -e "common:" >> ${WORK_DIR}/config.yaml
echo -e " numeric: cupy" >> ${WORK_DIR}/config.yaml
echo -e " fft: cupy\n" >> ${WORK_DIR}/config.yaml
echo -e "cache:" >> ${WORK_DIR}/config.yaml
echo -e " cache_dir: ${CI_CACHE_DIR}" >> ${WORK_DIR}/config.yaml
echo "Log the config: ${WORK_DIR}/config.yaml"
cat ${WORK_DIR}/config.yaml
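# ASPIREDIR selects the per-run config.yaml in WORK_DIR (cupy numeric/fft backends plus the shared cache dir).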
- name: Cache Data
run: |
ASPIREDIR=${{ env.WORK_DIR }} python -c \
"import aspire; print(aspire.config['cache']['cache_dir']); import aspire.downloader; aspire.downloader.emdb_2660()"
- name: Run
run: |
ASPIREDIR=${{ env.WORK_DIR }} PYTHONWARNINGS=error python -m pytest --durations=50 --cov=aspire --cov-report=xml
- name: Upload Coverage to CodeCov
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Cleanup
run: rm -rf ${{ env.WORK_DIR }}
# Build the branch's docs and gallery.
docs:
needs: [check, data-cache]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: '3.9'
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install -e ".[dev]"
- name: Restore Cache
uses: actions/cache@v4
with:
key: cached-data-${{ hashFiles('**/registry.py', '**/*workflow.yml') }}
restore-keys: |
cached-data-
path: .github_cache/ASPIRE-data
enableCrossOsArchive: true
- name: Set Cache Directory
run: |
echo "cache:" > config.yaml
echo " cache_dir: .github_cache/ASPIRE-data" >> config.yaml
- name: Build Sphinx docs
run: |
export ASPIREDIR=${{ github.workspace }}
make distclean
sphinx-apidoc -f -o ./source ../src -H Modules
make html
working-directory: ./docs
- name: Archive Sphinx docs
uses: actions/upload-artifact@v4
with:
name: sphinx-docs
path: docs/build
retention-days: 7
- name: Validate Cache Directory Hash
run: |
echo "Computing hash for .github_cache/ASPIRE-data..."
new_hash=$(ls -1 .github_cache/ASPIRE-data/** | md5sum)
echo "Hash from data-cache job: ${{ needs.data-cache.outputs.cache_hash }}"
echo "Computed hash now: $new_hash"
if [ "${{ needs.data-cache.outputs.cache_hash }}" != "$new_hash" ]; then
echo "Error: Cache directory hash has changed!"
exit 1
fi
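# Native arm64 (Apple Silicon) pip install and test run.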
osx_arm:
needs: [check, data-cache]
runs-on: macos-14
# Run on every code push, but only on review-ready PRs
if: ${{ github.event_name == 'push' || github.event.pull_request.draft == false }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Complete Install and Log Environment
run: |
python --version
pip install -e ".[dev]" # install aspire
pip freeze
- name: Restore Cache
uses: actions/cache@v4
with:
key: cached-data-${{ hashFiles('**/registry.py', '**/*workflow.yml') }}
restore-keys: |
cached-data-
path: .github_cache/ASPIRE-data
enableCrossOsArchive: true
- name: Set Cache Directory
run: |
echo "cache:" > config.yaml
echo " cache_dir: .github_cache/ASPIRE-data" >> config.yaml
- name: Verify Restored Cache Directory
run: |
ls -lhR .github_cache/ASPIRE-data
[ -f config.yaml ]
- name: Test
run: |
export ASPIREDIR=.
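# -n3 runs tests in parallel using pytest-xdist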
python -m pytest -n3 --durations=50