test: test ubuntu-24.04-arm #921

Draft · wants to merge 8 commits into main
79 changes: 59 additions & 20 deletions .github/workflows/_build.yaml
@@ -27,33 +27,49 @@ on:
type: boolean
required: true
description: Enable or disable running pip_audit to check installed packages for vulnerabilities
outputs:
artifacts-sha256:
value: ${{ jobs.build.outputs.artifacts-sha256 }}
description: The hash of the artifacts
permissions:
contents: read
env:
ARTIFACT_OS: ubuntu-latest # The default OS for release.
ARTIFACT_PYTHON: '3.13' # The default Python version for release.
RELEASE_OS_X86_64: ubuntu-24.04 # Default OS for x86_64-compatible release artifacts.
RELEASE_OS_ARM64: ubuntu-24.04-arm # Default OS for ARM64-compatible release artifacts.
RELEASE_PYTHON_VERSION: '3.13' # Default Python version used for release artifacts.

jobs:
build:
# Uncomment the following to disable checks and tests for Draft pull requests.
# if: github.event.pull_request.draft == false
outputs:
artifacts-sha256: ${{ steps.compute-hash.outputs.artifacts-sha256 }}
name: Build Python ${{ matrix.python }} on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
# It is recommended to pin a Runner version specifically:
# https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners
os: [ubuntu-latest, macos-latest, windows-latest]
os: [ubuntu-24.04, ubuntu-24.04-arm, macos-latest, windows-latest]
python: ['3.10', '3.11', '3.12', '3.13']

outputs:
arch-env: ${{ steps.set-arch-env.outputs.arch_env }}

steps:

# Create a GitHub Actions step output that maps the matrix.os value to a more descriptive architecture
# label (e.g., ubuntu-x86-64 or ubuntu-arm64).
- name: Determine architecture label
id: set-arch-env
shell: bash
run: |
if [[ "${{ matrix.os }}" == "ubuntu-24.04" ]]; then
echo "arch_env=ubuntu-x86-64" >> "$GITHUB_OUTPUT"
elif [[ "${{ matrix.os }}" == "ubuntu-24.04-arm" ]]; then
echo "arch_env=ubuntu-arm64" >> "$GITHUB_OUTPUT"
else
echo "arch_env=unknown" >> "$GITHUB_OUTPUT"
fi

- name: Test the env variable
run: echo "Architecture-specific value ${{ steps.set-arch-env.outputs.arch_env }}"
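
The mapping above could equally be written as a single case statement keyed on matrix.os, which is a little easier to extend if further runner images are added later. A minimal bash sketch of that alternative step body (same outputs, purely illustrative):

# Hypothetical alternative step body: one case statement instead of if/elif.
case "${{ matrix.os }}" in
  ubuntu-24.04)     echo "arch_env=ubuntu-x86-64" >> "$GITHUB_OUTPUT" ;;
  ubuntu-24.04-arm) echo "arch_env=ubuntu-arm64"  >> "$GITHUB_OUTPUT" ;;
  *)                echo "arch_env=unknown"       >> "$GITHUB_OUTPUT" ;;
esac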

- name: Harden Runner
uses: step-security/harden-runner@c6295a65d1254861815972266d5933fd6e532bdf # v2.11.1
with:
@@ -97,24 +113,33 @@ jobs:
HYPOTHESIS_PROFILE: github

# Generate the requirements.txt that contains the hash digests of the dependencies and
# generate the SBOM using the CycloneDX SBOM generator.
# generate the SBOM using the CycloneDX SBOM generator for the release Python version and
# supported release OS targets.
- name: Generate requirements.txt and SBOM
if: matrix.os == env.ARTIFACT_OS && matrix.python == env.ARTIFACT_PYTHON
if: >
matrix.python == env.RELEASE_PYTHON_VERSION &&
(matrix.os == env.RELEASE_OS_X86_64 || matrix.os == env.RELEASE_OS_ARM64)
run: make requirements sbom
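
For orientation, `make requirements sbom` ends up invoking the CycloneDX generator through the sbom target shown in the Makefile hunk further down; roughly, the SBOM half of that call looks like the following sketch, where <wheel-dist-name> stands in for the Makefile's PACKAGE_WHEEL_DIST_NAME expansion:

# Approximation of the Makefile sbom recipe (see the Makefile changes below).
cyclonedx-py requirements --output-format json --outfile "dist/<wheel-dist-name>-sbom.json"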

# Remove the old requirements.txt file (which includes _all_ packages) and generate a
# new one for the package and its actual and required dependencies only.
# new one for the package and its actual and required dependencies only. Run this step
# for the release Python version and supported release OS targets only.
- name: Prune packages and generate required requirements.txt
if: matrix.os == env.ARTIFACT_OS && matrix.python == env.ARTIFACT_PYTHON
if: >
matrix.python == env.RELEASE_PYTHON_VERSION &&
(matrix.os == env.RELEASE_OS_X86_64 || matrix.os == env.RELEASE_OS_ARM64)
run: |
rm requirements.txt
make prune requirements

# Find the paths to the artifact files that will be included in the release, compute
# the SHA digest for all the release files and encode them using Base64, and export it
# from this job.
# from this job. Run this step for the release Python version and supported release
# OS targets only.
- name: Compute package hash
if: matrix.os == env.ARTIFACT_OS && matrix.python == env.ARTIFACT_PYTHON
if: >
matrix.python == env.RELEASE_PYTHON_VERSION &&
(matrix.os == env.RELEASE_OS_X86_64 || matrix.os == env.RELEASE_OS_ARM64)
id: compute-hash
shell: bash
run: |
@@ -126,17 +151,31 @@ jobs:
HTML_DOCS_PATH=$(find dist/ -type f -name "*-docs-html.zip")
MARKDOWN_DOCS_PATH=$(find dist/ -type f -name "*-docs-md.zip")
BUILD_EPOCH_PATH=$(find dist/ -type f -name "*-build-epoch.txt")
sha256sum --version
DIGEST=$(sha256sum "$TARBALL_PATH" "$WHEEL_PATH" "$REQUIREMENTS_PATH" "$SBOM_PATH" "$HTML_DOCS_PATH" "$MARKDOWN_DOCS_PATH" "$BUILD_EPOCH_PATH" | base64 -w0)
echo "Digest of artifacts is $DIGEST."
echo "artifacts-sha256=$DIGEST" >> "$GITHUB_OUTPUT"
echo "$DIGEST" > artifacts-sha256-file-${{ steps.set-arch-env.outputs.arch_env }}

# For now, only generate artifacts for the OS targets and Python version specified in the env variables.
# Currently, reusable workflows do not support setting the strategy property from the caller workflow.
# Run this step for the release Python version and supported release OS targets only.
- name: Upload the package artifact for debugging and release
if: matrix.os == env.ARTIFACT_OS && matrix.python == env.ARTIFACT_PYTHON
if: >
matrix.python == env.RELEASE_PYTHON_VERSION &&
(matrix.os == env.RELEASE_OS_X86_64 || matrix.os == env.RELEASE_OS_ARM64)
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
with:
name: artifact-${{ matrix.os }}-python-${{ matrix.python }}
path: dist
name: artifacts-${{ steps.set-arch-env.outputs.arch_env }}
path: ./dist*/
if-no-files-found: error
retention-days: 7

# Run this step for the release Python version and supported release OS targets only.
- name: Upload artifacts-sha256
if: >
matrix.python == env.RELEASE_PYTHON_VERSION &&
(matrix.os == env.RELEASE_OS_X86_64 || matrix.os == env.RELEASE_OS_ARM64)
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
with:
name: artifacts-sha256-file-${{ steps.set-arch-env.outputs.arch_env }}
path: artifacts-sha256-file-${{ steps.set-arch-env.outputs.arch_env }}
retention-days: 7
48 changes: 48 additions & 0 deletions .github/workflows/pr-change-set.yaml
@@ -21,3 +21,51 @@ jobs:
contents: read
with:
disable-pip-audit: ${{ vars.DISABLE_PIP_AUDIT == 'true' }}
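
The disable-pip-audit input is driven by a repository variable; assuming the GitHub CLI is available, one way to define it (value shown is hypothetical) is:

# Set the repository variable consumed as vars.DISABLE_PIP_AUDIT above.
gh variable set DISABLE_PIP_AUDIT --body "true"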

verify_artifacts:
needs: [build]
name: verify_artifacts
runs-on: ubuntu-latest
permissions:
contents: read
steps:

- name: Check out repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0

# Download all uploaded artifacts in the build job into the 'downloads' directory.
# This includes built package distributions and SHA256 hash files from all matrix jobs.
# The `path` input ensures all artifacts are placed under the 'downloads/' folder while
# maintaining their respective artifact subdirectory structure.
- name: Download artifact
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
path: downloads

# Verify hashes by first computing hashes for the artifacts and then comparing them
# against the hashes computed by the build job.
- name: Verify the artifact hash
run: |
set -euo pipefail
cd downloads
for ARCH in "ubuntu-x86-64" "ubuntu-arm64"; do
HASH_DIR="artifacts-sha256-file-${ARCH}"
ARTIFACT_DIR="artifacts-${ARCH}"
HASH_FILE="${HASH_DIR}/artifacts-sha256-file-${ARCH}"

echo "Verifying artifacts for ${ARCH}"
echo "Decoding expected SHA256 digest:"
DECODED_HASH=$(base64 --decode "${HASH_FILE}")
echo "$DECODED_HASH"

cd "${ARTIFACT_DIR}"
echo "$DECODED_HASH" | sha256sum --strict --check --status || {
echo "Hash verification failed for ${ARCH}!"
exit 1
}
cd - > /dev/null

echo "Hash verified successfully for ${ARCH}"
done
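
The same check can be reproduced outside CI against a finished run; a hedged sketch, assuming the GitHub CLI is installed and <run-id> is the run that produced the artifacts (artifact names as uploaded by the build job above):

# Download every artifact of the run into downloads/ (one subdirectory per artifact).
gh run download <run-id> --dir downloads
cd downloads
ARCH=ubuntu-arm64   # or ubuntu-x86-64
base64 --decode "artifacts-sha256-file-${ARCH}/artifacts-sha256-file-${ARCH}" > expected.sha256
# The manifest paths are relative to the artifact directory, so check from inside it.
(cd "artifacts-${ARCH}" && sha256sum --check --strict ../expected.sha256)
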
8 changes: 4 additions & 4 deletions .pre-commit-config.yaml
@@ -155,10 +155,10 @@ repos:
# args: [--autofix]

# Check GitHub Actions workflow files.
- repo: https://github.com/Mateusz-Grzelinski/actionlint-py
rev: v1.7.4.18
hooks:
- id: actionlint
# - repo: https://github.com/Mateusz-Grzelinski/actionlint-py
# rev: v1.7.4.18
# hooks:
# - id: actionlint

# On push to the remote, run the unit tests.
- repo: local
43 changes: 31 additions & 12 deletions Makefile
@@ -5,7 +5,25 @@ SHELL := bash

# Set the package's name and version for use throughout the Makefile.
PACKAGE_NAME := package
PACKAGE_VERSION := $(shell python -c $$'try: import $(PACKAGE_NAME); print($(PACKAGE_NAME).__version__);\nexcept: print("unknown");')
PACKAGE_VERSION := $(shell python -c $$'try: import $(PACKAGE_NAME); print($(PACKAGE_NAME).__version__, end="");\nexcept: print("unknown");')

# Determine the OS and architecture.
OS := $(shell uname -s)
ifeq ($(OS),Darwin)
PLATFORM_NAME := macosx
else
ifeq ($(OS),Linux)
PLATFORM_NAME := manylinux
endif
endif

ARCH := $(shell uname -m)

# Construct short package identifier.
PACKAGE_SDIST_NAME := $(PACKAGE_NAME)-$(PACKAGE_VERSION)

# Construct full package identifier.
PACKAGE_WHEEL_DIST_NAME := $(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-cp3-abi3-$(PLATFORM_NAME)_$(ARCH)
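
To make the naming concrete: with a placeholder version of 1.2.3, the two Linux release runners would produce the identifiers sketched below (uname -m reports x86_64 on ubuntu-24.04 and aarch64 on ubuntu-24.04-arm):

# Reproduce the Makefile's name construction in plain shell (illustrative only).
PACKAGE_NAME=package; PACKAGE_VERSION=1.2.3
case "$(uname -s)" in Darwin) PLATFORM_NAME=macosx ;; Linux) PLATFORM_NAME=manylinux ;; esac
ARCH="$(uname -m)"
echo "${PACKAGE_NAME}-${PACKAGE_VERSION}-py3-cp3-abi3-${PLATFORM_NAME}_${ARCH}"
# ubuntu-24.04:      package-1.2.3-py3-cp3-abi3-manylinux_x86_64
# ubuntu-24.04-arm:  package-1.2.3-py3-cp3-abi3-manylinux_aarch64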

# This variable contains the first goal that matches any of the listed goals
# here, else it contains an empty string. The net effect is to filter out
@@ -107,7 +125,7 @@ upgrade-quiet:
# Generate a Software Bill of Materials (SBOM).
.PHONY: sbom
sbom: requirements
cyclonedx-py requirements --output-format json --outfile dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-sbom.json
cyclonedx-py requirements --output-format json --outfile dist/$(PACKAGE_WHEEL_DIST_NAME)-sbom.json

# Generate a requirements.txt file containing version and integrity hashes for all
# packages currently installed in the virtual environment. There's no easy way to
@@ -129,14 +147,14 @@ requirements.txt: pyproject.toml
[[ $$pkg =~ (.*)==(.*) ]] && curl -s https://pypi.org/pypi/$${BASH_REMATCH[1]}/$${BASH_REMATCH[2]}/json | python -c "import json, sys; print(''.join(f''' \\\\\n --hash=sha256:{pkg['digests']['sha256']}''' for pkg in json.load(sys.stdin)['urls']));" >> requirements.txt; \
done
echo -e -n "$(PACKAGE_NAME)==$(PACKAGE_VERSION)" >> requirements.txt
if [ -f dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz ]; then \
echo -e -n " \\\\\n $$(python -m pip hash --algorithm sha256 dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz | grep '^\-\-hash')" >> requirements.txt; \
if [ -f dist/$(PACKAGE_SDIST_NAME).tar.gz ]; then \
echo -e -n " \\\\\n $$(python -m pip hash --algorithm sha256 dist/$(PACKAGE_SDIST_NAME).tar.gz | grep '^\-\-hash')" >> requirements.txt; \
fi
if [ -f dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl ]; then \
echo -e -n " \\\\\n $$(python -m pip hash --algorithm sha256 dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl | grep '^\-\-hash')" >> requirements.txt; \
if [ -f dist/$(PACKAGE_WHEEL_DIST_NAME).whl ]; then \
echo -e -n " \\\\\n $$(python -m pip hash --algorithm sha256 dist/$(PACKAGE_WHEEL_DIST_NAME).whl | grep '^\-\-hash')" >> requirements.txt; \
fi
echo "" >> requirements.txt
cp requirements.txt dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-requirements.txt
cp requirements.txt dist/$(PACKAGE_WHEEL_DIST_NAME)-requirements.txt
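
The loop above emits pip's hash-checking format; each pinned entry in the generated requirements.txt roughly takes the following shape (digests are placeholders, not real values):

# Illustrative shape of one pinned entry; pip accepts several --hash options per requirement.
cat <<'EOF'
somepackage==2.1.0 \
    --hash=sha256:<sdist-digest> \
    --hash=sha256:<wheel-digest>
EOF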

# Audit the currently installed packages. Skip packages that are installed in
# editable mode (like the one in development here) because they may not have
@@ -175,17 +193,18 @@ test:
# When building these artifacts, we need the environment variable SOURCE_DATE_EPOCH
# set to the build date/epoch. For more details, see: https://flit.pypa.io/en/latest/reproducible.html
.PHONY: dist
dist: dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-html.zip dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-md.zip dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt
dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl: check test
dist: dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl dist/$(PACKAGE_SDIST_NAME).tar.gz dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-html.zip dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-md.zip dist/$(PACKAGE_WHEEL_DIST_NAME)-build-epoch.txt
dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl:
SOURCE_DATE_EPOCH=$(SOURCE_DATE_EPOCH) flit build --setup-py --format wheel
dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz: check test
mv dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl dist/$(PACKAGE_WHEEL_DIST_NAME).whl
dist/$(PACKAGE_SDIST_NAME).tar.gz:
SOURCE_DATE_EPOCH=$(SOURCE_DATE_EPOCH) flit build --setup-py --format sdist
dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-html.zip: docs-html
python -m zipfile -c dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-html.zip docs/_build/html/
dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-md.zip: docs-md
python -m zipfile -c dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-md.zip docs/_build/markdown/
dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt:
echo $(SOURCE_DATE_EPOCH) > dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt
dist/$(PACKAGE_WHEEL_DIST_NAME)-build-epoch.txt:
echo $(SOURCE_DATE_EPOCH) > dist/$(PACKAGE_WHEEL_DIST_NAME)-build-epoch.txt
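
SOURCE_DATE_EPOCH itself is defined outside the hunks shown in this diff; a common convention, assumed rather than confirmed here, is to pin it to the latest commit timestamp so rebuilds of the same commit stay byte-identical:

# Hypothetical equivalent of how SOURCE_DATE_EPOCH is often derived (not visible in this diff):
# the most recent commit's timestamp, falling back to the current time outside a git checkout.
SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct 2>/dev/null || date +%s)
export SOURCE_DATE_EPOCH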

# Build the HTML and Markdown documentation from the package's source.
DOCS_SOURCE := $(shell git ls-files docs/source)