Add and use ClientConnectionResetError (#9137) (#9194) #5913
name: CI

on:
  merge_group:
  push:
    branches:
      - 'master'
      - '[0-9].[0-9]+'  # matches backport branches, e.g. 3.6
    tags: [ 'v*' ]
  pull_request:
    branches:
      - 'master'
      - '[0-9].[0-9]+'
  schedule:
    - cron: '0 6 * * *'  # Daily 6AM UTC build
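
# Cancel superseded runs: pull requests share a concurrency group keyed by the
# PR number, while direct pushes fall back to the commit SHA.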
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true

env:
  COLOR: yes
  FORCE_COLOR: 1  # Request colored output from CLI tools supporting it
  MYPY_FORCE_COLOR: 1
  PY_COLORS: 1

jobs:

  lint:
    name: Linter
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
    - name: Checkout
      uses: actions/checkout@v4
      with:
        submodules: true
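    # `git diff --exit-code` in the next step turns any drift between the
    # regenerated requirements/runtime-deps.in and the committed copy into a failure.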
    - name: >-
        Verify that `requirements/runtime-deps.in`
        is in sync with `setup.cfg`
      run: |
        set -eEuo pipefail
        make sync-direct-runtime-deps
        git diff --exit-code -- requirements/runtime-deps.in
    - name: Setup Python
      uses: actions/setup-python@v5
      with:
        python-version: 3.11
    - name: Cache PyPI
      uses: actions/cache@v4.0.2
      with:
        key: pip-lint-${{ hashFiles('requirements/*.txt') }}
        path: ~/.cache/pip
        restore-keys: |
          pip-lint-
    - name: Update pip, wheel, setuptools, build, twine
      run: |
        python -m pip install -U pip wheel setuptools build twine
    - name: Install dependencies
      run: |
        python -m pip install -r requirements/lint.in -c requirements/lint.txt
    - name: Install self
      run: |
        python -m pip install . -c requirements/runtime-deps.txt
      env:
        AIOHTTP_NO_EXTENSIONS: 1
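    # The pure-Python install above (AIOHTTP_NO_EXTENSIONS=1) is enough for the
    # linters; this job never builds the llhttp/Cython extensions.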
    - name: Run linters
      run: |
        make mypy
    - name: Install libenchant
      run: |
        sudo apt install libenchant-2-dev
    - name: Install spell checker
      run: |
        pip install -r requirements/doc-spelling.in -c requirements/doc-spelling.txt
    - name: Run docs spelling
      run: |
        # towncrier --yes  # uncomment me after publishing a release
        make doc-spelling
    - name: Build package
      run: |
        python -m build
      env:
        AIOHTTP_NO_EXTENSIONS: 1
    - name: Run twine checker
      run: |
        twine check --strict dist/*
    - name: Making sure that CONTRIBUTORS.txt remains sorted
      run: |
        LC_ALL=C sort --check --ignore-case CONTRIBUTORS.txt

  gen_llhttp:
    name: Generate llhttp sources
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
    - name: Checkout
      uses: actions/checkout@v4
      with:
        submodules: true
    - name: Cache llhttp generated files
      uses: actions/cache@v4.0.2
      id: cache
      with:
        key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }}
        path: vendor/llhttp/build
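    # The cache key tracks the vendored llhttp sources; on a cache hit the
    # Node.js setup and regeneration steps below are skipped.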
    - name: Setup NodeJS
      if: steps.cache.outputs.cache-hit != 'true'
      uses: actions/setup-node@v4
      with:
        node-version: 18
    - name: Generate llhttp sources
      if: steps.cache.outputs.cache-hit != 'true'
      run: |
        make generate-llhttp
    - name: Upload llhttp generated files
      uses: actions/upload-artifact@v3
      with:
        name: llhttp
        path: vendor/llhttp/build
        if-no-files-found: error

  test:
    name: Test
    needs: gen_llhttp
    strategy:
      matrix:
        pyver: [3.9, '3.10', '3.11', '3.12']
        no-extensions: ['', 'Y']
        os: [ubuntu, macos, windows]
        experimental: [false]
        exclude:
          - os: macos
            no-extensions: 'Y'
          - os: windows
            no-extensions: 'Y'
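        # The pure-Python (no-extensions) variant only runs on Ubuntu.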
        include:
          - pyver: pypy-3.9
            no-extensions: 'Y'
            os: ubuntu
            experimental: false
          - os: ubuntu
            pyver: "3.13"
            experimental: true
            no-extensions: 'Y'
      fail-fast: true
    runs-on: ${{ matrix.os }}-latest
    continue-on-error: ${{ matrix.experimental }}
    steps:
    - name: Checkout
      uses: actions/checkout@v4
      with:
        submodules: true
    - name: Setup Python ${{ matrix.pyver }}
      id: python-install
      uses: actions/setup-python@v5
      with:
        allow-prereleases: true
        python-version: ${{ matrix.pyver }}
    - name: Get pip cache dir
      id: pip-cache
      run: |
        echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}"
      shell: bash
    - name: Cache PyPI
      uses: actions/cache@v4.0.2
      with:
        key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }}
        path: ${{ steps.pip-cache.outputs.dir }}
        restore-keys: |
          pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-
    - name: Update pip, wheel, setuptools, build, twine
      run: |
        python -m pip install -U pip wheel setuptools build twine
    - name: Install dependencies
      run: |
        python -m pip install -r requirements/test.in -c requirements/test.txt
    - name: Restore llhttp generated files
      if: ${{ matrix.no-extensions == '' }}
      uses: actions/download-artifact@v3
      with:
        name: llhttp
        path: vendor/llhttp/build/
    - name: Cythonize
      if: ${{ matrix.no-extensions == '' }}
      run: |
        make cythonize
    - name: Install self
      env:
        AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }}
      run: python -m pip install -e .
    - name: Run unittests
      env:
        COLOR: yes
        AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }}
        PIP_USER: 1
      run: >-
        PATH="${HOME}/Library/Python/3.11/bin:${HOME}/.local/bin:${PATH}"
        pytest --junitxml=junit.xml
      shell: bash
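    # In the unittest step above, PIP_USER=1 makes pip default to user-site
    # installs and the PATH prefix exposes the macOS (~/Library/Python/3.11/bin)
    # and Linux (~/.local/bin) user script directories.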
    - name: Re-run the failing tests with maximum verbosity
      if: failure()
      env:
        COLOR: yes
        AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }}
      run: >-  # `exit 1` makes sure that the job remains red with flaky runs
        pytest --no-cov -vvvvv --lf && exit 1
      shell: bash
    - name: Run dev_mode tests
      env:
        COLOR: yes
        AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }}
        PIP_USER: 1
      run: python -X dev -m pytest -m dev_mode --cov-append
      shell: bash
    - name: Turn coverage into xml
      env:
        COLOR: 'yes'
        PIP_USER: 1
      run: |
        python -m coverage xml
    - name: Upload coverage
      uses: codecov/codecov-action@v4
      with:
        file: ./coverage.xml
        flags: >-
          CI-GHA,OS-${{
          runner.os
          }},VM-${{
          matrix.os
          }},Py-${{
          steps.python-install.outputs.python-version
          }}
        token: ${{ secrets.CODECOV_TOKEN }}
    - name: Upload test results to Codecov
      if: ${{ !cancelled() }}
      uses: codecov/test-results-action@v1
      with:
        token: ${{ secrets.CODECOV_TOKEN }}

  check:  # This job does nothing and is only used for branch protection
    if: always()
    needs:
    - lint
    - test
    runs-on: ubuntu-latest
    steps:
    - name: Decide whether the needed jobs succeeded or failed
      uses: re-actors/alls-green@release/v1
      with:
        jobs: ${{ toJSON(needs) }}
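    # alls-green fails this job when any needed job failed or was skipped, so
    # branch protection only has to require this single status check.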

  pre-deploy:
    name: Pre-Deploy
    runs-on: ubuntu-latest
    needs: check
    # Run only on pushing a tag
    if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
    steps:
    - name: Dummy
      run: |
        echo "Predeploy step"
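  # The packaging and deploy jobs below chain off this tag-gated checkpoint
  # via `needs:`.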

  build-tarball:
    name: Tarball
    runs-on: ubuntu-latest
    needs: pre-deploy
    steps:
    - name: Checkout
      uses: actions/checkout@v4
      with:
        submodules: true
    - name: Setup Python
      uses: actions/setup-python@v5
    - name: Update pip, wheel, setuptools, build, twine
      run: |
        python -m pip install -U pip wheel setuptools build twine
    - name: Install cython
      run: >-
        python -m
        pip install -r requirements/cython.in -c requirements/cython.txt
    - name: Restore llhttp generated files
      uses: actions/download-artifact@v3
      with:
        name: llhttp
        path: vendor/llhttp/build/
    - name: Cythonize
      run: |
        make cythonize
    - name: Make sdist
      run: |
        python -m build --sdist
    - name: Upload artifacts
      uses: actions/upload-artifact@v3
      with:
        name: dist
        path: dist

  build-wheels:
    name: Build wheels on ${{ matrix.os }} ${{ matrix.qemu }}
    runs-on: ${{ matrix.os }}-latest
    needs: pre-deploy
    strategy:
      matrix:
        os: [ubuntu, windows, macos]
        qemu: ['']
        include:
          # Split ubuntu job for the sake of speed-up
          - os: ubuntu
            qemu: aarch64
          - os: ubuntu
            qemu: ppc64le
          - os: ubuntu
            qemu: s390x
    steps:
    - name: Checkout
      uses: actions/checkout@v4
      with:
        submodules: true
    - name: Set up QEMU
      if: ${{ matrix.qemu }}
      uses: docker/setup-qemu-action@v3
      with:
        platforms: all
      id: qemu
    - name: Prepare emulation
      run: |
        if [[ -n "${{ matrix.qemu }}" ]]; then
          # Build emulated architectures only if QEMU is set,
          # use default "auto" otherwise
          echo "CIBW_ARCHS_LINUX=${{ matrix.qemu }}" >> $GITHUB_ENV
        fi
      shell: bash
    - name: Setup Python
      uses: actions/setup-python@v5
      with:
        python-version: 3.x
    - name: Update pip, wheel, setuptools, build, twine
      run: |
        python -m pip install -U pip wheel setuptools build twine
    - name: Install cython
      run: >-
        python -m
        pip install -r requirements/cython.in -c requirements/cython.txt
    - name: Restore llhttp generated files
      uses: actions/download-artifact@v3
      with:
        name: llhttp
        path: vendor/llhttp/build/
    - name: Cythonize
      run: |
        make cythonize
    - name: Build wheels
      uses: pypa/cibuildwheel@v2.21.1
      env:
        CIBW_ARCHS_MACOS: x86_64 arm64 universal2
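    # macOS builds produce thin x86_64 and arm64 wheels plus a universal2 wheel;
    # emulated Linux targets come from CIBW_ARCHS_LINUX set in "Prepare emulation".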
    - uses: actions/upload-artifact@v3
      with:
        name: dist
        path: ./wheelhouse/*.whl

  deploy:
    name: Deploy
    needs: [build-tarball, build-wheels]
    runs-on: ubuntu-latest
    permissions:
      contents: write  # IMPORTANT: mandatory for making GitHub Releases
      id-token: write  # IMPORTANT: mandatory for trusted publishing & sigstore
    environment:
      name: pypi
      url: https://pypi.org/p/aiohttp
    steps:
    - name: Checkout
      uses: actions/checkout@v4
      with:
        submodules: true
    - name: Login
      run: |
        echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token
    - name: Download distributions
      uses: actions/download-artifact@v3
      with:
        name: dist
        path: dist
    - name: Collected dists
      run: |
        tree dist
    - name: Make Release
      uses: aio-libs/create-release@v1.6.6
      with:
        changes_file: CHANGES.rst
        name: aiohttp
        version_file: aiohttp/__init__.py
        github_token: ${{ secrets.GITHUB_TOKEN }}
        dist_dir: dist
        fix_issue_regex: >-
          :issue:`(\d+)`
        fix_issue_repl: >-
          #\1
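        # The regex/repl pair above rewrites Sphinx :issue:`123` roles from
        # CHANGES.rst into plain GitHub #123 references in the release notes.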
    - name: >-
        Publish 🐍📦 to PyPI
      uses: pypa/gh-action-pypi-publish@release/v1
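      # No password or API token is passed above: publishing relies on PyPI
      # trusted publishing via the `id-token: write` permission and the
      # `pypi` environment declared for this job.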
    - name: Sign the dists with Sigstore
      uses: sigstore/gh-action-sigstore-python@v3.0.0
      with:
        inputs: >-
          ./dist/*.tar.gz
          ./dist/*.whl
    - name: Upload artifact signatures to GitHub Release
      # Confusingly, this action also supports updating releases, not
      # just creating them. This is what we want here, since we've manually
      # created the release above.
      uses: softprops/action-gh-release@v2
      with:
        # dist/ contains the built packages along with the sigstore-produced
        # signatures and certificates.
        files: dist/**