diff --git a/.codecov.yml b/.codecov.yml index aa8bed59986..f1d271533be 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -1,5 +1,7 @@ codecov: branch: master + notify: + after_n_builds: 10 coverage: range: "95..100" diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 0b1840ca094..00000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,66 +0,0 @@ ---- -name: 🐛 Bug report -about: Create a report to help us improve -labels: bug -assignees: aio-libs/triagers - ---- - -🐞 **Describe the bug** - - - -💡 **To Reproduce** - - - -💡 **Expected behavior** - - - -📋 **Logs/tracebacks** - -```python-traceback (paste your traceback in the next line) - -``` - -📋 **Your version of the Python** - -```console -$ python --version -... -``` - -📋 **Your version of the aiohttp/yarl/multidict distributions** - -```console -$ python -m pip show aiohttp -... -``` -```console -$ python -m pip show multidict -... -``` -```console -$ python -m pip show yarl -... -``` - -📋 **Additional context** - - - diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 00000000000..9a9b233cb97 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,138 @@ +--- +name: Bug Report +description: Create a report to help us improve. +labels: [bug] +assignees: aio-libs/triagers +body: +- type: markdown + attributes: + value: | + **Thanks for taking a minute to file a bug report!** + + ⚠ + Verify first that your issue is not [already reported on + GitHub][issue search]. + + _Please fill out the form below with as many precise + details as possible._ + + [issue search]: ../search?q=is%3Aissue&type=issues + +- type: textarea + attributes: + label: Describe the bug + description: >- + A clear and concise description of what the bug is. 
+ validations: + required: true + +- type: textarea + attributes: + label: To Reproduce + description: >- + Describe the steps to reproduce this bug. + placeholder: | + 1. Implement the following server or a client '...' + 2. Then run '...' + 3. An error occurs. + validations: + required: true + +- type: textarea + attributes: + label: Expected behavior + description: >- + A clear and concise description of what you expected to happen. + validations: + required: true + +- type: textarea + attributes: + label: Logs/tracebacks + description: | + If applicable, add logs/tracebacks to help explain your problem. + Paste the output of the steps above, including the commands + themselves and their output/traceback etc. + render: python-traceback + validations: + required: true + +- type: textarea + attributes: + label: Python Version + description: Attach your version of Python. + render: console + value: | + $ python --version + validations: + required: true +- type: textarea + attributes: + label: aiohttp Version + description: Attach your version of aiohttp. + render: console + value: | + $ python -m pip show aiohttp + validations: + required: true +- type: textarea + attributes: + label: multidict Version + description: Attach your version of multidict. + render: console + value: | + $ python -m pip show multidict + validations: + required: true +- type: textarea + attributes: + label: yarl Version + description: Attach your version of yarl. + render: console + value: | + $ python -m pip show yarl + validations: + required: true + +- type: textarea + attributes: + label: OS + placeholder: >- + For example, Arch Linux, Windows, macOS, etc. + validations: + required: true + +- type: dropdown + attributes: + label: Related component + description: >- + aiohttp is both server framework and client library. + For getting rid of confusing make sure to select + 'server', 'client' or both. 
+ multiple: true + options: + - Server + - Client + validations: + required: true + +- type: textarea + attributes: + label: Additional context + description: | + Add any other context about the problem here. + + Describe the environment you have that lead to your issue. + This includes proxy server and other bits that are related to your case. + +- type: checkboxes + attributes: + label: Code of Conduct + description: | + Read the [aio-libs Code of Conduct][CoC] first. + + [CoC]: https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md + options: + - label: I agree to follow the aio-libs Code of Conduct + required: true +... diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 076abc3b3a9..00000000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,22 +0,0 @@ ---- -name: 🚀 Feature request -about: Suggest an idea for this project -labels: enhancement -assignees: aio-libs/triagers - ---- - -🐣 **Is your feature request related to a problem? Please describe.** - - - -💡 **Describe the solution you'd like** - - - -❓ **Describe alternatives you've considered** - - - -📋 **Additional context** - diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 00000000000..93167de4efd --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,75 @@ +--- +name: 🚀 Feature request +description: Suggest an idea for this project. +labels: enhancement +body: +- type: markdown + attributes: + value: | + **Thanks for taking a minute to file a feature for aiohttp!** + + ⚠ + Verify first that your feature request is not [already reported on + GitHub][issue search]. + + _Please fill out the form below with as many precise + details as possible._ + + [issue search]: ../search?q=is%3Aissue&type=issues + +- type: textarea + attributes: + label: Is your feature request related to a problem? 
+ description: >- + Please add a clear and concise description of what + the problem is. _Ex. I'm always frustrated when [...]_ + +- type: textarea + attributes: + label: Describe the solution you'd like + description: >- + A clear and concise description of what you want to happen. + validations: + required: true + +- type: textarea + attributes: + label: Describe alternatives you've considered + description: >- + A clear and concise description of any alternative solutions + or features you've considered. + validations: + required: true + +- type: dropdown + attributes: + label: Related component + description: >- + aiohttp is both server framework and client library. + For getting rid of confusing make sure to select + 'server', 'client' or both. + multiple: true + options: + - Server + - Client + validations: + required: true + +- type: textarea + attributes: + label: Additional context + description: >- + Add any other context or screenshots about + the feature request here. + +- type: checkboxes + attributes: + label: Code of Conduct + description: | + Read the [aio-libs Code of Conduct][CoC] first. + + [CoC]: https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md + options: + - label: I agree to follow the aio-libs Code of Conduct + required: true +... 
diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 92f960a6e2b..908f93d629f 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -6,7 +6,6 @@ updates: directory: "/" labels: - dependencies - - autosquash schedule: interval: "daily" @@ -15,7 +14,6 @@ updates: directory: "/" labels: - dependencies - - autosquash schedule: interval: "daily" open-pull-requests-limit: 10 @@ -25,17 +23,36 @@ updates: directory: "/" labels: - dependencies - - autosquash target-branch: "3.8" schedule: interval: "daily" + open-pull-requests-limit: 10 + + # Maintain dependencies for Python aiohttp 3.8 + - package-ecosystem: "pip" + directory: "/" + labels: + - dependencies + target-branch: "3.8" + schedule: + interval: "daily" + open-pull-requests-limit: 10 + + # Maintain dependencies for GitHub Actions aiohttp 3.8 + - package-ecosystem: "github-actions" + directory: "/" + labels: + - dependencies + target-branch: "3.8" + schedule: + interval: "daily" + open-pull-requests-limit: 10 # Maintain dependencies for Python aiohttp 3.8 - package-ecosystem: "pip" directory: "/" labels: - dependencies - - autosquash target-branch: "3.8" schedule: interval: "daily" diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml new file mode 100644 index 00000000000..5a9408a182c --- /dev/null +++ b/.github/workflows/auto-merge.yml @@ -0,0 +1,22 @@ +name: Dependabot auto-merge +on: pull_request_target + +permissions: + pull-requests: write + contents: write + +jobs: + dependabot: + runs-on: ubuntu-latest + if: ${{ github.actor == 'dependabot[bot]' }} + steps: + - name: Dependabot metadata + id: metadata + uses: dependabot/fetch-metadata@v1.1.1 + with: + github-token: "${{ secrets.GITHUB_TOKEN }}" + - name: Enable auto-merge for Dependabot PRs + run: gh pr merge --auto --squash "$PR_URL" + env: + PR_URL: ${{github.event.pull_request.html_url}} + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} diff --git a/.github/workflows/autosquash.yml 
b/.github/workflows/autosquash.yml deleted file mode 100644 index 63d6868daf6..00000000000 --- a/.github/workflows/autosquash.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Autosquash -on: - check_run: - types: - # Check runs completing successfully can unblock the - # corresponding pull requests and make them mergeable. - - completed - pull_request: - types: - # A closed pull request makes the checks on the other - # pull request on the same base outdated. - - closed - # Adding the autosquash label to a pull request can - # trigger an update or a merge. - - labeled - pull_request_review: - types: - # Review approvals can unblock the pull request and - # make it mergeable. - - submitted - # Success statuses can unblock the corresponding - # pull requests and make them mergeable. - status: {} - -jobs: - autosquash: - name: Autosquash - runs-on: ubuntu-latest - # not awailable for forks, skip the workflow - if: ${{ github.event.pull_request.head.repo.full_name == 'aio-libs/aiohttp' }} - steps: - - id: generate_token - uses: tibdex/github-app-token@v1 - with: - app_id: ${{ secrets.BOT_APP_ID }} - private_key: ${{ secrets.BOT_PRIVATE_KEY }} - - uses: tibdex/autosquash@v2 - with: - github_token: ${{ steps.generate_token.outputs.token }} diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 8047b81d343..e5bea15f7c9 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -21,12 +21,12 @@ jobs: # if: ${{ github.event.pull_request.head.repo.full_name == 'aio-libs/aiohttp' }} steps: - id: generate_token - uses: tibdex/github-app-token@v1 + uses: tibdex/github-app-token@v1.4 with: app_id: ${{ secrets.BOT_APP_ID }} private_key: ${{ secrets.BOT_PRIVATE_KEY }} - name: Backport - uses: sqren/backport-github-action@v1 + uses: sqren/backport-github-action@v1.0.40 with: # Required # Token to authenticate requests @@ -46,7 +46,6 @@ jobs: # Optional # Comma separated list of labels that will be added to the backport PR. 
# target_pr_labels: 'backport' - target_pr_labels: 'autosquash' # Optional # If no labels match the `backport_label_pattern` the backport check will fail. To bypass this for a single PR you can add a label to indicate the PR should not be backported diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index be50d905be7..427950bcb6d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,7 +22,7 @@ jobs: timeout-minutes: 5 steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v2.3.4 with: submodules: true - name: Setup Python 3.8 @@ -30,7 +30,7 @@ jobs: with: python-version: 3.8 - name: Cache PyPI - uses: actions/cache@v2 + uses: actions/cache@v2.1.6 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -41,7 +41,7 @@ jobs: with: path: requirements/lint.txt - name: Pre-Commit hooks - uses: pre-commit/action@v2.0.0 + uses: pre-commit/action@v2.0.3 - name: Install itself run: | python setup.py install @@ -98,7 +98,7 @@ jobs: timeout-minutes: 15 steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v2.3.4 with: submodules: true - name: Setup Python ${{ matrix.pyver }} @@ -110,12 +110,12 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" # - name: Cache - name: Cache PyPI - uses: actions/cache@v2 + uses: actions/cache@v2.1.6 with: - key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-{{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} + key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} restore-keys: | - pip-ci-${{ runner.os }}-${{ matrix.pyver }}-{{ matrix.no-extensions }}- + pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}- - name: Cythonize if: ${{ matrix.no-extensions == '' }} run: | @@ -128,7 +128,7 @@ jobs: make vvtest python -m coverage xml - name: Upload coverage - uses: codecov/codecov-action@v1 + uses: 
codecov/codecov-action@v2.1.0 with: file: ./coverage.xml flags: unit @@ -151,7 +151,7 @@ jobs: needs: pre-deploy steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v2.3.4 with: submodules: true - name: Setup Python 3.8 @@ -184,7 +184,7 @@ jobs: needs: pre-deploy steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v2.3.4 with: submodules: true - name: Set up QEMU @@ -233,7 +233,7 @@ jobs: needs: pre-deploy steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v2.3.4 with: submodules: true - name: Setup Python 3.8 diff --git a/.github/workflows/update-pre-commit.yml b/.github/workflows/update-pre-commit.yml index 82b625bab2e..56a964401d8 100644 --- a/.github/workflows/update-pre-commit.yml +++ b/.github/workflows/update-pre-commit.yml @@ -7,7 +7,7 @@ jobs: if: github.repository_owner == 'aiohttp' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v2.3.4 - name: Set up Python uses: actions/setup-python@v2 with: @@ -19,12 +19,12 @@ jobs: - name: Run pre-commit autoupdate run: pre-commit autoupdate - id: generate_token - uses: tibdex/github-app-token@v1 + uses: tibdex/github-app-token@v1.4 with: app_id: ${{ secrets.BOT_APP_ID }} private_key: ${{ secrets.BOT_PRIVATE_KEY }} - name: Create Pull Request - uses: peter-evans/create-pull-request@v3.5.2 + uses: peter-evans/create-pull-request@v3.10.1 with: token: ${{ steps.generate_token.outputs.token }} branch: update/pre-commit-autoupdate diff --git a/.mypy.ini b/.mypy.ini new file mode 100644 index 00000000000..5b5796e4a51 --- /dev/null +++ b/.mypy.ini @@ -0,0 +1,43 @@ +[mypy] +files = aiohttp, examples, tests +check_untyped_defs = True +exclude = examples/legacy/ +follow_imports_for_stubs = True +#disallow_any_decorated = True +disallow_any_generics = True +disallow_incomplete_defs = True +disallow_subclassing_any = True +disallow_untyped_calls = True +disallow_untyped_decorators = True +disallow_untyped_defs = True 
+implicit_reexport = False +no_implicit_optional = True +show_error_codes = True +strict_equality = True +warn_incomplete_stub = True +warn_redundant_casts = True +#warn_unreachable = True +warn_unused_ignores = True +disallow_any_unimported = True +warn_return_any = True + +[mypy-aiodns] +ignore_missing_imports = True + +[mypy-asynctest] +ignore_missing_imports = True + +[mypy-brotli] +ignore_missing_imports = True + +[mypy-cchardet] +ignore_missing_imports = True + +[mypy-gunicorn.*] +ignore_missing_imports = True + +[mypy-tokio] +ignore_missing_imports = True + +[mypy-uvloop] +ignore_missing_imports = True diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2a6f3aa4d82..ba83762c34f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,6 +30,8 @@ repos: exclude: >- ^docs/[^/]*\.svg$ - id: requirements-txt-fixer + exclude: >- + ^requirements/(cython|dev|doc-spelling|lint).txt$ - id: trailing-whitespace - id: file-contents-sorter files: | diff --git a/CHANGES.rst b/CHANGES.rst index 9d7a1914deb..f064f4895ce 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -14,6 +14,48 @@ Changelog .. towncrier release notes start +3.7.4.post0 (2021-03-06) +======================== + +Misc +---- + +- Bumped upper bound of the ``chardet`` runtime dependency + to allow their v4.0 version stream. + `#5366 `_ + + +---- + + +3.7.4 (2021-02-25) +================== + +Bugfixes +-------- + +- **(SECURITY BUG)** Started preventing open redirects in the + ``aiohttp.web.normalize_path_middleware`` middleware. For + more details, see + https://github.com/aio-libs/aiohttp/security/advisories/GHSA-v6wp-4m6f-gcjg. + + Thanks to `Beast Glatisant `__ for + finding the first instance of this issue and `Jelmer Vernooij + `__ for reporting and tracking it down + in aiohttp. + `#5497 `_ +- Fix interpretation difference of the pure-Python and the Cython-based + HTTP parsers construct a ``yarl.URL`` object for HTTP request-target. 
+ + Before this fix, the Python parser would turn the URI's absolute-path + for ``//some-path`` into ``/`` while the Cython code preserved it as + ``//some-path``. Now, both do the latter. + `#5498 `_ + + +---- + + 3.7.3 (2020-11-18) ================== diff --git a/CHANGES/3450.bugfix b/CHANGES/3450.bugfix new file mode 100644 index 00000000000..6b82b4c0481 --- /dev/null +++ b/CHANGES/3450.bugfix @@ -0,0 +1 @@ +Started using `MultiLoopChildWatcher` when it's available under POSIX while setting up the test I/O loop. diff --git a/CHANGES/3927.misc b/CHANGES/3927.misc new file mode 100644 index 00000000000..5c67ea64744 --- /dev/null +++ b/CHANGES/3927.misc @@ -0,0 +1 @@ +Use ``mypy --strict`` diff --git a/CHANGES/4247.misc b/CHANGES/4247.misc new file mode 100644 index 00000000000..44b05c3c9c1 --- /dev/null +++ b/CHANGES/4247.misc @@ -0,0 +1 @@ +Updated the Autobahn test suite to v0.8.2. diff --git a/CHANGES/4431.bugfix b/CHANGES/4431.bugfix new file mode 100644 index 00000000000..bb325354c5e --- /dev/null +++ b/CHANGES/4431.bugfix @@ -0,0 +1 @@ +Fixed HTTP client requests to honor ``no_proxy`` environment variables. diff --git a/CHANGES/4594.feature b/CHANGES/4594.feature new file mode 100644 index 00000000000..f00e14a5e93 --- /dev/null +++ b/CHANGES/4594.feature @@ -0,0 +1 @@ +FileResponse now supports ETag. diff --git a/CHANGES/4686.feature b/CHANGES/4686.feature new file mode 100644 index 00000000000..1b74265fb94 --- /dev/null +++ b/CHANGES/4686.feature @@ -0,0 +1 @@ +Add a request handler type alias ``aiohttp.typedefs.Handler``. diff --git a/CHANGES/4818.feature b/CHANGES/4818.feature new file mode 100644 index 00000000000..158e4ebae84 --- /dev/null +++ b/CHANGES/4818.feature @@ -0,0 +1 @@ +Add validation of HTTP header keys and values to prevent header injection. diff --git a/CHANGES/5275.feature b/CHANGES/5275.feature new file mode 100644 index 00000000000..e44960a322b --- /dev/null +++ b/CHANGES/5275.feature @@ -0,0 +1 @@ +Add final declarations for constants. 
diff --git a/CHANGES/5326.doc b/CHANGES/5326.doc new file mode 100644 index 00000000000..74aff4c4225 --- /dev/null +++ b/CHANGES/5326.doc @@ -0,0 +1 @@ +Refactor OpenAPI/Swagger aiohttp addons, added aio-openapi diff --git a/CHANGES/5329.bugfix b/CHANGES/5329.bugfix new file mode 100644 index 00000000000..a86edc1ced1 --- /dev/null +++ b/CHANGES/5329.bugfix @@ -0,0 +1 @@ +Add missing slots to ```_RequestContextManager`` and ``_WSRequestContextManager`` diff --git a/CHANGES/5389.misc b/CHANGES/5389.misc new file mode 100644 index 00000000000..b127d576370 --- /dev/null +++ b/CHANGES/5389.misc @@ -0,0 +1 @@ +Use pip-tools to pin dependencies diff --git a/CHANGES/5403.bugfix b/CHANGES/5403.bugfix new file mode 100644 index 00000000000..40cc5a22294 --- /dev/null +++ b/CHANGES/5403.bugfix @@ -0,0 +1 @@ +Stop automatically releasing the ``ClientResponse`` object on calls to the ``ok`` property for the failed requests. diff --git a/CHANGES/5457.misc b/CHANGES/5457.misc new file mode 100644 index 00000000000..5f0fad6bce6 --- /dev/null +++ b/CHANGES/5457.misc @@ -0,0 +1 @@ +Improve Mypy coverage. diff --git a/CHANGES/5486.misc b/CHANGES/5486.misc new file mode 100644 index 00000000000..bc8fa84a4e3 --- /dev/null +++ b/CHANGES/5486.misc @@ -0,0 +1 @@ +Complete pip-tools setup. diff --git a/CHANGES/5494.bugfix b/CHANGES/5494.bugfix new file mode 100644 index 00000000000..449b6bdf3d6 --- /dev/null +++ b/CHANGES/5494.bugfix @@ -0,0 +1,4 @@ +Fixed the multipart POST requests processing to always release file +descriptors for the ``tempfile.Temporaryfile``-created +``_io.BufferedRandom`` instances of files sent within multipart request +bodies via HTTP POST requests. diff --git a/CHANGES/5494.misc b/CHANGES/5494.misc new file mode 100644 index 00000000000..3d83a77a033 --- /dev/null +++ b/CHANGES/5494.misc @@ -0,0 +1,3 @@ +Made sure to always close most of file descriptors and release other +resouces in tests. 
Started ignoring ``ResourceWarning``s in pytest for +warnings that are hard to track. diff --git a/CHANGES/5516.misc b/CHANGES/5516.misc new file mode 100644 index 00000000000..966c85cc9b4 --- /dev/null +++ b/CHANGES/5516.misc @@ -0,0 +1 @@ +Removed @unittest_run_loop. This is now the default behaviour. diff --git a/CHANGES/5533.misc b/CHANGES/5533.misc new file mode 100644 index 00000000000..2ccfbe81cd1 --- /dev/null +++ b/CHANGES/5533.misc @@ -0,0 +1 @@ +Add regression test for 0 timeouts. diff --git a/CHANGES/5572.feature b/CHANGES/5572.feature new file mode 100644 index 00000000000..a5d60fb6ee3 --- /dev/null +++ b/CHANGES/5572.feature @@ -0,0 +1,2 @@ +Always create a new event loop in ``aiohttp.web.run_app()``. +This adds better compatibility with ``asyncio.run()`` or if trying to run multiple apps in sequence. diff --git a/CHANGES/5585.feature b/CHANGES/5585.feature new file mode 100644 index 00000000000..06ddbe453d4 --- /dev/null +++ b/CHANGES/5585.feature @@ -0,0 +1 @@ +Add ``aiohttp.pytest_plugin.AiohttpClient`` for static typing of pytest plugin. diff --git a/CHANGES/5606.bugfix b/CHANGES/5606.bugfix new file mode 100644 index 00000000000..b61de2fa83f --- /dev/null +++ b/CHANGES/5606.bugfix @@ -0,0 +1 @@ +Replace deprecated app handler design in ``tests/autobahn/server.py`` with call to ``web.run_app``; replace deprecated ``aiohttp.ws_connect`` calls in ``tests/autobahn/client.py`` with ``aiohttp.ClienSession.ws_connect``. diff --git a/CHANGES/5635.misc b/CHANGES/5635.misc new file mode 100644 index 00000000000..c4ec37f8d26 --- /dev/null +++ b/CHANGES/5635.misc @@ -0,0 +1 @@ +Added regression tests for dispatching urlencoded routes. 
diff --git a/CHANGES/5648.misc b/CHANGES/5648.misc new file mode 100644 index 00000000000..17a71c3fda5 --- /dev/null +++ b/CHANGES/5648.misc @@ -0,0 +1 @@ +Fix pip upgrade on windows machines diff --git a/CHANGES/5725.doc b/CHANGES/5725.doc new file mode 100644 index 00000000000..1b50453f0b7 --- /dev/null +++ b/CHANGES/5725.doc @@ -0,0 +1,2 @@ +Fixed docs on request cookies type, so it matches what is actually used in the code (a +read-only dictionary-like object). diff --git a/CHANGES/5727.bugfix b/CHANGES/5727.bugfix new file mode 100644 index 00000000000..7f2e3a2a9d8 --- /dev/null +++ b/CHANGES/5727.bugfix @@ -0,0 +1 @@ + Remove incorrect default from docs diff --git a/CHANGES/5783.feature b/CHANGES/5783.feature new file mode 100644 index 00000000000..4be16c23343 --- /dev/null +++ b/CHANGES/5783.feature @@ -0,0 +1 @@ +Started keeping the ``Authorization`` header during http->https redirects when the host remains the same. diff --git a/CHANGES/5806.misc b/CHANGES/5806.misc new file mode 100644 index 00000000000..e2f30f91b5f --- /dev/null +++ b/CHANGES/5806.misc @@ -0,0 +1 @@ +Remove last remnants of attrs library. diff --git a/CHANGES/5829.misc b/CHANGES/5829.misc new file mode 100644 index 00000000000..5a892ed54e0 --- /dev/null +++ b/CHANGES/5829.misc @@ -0,0 +1 @@ +Disallow untyped defs on internal tests. diff --git a/CHANGES/5836.doc b/CHANGES/5836.doc new file mode 100644 index 00000000000..ef8909cefb8 --- /dev/null +++ b/CHANGES/5836.doc @@ -0,0 +1 @@ +Fix the `ClientResponse.release`'s type in the doc. Change from `comethod` to `method`. diff --git a/CHANGES/5870.misc b/CHANGES/5870.misc new file mode 100644 index 00000000000..e2cdd194380 --- /dev/null +++ b/CHANGES/5870.misc @@ -0,0 +1 @@ +Simplify generator expression. 
diff --git a/CHANGES/5877.bugfix b/CHANGES/5877.bugfix new file mode 100644 index 00000000000..5a8108a9a45 --- /dev/null +++ b/CHANGES/5877.bugfix @@ -0,0 +1 @@ +Uses :py:class:`~asyncio.ThreadedChildWatcher` under POSIX to allow setting up test loop in non-main thread. diff --git a/CHANGES/5890.misc b/CHANGES/5890.misc new file mode 100644 index 00000000000..489cfc336a7 --- /dev/null +++ b/CHANGES/5890.misc @@ -0,0 +1 @@ +Update to Mypy 0.910. diff --git a/CHANGES/5905.bugfix b/CHANGES/5905.bugfix new file mode 100644 index 00000000000..b667968fe19 --- /dev/null +++ b/CHANGES/5905.bugfix @@ -0,0 +1 @@ +remove deprecated loop argument for asnycio.sleep/gather calls diff --git a/CHANGES/5932.misc b/CHANGES/5932.misc new file mode 100644 index 00000000000..c9d96ad9361 --- /dev/null +++ b/CHANGES/5932.misc @@ -0,0 +1 @@ +Enable auto-merging of Dependabot PRs. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 84077a247e7..8669ccd3b2c 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -3,6 +3,7 @@ A. Jesse Jiryu Davis Adam Bannister Adam Cooper +Adam Horacek Adam Mills Adrian Krupa Adrián Chaves @@ -27,6 +28,7 @@ Alexandru Mihai Alexey Firsov Alexey Popravka Alexey Stepanov +Almaz Salakhov Amin Etesamian Amit Tulshyan Amy Boyle @@ -39,12 +41,14 @@ Andrew Lytvyn Andrew Svetlov Andrew Zhou Andrii Soldatenko +Anes Abismail Antoine Pietri Anton Kasyanov Anton Zhdan-Pushkin Arseny Timoniq Artem Yushkovskiy Arthur Darcet +Austin Scola Ben Bader Ben Timby Benedikt Reinartz @@ -112,6 +116,7 @@ Felix Yan Fernanda Guimarães FichteFoll Florian Scheffler +Franek Magiera Frederik Gladhorn Frederik Peter Aalund Gabriel Tremblay @@ -232,11 +237,13 @@ Paulius Šileikis Paulus Schoutsen Pavel Kamaev Pavel Polyakov +Pavel Sapezhko Pawel Kowalski Pawel Miech Pepe Osca Philipp A. 
Pieter van Beek +Qiao Han Rafael Viotti Raphael Bialon Raúl Cumplido @@ -264,6 +271,7 @@ Stanislav Prokop Stefan Tjarks Stepan Pletnev Stephan Jaensch +Stephen Cirelli Stephen Granade Steven Seguin Sunghyun Hwang @@ -307,6 +315,7 @@ Vladyslav Bondar W. Trevor King Wei Lin Weiwei Wang +Will Fatherley Will McGugan Willem de Groot William Grzybowski @@ -320,6 +329,7 @@ Yegor Roganov Yifei Kong Young-Ho Cha Yuriy Shatrov +Yury Pliner Yury Selivanov Yusuke Tsutsumi Yuval Ofir diff --git a/LICENSE.txt b/LICENSE.txt index 90c9d01bc5a..054102f2db3 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,192 +1,4 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright 2013-2020 aiohttp maintainers + Copyright 2013-2020 aio-libs collaboration. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/Makefile b/Makefile index 9a1e8311495..0e7189760bb 100644 --- a/Makefile +++ b/Makefile @@ -9,6 +9,9 @@ CS := $(wildcard aiohttp/*.c) PYS := $(wildcard aiohttp/*.py) REQS := $(wildcard requirements/*.txt) ALLS := $(sort $(CYS) $(CS) $(PYS) $(REQS)) +IN := doc-spelling lint cython dev +REQIN := $(foreach fname,$(IN),requirements/$(fname).in) + .PHONY: all all: test @@ -45,9 +48,11 @@ endif # Enumerate intermediate files to don't remove them automatically. .SECONDARY: $(call to-hash,$(ALLS)) +.update-pip: + @python -m pip install --upgrade pip -.install-cython: $(call to-hash,requirements/cython.txt) - pip install -r requirements/cython.txt +.install-cython: .update-pip $(call to-hash,requirements/cython.txt) + @pip install -r requirements/cython.txt @touch .install-cython aiohttp/_find_header.c: $(call to-hash,aiohttp/hdrs.py ./tools/gen.py) @@ -62,7 +67,7 @@ aiohttp/%.c: aiohttp/%.pyx $(call to-hash,$(CYS)) aiohttp/_find_header.c cythonize: .install-cython $(PYXS:.pyx=.c) .install-deps: .install-cython $(PYXS:.pyx=.c) $(call to-hash,$(CYS) $(REQS)) - pip install -r requirements/dev.txt + @pip install -r requirements/dev.txt @touch .install-deps .PHONY: lint @@ -74,7 +79,7 @@ fmt format: .PHONY: mypy mypy: - mypy aiohttp tests + mypy .develop: .install-deps $(call to-hash,$(PYS) $(CYS) $(CS)) pip install -e . @@ -92,6 +97,11 @@ vtest: .develop vvtest: .develop @pytest -vv +.PHONY: cov-dev +cov-dev: .develop + @pytest --cov-report=html + @echo "xdg-open file://`pwd`/htmlcov/index.html" + .PHONY: clean clean: @rm -rf `find . 
-name __pycache__` @@ -128,17 +138,21 @@ clean: .PHONY: doc doc: - @make -C docs html SPHINXOPTS="-W -E" + @make -C docs html SPHINXOPTS="-W --keep-going -n -E" @echo "open file://`pwd`/docs/_build/html/index.html" .PHONY: doc-spelling doc-spelling: - @make -C docs spelling SPHINXOPTS="-W -E" + @make -C docs spelling SPHINXOPTS="-W --keep-going -n -E" + +.PHONY: compile-deps +compile-deps: .update-pip + @pip install pip-tools + @$(foreach fname,$(REQIN),pip-compile --allow-unsafe -q $(fname);) .PHONY: install -install: - @pip install -U 'pip' - @pip install -Ur requirements/dev.txt +install: .update-pip + @pip install -r requirements/dev.txt .PHONY: install-dev install-dev: .develop diff --git a/README.rst b/README.rst index 6019d225a89..6abb34bef56 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ Async http client/server framework ================================== -.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png +.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg :height: 64px :width: 64px :alt: aiohttp logo @@ -164,7 +164,6 @@ Requirements - Python >= 3.7 - async-timeout_ -- attrs_ - chardet_ - multidict_ - yarl_ @@ -174,7 +173,6 @@ recommended for sake of speed). .. _chardet: https://pypi.python.org/pypi/chardet .. _aiodns: https://pypi.python.org/pypi/aiodns -.. _attrs: https://github.com/python-attrs/attrs .. _multidict: https://pypi.python.org/pypi/multidict .. _yarl: https://pypi.python.org/pypi/yarl .. 
_async-timeout: https://pypi.python.org/pypi/async_timeout diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 113089ae0b9..4dd54a4e7e4 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -38,7 +38,7 @@ ) from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar from .formdata import FormData as FormData -from .helpers import BasicAuth as BasicAuth, ChainMapProxy as ChainMapProxy +from .helpers import BasicAuth, ChainMapProxy, ETag from .http import ( HttpVersion as HttpVersion, HttpVersion10 as HttpVersion10, @@ -145,6 +145,7 @@ # helpers "BasicAuth", "ChainMapProxy", + "ETag", # http "HttpVersion", "HttpVersion10", diff --git a/aiohttp/_http_writer.pyx b/aiohttp/_http_writer.pyx index 84b42fa1c35..eff85219586 100644 --- a/aiohttp/_http_writer.pyx +++ b/aiohttp/_http_writer.pyx @@ -111,6 +111,14 @@ cdef str to_str(object s): return str(s) +cdef void _safe_header(str string) except *: + if "\r" in string or "\n" in string: + raise ValueError( + "Newline or carriage return character detected in HTTP status message or " + "header. This is a potential security issue." + ) + + def _serialize_headers(str status_line, headers): cdef Writer writer cdef object key @@ -119,6 +127,10 @@ def _serialize_headers(str status_line, headers): _init_writer(&writer) + for key, val in headers.items(): + _safe_header(to_str(key)) + _safe_header(to_str(val)) + try: if _write_str(&writer, status_line) < 0: raise diff --git a/aiohttp/client.py b/aiohttp/client.py index 34ce9c92339..1aab4829ffe 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -9,6 +9,7 @@ import sys import traceback import warnings +from contextlib import suppress from types import SimpleNamespace, TracebackType from typing import ( Any, @@ -30,7 +31,7 @@ ) from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr -from typing_extensions import final +from typing_extensions import Final, final from yarl import URL from . 
import hdrs, http, payload @@ -80,7 +81,7 @@ BasicAuth, TimeoutHandle, ceil_timeout, - proxies_from_env, + get_env_proxy_for_url, sentinel, strip_auth_from_url, ) @@ -133,7 +134,7 @@ try: from ssl import SSLContext except ImportError: # pragma: no cover - SSLContext = object # type: ignore + SSLContext = object # type: ignore[misc,assignment] @dataclasses.dataclass(frozen=True) @@ -153,12 +154,12 @@ class ClientTimeout: # to create a timeout specific for a single request, either # - create a completely new one to overwrite the default - # - or use http://www.attrs.org/en/stable/api.html#attr.evolve + # - or use https://docs.python.org/3/library/dataclasses.html#dataclasses.replace # to overwrite the defaults # 5 Minute default read timeout -DEFAULT_TIMEOUT = ClientTimeout(total=5 * 60) +DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60) _RetType = TypeVar("_RetType") @@ -248,7 +249,7 @@ def __init__( if timeout is sentinel: self._timeout = DEFAULT_TIMEOUT else: - self._timeout = timeout # type: ignore + self._timeout = timeout # type: ignore[assignment] self._raise_for_status = raise_for_status self._auto_decompress = auto_decompress self._trust_env = trust_env @@ -381,7 +382,7 @@ async def _request( real_timeout = self._timeout # type: ClientTimeout else: if not isinstance(timeout, ClientTimeout): - real_timeout = ClientTimeout(total=timeout) # type: ignore + real_timeout = ClientTimeout(total=timeout) # type: ignore[arg-type] else: real_timeout = timeout # timeout is cumulative for all request operations @@ -444,11 +445,8 @@ async def _request( if proxy is not None: proxy = URL(proxy) elif self._trust_env: - for scheme, proxy_info in proxies_from_env().items(): - if scheme == url.scheme: - proxy = proxy_info.proxy - proxy_auth = proxy_info.proxy_auth - break + with suppress(LookupError): + proxy, proxy_auth = get_env_proxy_for_url(url) req = self._request_class( method, @@ -568,7 +566,16 @@ async def _request( elif not scheme: parsed_url = 
url.join(parsed_url) - if url.origin() != parsed_url.origin(): + is_same_host_https_redirect = ( + url.host == parsed_url.host + and parsed_url.scheme == "https" + and url.scheme == "http" + ) + + if ( + url.origin() != parsed_url.origin() + and not is_same_host_https_redirect + ): auth = None headers.pop(hdrs.AUTHORIZATION, None) @@ -690,7 +697,7 @@ async def _ws_connect( DeprecationWarning, stacklevel=2, ) - ws_timeout = ClientWSTimeout(ws_close=timeout) # type: ignore + ws_timeout = ClientWSTimeout(ws_close=timeout) # type: ignore[arg-type] else: ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT if receive_timeout is not None: @@ -1053,7 +1060,7 @@ def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> No def send(self, arg: None) -> "asyncio.Future[Any]": return self._coro.send(arg) - def throw(self, arg: BaseException) -> None: # type: ignore + def throw(self, arg: BaseException) -> None: # type: ignore[arg-type,override] self._coro.throw(arg) def close(self) -> None: @@ -1072,6 +1079,8 @@ async def __aenter__(self) -> _RetType: class _RequestContextManager(_BaseRequestContextManager[ClientResponse]): + __slots__ = () + async def __aexit__( self, exc_type: Optional[Type[BaseException]], @@ -1087,6 +1096,8 @@ async def __aexit__( class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]): + __slots__ = () + async def __aexit__( self, exc_type: Optional[Type[BaseException]], diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 12fffff64ab..808c1cc614e 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -11,7 +11,7 @@ SSLContext = ssl.SSLContext except ImportError: # pragma: no cover - ssl = SSLContext = None # type: ignore + ssl = SSLContext = None # type: ignore[assignment] if TYPE_CHECKING: # pragma: no cover @@ -270,11 +270,11 @@ class ClientSSLError(ClientConnectorError): ssl_error_bases = (ClientSSLError,) -class ClientConnectorSSLError(*ssl_error_bases): # type: 
ignore +class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc] """Response ssl error.""" -class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore +class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc] """Response certificate error.""" def __init__( diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 640d1272cd6..c265521ba3b 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -149,7 +149,7 @@ def set_response_params( read_until_eof: bool = False, auto_decompress: bool = True, read_timeout: Optional[float] = None, - read_bufsize: int = 2 ** 16 + read_bufsize: int = 2 ** 16, ) -> None: self._skip_payload = skip_payload diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 97b2ce25f00..c63b73bcdf8 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -64,13 +64,13 @@ import ssl from ssl import SSLContext except ImportError: # pragma: no cover - ssl = None # type: ignore - SSLContext = object # type: ignore + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] try: import cchardet as chardet except ImportError: # pragma: no cover - import chardet # type: ignore + import chardet # type: ignore[no-redef] __all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") @@ -333,9 +333,9 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: if headers: if isinstance(headers, (dict, MultiDictProxy, MultiDict)): - headers = headers.items() # type: ignore + headers = headers.items() # type: ignore[assignment] - for key, value in headers: # type: ignore + for key, value in headers: # type: ignore[misc] # A special case for Host header if key.lower() == "host": self.headers[key] = value @@ -347,7 +347,7 @@ def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: (hdr, None) for hdr in sorted(skip_auto_headers) ) used_headers = self.headers.copy() - 
used_headers.extend(self.skip_auto_headers) # type: ignore + used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] for hdr, val in self.DEFAULT_HEADERS.items(): if hdr not in used_headers: @@ -369,7 +369,7 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None: if isinstance(cookies, Mapping): iter_cookies = cookies.items() else: - iter_cookies = cookies # type: ignore + iter_cookies = cookies # type: ignore[assignment] for name, value in iter_cookies: if isinstance(value, Morsel): # Preserve coded_value @@ -377,7 +377,7 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None: mrsl_val.set(value.key, value.value, value.coded_value) c[name] = mrsl_val else: - c[name] = value # type: ignore + c[name] = value # type: ignore[assignment] self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() @@ -519,10 +519,10 @@ async def write_bytes( await self.body.write(writer) else: if isinstance(self.body, (bytes, bytearray)): - self.body = (self.body,) # type: ignore + self.body = (self.body,) # type: ignore[assignment] for chunk in self.body: - await writer.write(chunk) # type: ignore + await writer.write(chunk) # type: ignore[arg-type] await writer.write_eof() except OSError as exc: @@ -806,7 +806,7 @@ def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]": link.add(key, value) - key = link.get("rel", url) # type: ignore + key = link.get("rel", url) # type: ignore[assignment] link.add("url", self.url.join(URL(url))) @@ -824,7 +824,8 @@ async def start(self, connection: "Connection") -> "ClientResponse": while True: # read response try: - message, payload = await self._protocol.read() # type: ignore + protocol = self._protocol + message, payload = await protocol.read() # type: ignore[union-attr] except http.HttpProcessingError as exc: raise ClientResponseError( self.request_info, @@ -923,14 +924,10 @@ def ok(self) -> bool: This is **not** a check for ``200 OK`` but a check that the response status is under 400. 
""" - try: - self.raise_for_status() - except ClientResponseError: - return False - return True + return 400 > self.status def raise_for_status(self) -> None: - if 400 <= self.status: + if not self.ok: # reason should always be not None for a started response assert self.reason is not None self.release() @@ -1015,7 +1012,7 @@ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> if encoding is None: encoding = self.get_encoding() - return self._body.decode(encoding, errors=errors) # type: ignore + return self._body.decode(encoding, errors=errors) # type: ignore[union-attr] async def json( self, @@ -1043,7 +1040,7 @@ async def json( if encoding is None: encoding = self.get_encoding() - return loads(self._body.decode(encoding)) # type: ignore + return loads(self._body.decode(encoding)) # type: ignore[union-attr] async def __aenter__(self) -> "ClientResponse": return self diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 4bf5a3754ba..17690f2a076 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -2,9 +2,10 @@ import asyncio import dataclasses -from typing import Any, Optional +from typing import Any, Optional, cast import async_timeout +from typing_extensions import Final from .client_exceptions import ClientError from .client_reqrep import ClientResponse @@ -33,7 +34,9 @@ class ClientWSTimeout: ws_close: Optional[float] = None -DEFAULT_WS_CLIENT_TIMEOUT = ClientWSTimeout(ws_receive=None, ws_close=10.0) +DEFAULT_WS_CLIENT_TIMEOUT: Final[ClientWSTimeout] = ClientWSTimeout( + ws_receive=None, ws_close=10.0 +) class ClientWebSocketResponse: @@ -65,10 +68,10 @@ def __init__( self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat - self._heartbeat_cb = None + self._heartbeat_cb: Optional[asyncio.TimerHandle] = None if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 - self._pong_response_cb = None + self._pong_response_cb: Optional[asyncio.TimerHandle] = None self._loop = loop 
self._waiting = None # type: Optional[asyncio.Future[bool]] self._exception = None # type: Optional[BaseException] @@ -283,13 +286,13 @@ async def receive_str(self, *, timeout: Optional[float] = None) -> str: msg = await self.receive(timeout) if msg.type != WSMsgType.TEXT: raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str") - return msg.data + return cast(str, msg.data) async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: msg = await self.receive(timeout) if msg.type != WSMsgType.BINARY: raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") - return msg.data + return cast(bytes, msg.data) async def receive_json( self, diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 46ecdb89792..58c528de6fb 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -55,8 +55,8 @@ SSLContext = ssl.SSLContext except ImportError: # pragma: no cover - ssl = None # type: ignore - SSLContext = object # type: ignore + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] __all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") @@ -218,7 +218,7 @@ def __init__( self._force_close = force_close # {host_key: FIFO list of waiters} - self._waiters = defaultdict(deque) # type: ignore + self._waiters = defaultdict(deque) # type: ignore[var-annotated] self._loop = loop self._factory = functools.partial(ResponseHandler, loop=loop) @@ -226,10 +226,10 @@ def __init__( self.cookies = SimpleCookie() # type: SimpleCookie[str] # start keep-alive connection cleanup task - self._cleanup_handle = None + self._cleanup_handle: Optional[asyncio.TimerHandle] = None # start cleanup closed transports task - self._cleanup_closed_handle = None + self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None self._cleanup_closed_disabled = not enable_cleanup_closed self._cleanup_closed_transports = [] # type: List[Optional[asyncio.Transport]] self._cleanup_closed() @@ 
-744,7 +744,7 @@ def __init__( self._ssl = ssl if resolver is None: resolver = DefaultResolver() - self._resolver = resolver + self._resolver: AbstractResolver = resolver self._use_dns_cache = use_dns_cache self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) @@ -943,7 +943,7 @@ async def _wrap_create_connection( ) -> Tuple[asyncio.Transport, ResponseHandler]: try: async with ceil_timeout(timeout.sock_connect): - return await self._loop.create_connection(*args, **kwargs) # type: ignore # noqa + return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa except cert_errors as exc: raise ClientConnectorCertificateError(req.connection_key, exc) from exc except ssl_errors as exc: @@ -1031,7 +1031,7 @@ async def _create_proxy_connection( ) -> Tuple[asyncio.Transport, ResponseHandler]: headers = {} # type: Dict[str, str] if req.proxy_headers is not None: - headers = req.proxy_headers # type: ignore + headers = req.proxy_headers # type: ignore[assignment] headers[hdrs.HOST] = req.headers[hdrs.HOST] url = req.proxy @@ -1202,7 +1202,9 @@ def __init__( limit=limit, limit_per_host=limit_per_host, ) - if not isinstance(self._loop, asyncio.ProactorEventLoop): # type: ignore + if not isinstance( + self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] + ): raise RuntimeError( "Named Pipes only available in proactor " "loop under windows" ) @@ -1218,7 +1220,7 @@ async def _create_connection( ) -> ResponseHandler: try: async with ceil_timeout(timeout.sock_connect): - _, proto = await self._loop.create_pipe_connection( # type: ignore + _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined] # noqa: E501 self._factory, self._path ) # the drain is required so that the connection_made is called diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index da76d2d8850..8d0a1edf49b 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -147,7 +147,7 @@ def update_cookies(self, cookies: LooseCookies, 
response_url: URL = URL()) -> No for name, cookie in cookies: if not isinstance(cookie, Morsel): tmp = SimpleCookie() # type: SimpleCookie[str] - tmp[name] = cookie # type: ignore + tmp[name] = cookie # type: ignore[assignment] cookie = tmp[name] domain = cookie["domain"] diff --git a/aiohttp/formdata.py b/aiohttp/formdata.py index 12cfda77ce4..ef1e697497d 100644 --- a/aiohttp/formdata.py +++ b/aiohttp/formdata.py @@ -45,7 +45,7 @@ def add_field( *, content_type: Optional[str] = None, filename: Optional[str] = None, - content_transfer_encoding: Optional[str] = None + content_transfer_encoding: Optional[str] = None, ) -> None: if isinstance(value, io.IOBase): @@ -92,14 +92,14 @@ def add_fields(self, *fields: Any) -> None: if isinstance(rec, io.IOBase): k = guess_filename(rec, "unknown") - self.add_field(k, rec) # type: ignore + self.add_field(k, rec) # type: ignore[arg-type] elif isinstance(rec, (MultiDictProxy, MultiDict)): to_add.extend(rec.items()) elif isinstance(rec, (list, tuple)) and len(rec) == 2: k, fp = rec - self.add_field(k, fp) # type: ignore + self.add_field(k, fp) # type: ignore[arg-type] else: raise TypeError( diff --git a/aiohttp/hdrs.py b/aiohttp/hdrs.py index f04a5457f9f..d7d8e5000f3 100644 --- a/aiohttp/hdrs.py +++ b/aiohttp/hdrs.py @@ -2,21 +2,23 @@ # After changing the file content call ./tools/gen.py # to regenerate the headers parser +from typing import Set from multidict import istr +from typing_extensions import Final -METH_ANY = "*" -METH_CONNECT = "CONNECT" -METH_HEAD = "HEAD" -METH_GET = "GET" -METH_DELETE = "DELETE" -METH_OPTIONS = "OPTIONS" -METH_PATCH = "PATCH" -METH_POST = "POST" -METH_PUT = "PUT" -METH_TRACE = "TRACE" +METH_ANY: Final[str] = "*" +METH_CONNECT: Final[str] = "CONNECT" +METH_HEAD: Final[str] = "HEAD" +METH_GET: Final[str] = "GET" +METH_DELETE: Final[str] = "DELETE" +METH_OPTIONS: Final[str] = "OPTIONS" +METH_PATCH: Final[str] = "PATCH" +METH_POST: Final[str] = "POST" +METH_PUT: Final[str] = "PUT" +METH_TRACE: 
Final[str] = "TRACE" -METH_ALL = { +METH_ALL: Final[Set[str]] = { METH_CONNECT, METH_HEAD, METH_GET, @@ -28,81 +30,80 @@ METH_TRACE, } - -ACCEPT = istr("Accept") -ACCEPT_CHARSET = istr("Accept-Charset") -ACCEPT_ENCODING = istr("Accept-Encoding") -ACCEPT_LANGUAGE = istr("Accept-Language") -ACCEPT_RANGES = istr("Accept-Ranges") -ACCESS_CONTROL_MAX_AGE = istr("Access-Control-Max-Age") -ACCESS_CONTROL_ALLOW_CREDENTIALS = istr("Access-Control-Allow-Credentials") -ACCESS_CONTROL_ALLOW_HEADERS = istr("Access-Control-Allow-Headers") -ACCESS_CONTROL_ALLOW_METHODS = istr("Access-Control-Allow-Methods") -ACCESS_CONTROL_ALLOW_ORIGIN = istr("Access-Control-Allow-Origin") -ACCESS_CONTROL_EXPOSE_HEADERS = istr("Access-Control-Expose-Headers") -ACCESS_CONTROL_REQUEST_HEADERS = istr("Access-Control-Request-Headers") -ACCESS_CONTROL_REQUEST_METHOD = istr("Access-Control-Request-Method") -AGE = istr("Age") -ALLOW = istr("Allow") -AUTHORIZATION = istr("Authorization") -CACHE_CONTROL = istr("Cache-Control") -CONNECTION = istr("Connection") -CONTENT_DISPOSITION = istr("Content-Disposition") -CONTENT_ENCODING = istr("Content-Encoding") -CONTENT_LANGUAGE = istr("Content-Language") -CONTENT_LENGTH = istr("Content-Length") -CONTENT_LOCATION = istr("Content-Location") -CONTENT_MD5 = istr("Content-MD5") -CONTENT_RANGE = istr("Content-Range") -CONTENT_TRANSFER_ENCODING = istr("Content-Transfer-Encoding") -CONTENT_TYPE = istr("Content-Type") -COOKIE = istr("Cookie") -DATE = istr("Date") -DESTINATION = istr("Destination") -DIGEST = istr("Digest") -ETAG = istr("Etag") -EXPECT = istr("Expect") -EXPIRES = istr("Expires") -FORWARDED = istr("Forwarded") -FROM = istr("From") -HOST = istr("Host") -IF_MATCH = istr("If-Match") -IF_MODIFIED_SINCE = istr("If-Modified-Since") -IF_NONE_MATCH = istr("If-None-Match") -IF_RANGE = istr("If-Range") -IF_UNMODIFIED_SINCE = istr("If-Unmodified-Since") -KEEP_ALIVE = istr("Keep-Alive") -LAST_EVENT_ID = istr("Last-Event-ID") -LAST_MODIFIED = istr("Last-Modified") -LINK 
= istr("Link") -LOCATION = istr("Location") -MAX_FORWARDS = istr("Max-Forwards") -ORIGIN = istr("Origin") -PRAGMA = istr("Pragma") -PROXY_AUTHENTICATE = istr("Proxy-Authenticate") -PROXY_AUTHORIZATION = istr("Proxy-Authorization") -RANGE = istr("Range") -REFERER = istr("Referer") -RETRY_AFTER = istr("Retry-After") -SEC_WEBSOCKET_ACCEPT = istr("Sec-WebSocket-Accept") -SEC_WEBSOCKET_VERSION = istr("Sec-WebSocket-Version") -SEC_WEBSOCKET_PROTOCOL = istr("Sec-WebSocket-Protocol") -SEC_WEBSOCKET_EXTENSIONS = istr("Sec-WebSocket-Extensions") -SEC_WEBSOCKET_KEY = istr("Sec-WebSocket-Key") -SEC_WEBSOCKET_KEY1 = istr("Sec-WebSocket-Key1") -SERVER = istr("Server") -SET_COOKIE = istr("Set-Cookie") -TE = istr("TE") -TRAILER = istr("Trailer") -TRANSFER_ENCODING = istr("Transfer-Encoding") -UPGRADE = istr("Upgrade") -URI = istr("URI") -USER_AGENT = istr("User-Agent") -VARY = istr("Vary") -VIA = istr("Via") -WANT_DIGEST = istr("Want-Digest") -WARNING = istr("Warning") -WWW_AUTHENTICATE = istr("WWW-Authenticate") -X_FORWARDED_FOR = istr("X-Forwarded-For") -X_FORWARDED_HOST = istr("X-Forwarded-Host") -X_FORWARDED_PROTO = istr("X-Forwarded-Proto") +ACCEPT: Final[istr] = istr("Accept") +ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset") +ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding") +ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language") +ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges") +ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age") +ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials") +ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers") +ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods") +ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin") +ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers") +ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers") 
+ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method") +AGE: Final[istr] = istr("Age") +ALLOW: Final[istr] = istr("Allow") +AUTHORIZATION: Final[istr] = istr("Authorization") +CACHE_CONTROL: Final[istr] = istr("Cache-Control") +CONNECTION: Final[istr] = istr("Connection") +CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition") +CONTENT_ENCODING: Final[istr] = istr("Content-Encoding") +CONTENT_LANGUAGE: Final[istr] = istr("Content-Language") +CONTENT_LENGTH: Final[istr] = istr("Content-Length") +CONTENT_LOCATION: Final[istr] = istr("Content-Location") +CONTENT_MD5: Final[istr] = istr("Content-MD5") +CONTENT_RANGE: Final[istr] = istr("Content-Range") +CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding") +CONTENT_TYPE: Final[istr] = istr("Content-Type") +COOKIE: Final[istr] = istr("Cookie") +DATE: Final[istr] = istr("Date") +DESTINATION: Final[istr] = istr("Destination") +DIGEST: Final[istr] = istr("Digest") +ETAG: Final[istr] = istr("Etag") +EXPECT: Final[istr] = istr("Expect") +EXPIRES: Final[istr] = istr("Expires") +FORWARDED: Final[istr] = istr("Forwarded") +FROM: Final[istr] = istr("From") +HOST: Final[istr] = istr("Host") +IF_MATCH: Final[istr] = istr("If-Match") +IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since") +IF_NONE_MATCH: Final[istr] = istr("If-None-Match") +IF_RANGE: Final[istr] = istr("If-Range") +IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since") +KEEP_ALIVE: Final[istr] = istr("Keep-Alive") +LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID") +LAST_MODIFIED: Final[istr] = istr("Last-Modified") +LINK: Final[istr] = istr("Link") +LOCATION: Final[istr] = istr("Location") +MAX_FORWARDS: Final[istr] = istr("Max-Forwards") +ORIGIN: Final[istr] = istr("Origin") +PRAGMA: Final[istr] = istr("Pragma") +PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate") +PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization") +RANGE: Final[istr] = istr("Range") +REFERER: Final[istr] = 
istr("Referer") +RETRY_AFTER: Final[istr] = istr("Retry-After") +SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept") +SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version") +SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol") +SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions") +SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key") +SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1") +SERVER: Final[istr] = istr("Server") +SET_COOKIE: Final[istr] = istr("Set-Cookie") +TE: Final[istr] = istr("TE") +TRAILER: Final[istr] = istr("Trailer") +TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding") +UPGRADE: Final[istr] = istr("Upgrade") +URI: Final[istr] = istr("URI") +USER_AGENT: Final[istr] = istr("User-Agent") +VARY: Final[istr] = istr("Vary") +VIA: Final[istr] = istr("Via") +WANT_DIGEST: Final[istr] = istr("Want-Digest") +WARNING: Final[istr] = istr("Warning") +WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate") +X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For") +X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host") +X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto") diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 1b572fd3c0b..418de0f6f9a 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -40,7 +40,7 @@ cast, ) from urllib.parse import quote -from urllib.request import getproxies +from urllib.request import getproxies, proxy_bypass import async_timeout from multidict import CIMultiDict, MultiDict, MultiDictProxy @@ -51,7 +51,7 @@ from .log import client_logger from .typedefs import PathLike # noqa -__all__ = ("BasicAuth", "ChainMapProxy") +__all__ = ("BasicAuth", "ChainMapProxy", "ETag") PY_38 = sys.version_info >= (3, 8) @@ -114,7 +114,7 @@ def __await__(self) -> Generator[None, None, None]: iscoroutinefunction = asyncio.iscoroutinefunction else: - def iscoroutinefunction(func: Callable[..., Any]) -> bool: + def iscoroutinefunction(func: Any) -> 
bool: while isinstance(func, functools.partial): func = func.func return asyncio.iscoroutinefunction(func) @@ -269,6 +269,20 @@ def proxies_from_env() -> Dict[str, ProxyInfo]: return ret +def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + """Get a permitted proxy for the given URL from the env.""" + if url.host is not None and proxy_bypass(url.host): + raise LookupError(f"Proxying is disallowed for `{url.host!r}`") + + proxies_in_env = proxies_from_env() + try: + proxy_info = proxies_in_env[url.scheme] + except KeyError: + raise LookupError(f"No proxies found for `{url!s}` in the env") + else: + return proxy_info.proxy, proxy_info.proxy_auth + + @dataclasses.dataclass(frozen=True) class MimeType: type: str @@ -410,8 +424,8 @@ def is_expected_content_type( return expected_content_type in response_content_type -class _TSelf(Protocol): - _cache: Dict[str, Any] +class _TSelf(Protocol, Generic[_T]): + _cache: Dict[str, _T] class reify(Generic[_T]): @@ -428,7 +442,7 @@ def __init__(self, wrapped: Callable[..., _T]) -> None: self.__doc__ = wrapped.__doc__ self.name = wrapped.__name__ - def __get__(self, inst: _TSelf, owner: Optional[Type[Any]] = None) -> _T: + def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T: try: try: return inst._cache[self.name] @@ -441,7 +455,7 @@ def __get__(self, inst: _TSelf, owner: Optional[Type[Any]] = None) -> _T: return self raise - def __set__(self, inst: _TSelf, value: _T) -> None: + def __set__(self, inst: _TSelf[_T], value: _T) -> None: raise AttributeError("reified property is read-only") @@ -451,7 +465,7 @@ def __set__(self, inst: _TSelf, value: _T) -> None: from ._helpers import reify as reify_c if not NO_EXTENSIONS: - reify = reify_c # type: ignore + reify = reify_c # type: ignore[misc,assignment] except ImportError: pass @@ -547,7 +561,7 @@ def rfc822_formatted_time() -> str: return _cached_formatted_datetime -def _weakref_handle(info): # type: ignore +def _weakref_handle(info: 
"Tuple[weakref.ref[object], str]") -> None: ref, name = info ob = ref() if ob is not None: @@ -555,21 +569,27 @@ def _weakref_handle(info): # type: ignore getattr(ob, name)() -def weakref_handle(ob, name, timeout, loop): # type: ignore +def weakref_handle( + ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop +) -> Optional[asyncio.TimerHandle]: if timeout is not None and timeout > 0: when = loop.time() + timeout if timeout >= 5: when = ceil(when) return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name)) + return None -def call_later(cb, timeout, loop): # type: ignore +def call_later( + cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop +) -> Optional[asyncio.TimerHandle]: if timeout is not None and timeout > 0: when = loop.time() + timeout if timeout > 5: when = ceil(when) return loop.call_at(when, cb) + return None class TimeoutHandle: @@ -680,16 +700,16 @@ def timeout(self) -> None: def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout: - if delay is not None and delay > 0: - loop = asyncio.get_running_loop() - now = loop.time() - when = now + delay - if delay > 5: - when = ceil(when) - return async_timeout.timeout_at(when) - else: + if delay is None or delay <= 0: return async_timeout.timeout(None) + loop = asyncio.get_running_loop() + now = loop.time() + when = now + delay + if delay > 5: + when = ceil(when) + return async_timeout.timeout_at(when) + class HeadersMixin: @@ -713,23 +733,25 @@ def _parse_content_type(self, raw: str) -> None: @property def content_type(self) -> str: """The value of content part for Content-Type HTTP header.""" - raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore + raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined] if self._stored_content_type != raw: self._parse_content_type(raw) - return self._content_type # type: ignore + return self._content_type # type: ignore[return-value] @property def charset(self) -> Optional[str]: """The value of 
charset part for Content-Type HTTP header.""" - raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore + raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined] if self._stored_content_type != raw: self._parse_content_type(raw) - return self._content_dict.get("charset") # type: ignore + return self._content_dict.get("charset") # type: ignore[union-attr] @property def content_length(self) -> Optional[int]: """The value of Content-Length HTTP header.""" - content_length = self._headers.get(hdrs.CONTENT_LENGTH) # type: ignore + content_length = self._headers.get( # type: ignore[attr-defined] + hdrs.CONTENT_LENGTH + ) if content_length is not None: return int(content_length) @@ -773,7 +795,7 @@ def get(self, key: str, default: Any = None) -> Any: def __len__(self) -> int: # reuses stored hash values if possible - return len(set().union(*self._maps)) # type: ignore + return len(set().union(*self._maps)) # type: ignore[arg-type] def __iter__(self) -> Iterator[str]: d = {} # type: Dict[str, Any] @@ -879,3 +901,26 @@ def populate_with_cookies( for cookie in cookies.values(): value = cookie.output(header="")[1:] headers.add(hdrs.SET_COOKIE, value) + + +# https://tools.ietf.org/html/rfc7232#section-2.3 +_ETAGC = r"[!#-}\x80-\xff]+" +_ETAGC_RE = re.compile(_ETAGC) +_QUOTED_ETAG = fr'(W/)?"({_ETAGC})"' +QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG) +LIST_QUOTED_ETAG_RE = re.compile(fr"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)") + +ETAG_ANY = "*" + + +@dataclasses.dataclass(frozen=True) +class ETag: + value: str + is_weak: bool = False + + +def validate_etag_value(value: str) -> None: + if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value): + raise ValueError( + f"Value {value!r} is not a valid etag. Maybe it contains '\"'?" 
+ ) diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 2ea0387ce41..1045b6c0926 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -6,9 +6,23 @@ import zlib from contextlib import suppress from enum import IntEnum -from typing import Any, Generic, List, Optional, Tuple, Type, TypeVar, Union +from typing import ( + Any, + Generic, + List, + NamedTuple, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) from multidict import CIMultiDict, CIMultiDictProxy, istr +from typing_extensions import Final from yarl import URL from . import hdrs @@ -44,7 +58,7 @@ "RawResponseMessage", ) -ASCIISET = set(string.printable) +ASCIISET: Final[Set[str]] = set(string.printable) # See https://tools.ietf.org/html/rfc7230#section-3.1.1 # and https://tools.ietf.org/html/rfc7230#appendix-B @@ -53,25 +67,23 @@ # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." / # "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA # token = 1*tchar -METHRE = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+") -VERSRE = re.compile(r"HTTP/(\d+).(\d+)") -HDRRE = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]") +METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+") +VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+).(\d+)") +HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]") + + +class RawRequestMessage(NamedTuple): + method: str + path: str + version: HttpVersion + headers: CIMultiDictProxy[str] + raw_headers: RawHeaders + should_close: bool + compression: Optional[str] + upgrade: bool + chunked: bool + url: URL -RawRequestMessage = collections.namedtuple( - "RawRequestMessage", - [ - "method", - "path", - "version", - "headers", - "raw_headers", - "should_close", - "compression", - "upgrade", - "chunked", - "url", - ], -) RawResponseMessage = collections.namedtuple( "RawResponseMessage", @@ -300,20 +312,27 @@ def feed_data( # \r\n\r\n found if self._lines[-1] == EMPTY: try: - msg = 
self.parse_message(self._lines) + msg: _MsgT = self.parse_message(self._lines) finally: self._lines.clear() - # payload length - length = msg.headers.get(CONTENT_LENGTH) - if length is not None: + def get_content_length() -> Optional[int]: + # payload length + length_hdr = msg.headers.get(CONTENT_LENGTH) + if length_hdr is None: + return None + try: - length = int(length) + length = int(length_hdr) except ValueError: raise InvalidHeader(CONTENT_LENGTH) + if length < 0: raise InvalidHeader(CONTENT_LENGTH) + return length + + length = get_content_length() # do not support old websocket spec if SEC_WEBSOCKET_KEY1 in msg.headers: raise InvalidHeader(SEC_WEBSOCKET_KEY1) @@ -503,6 +522,9 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: "Status line is too long", str(self.max_line_size), str(len(path)) ) + path_part, _hash_separator, url_fragment = path.partition("#") + path_part, _question_mark_separator, qs_part = path_part.partition("?") + # method if not METHRE.match(method): raise BadStatusLine(method) @@ -543,7 +565,16 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: compression, upgrade, chunked, - URL(path), + # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based + # NOTE: parser does, otherwise it results into the same + # NOTE: HTTP Request-Line input producing different + # NOTE: `yarl.URL()` objects + URL.build( + path=path_part, + query_string=qs_part, + fragment=url_fragment, + encoded=True, + ), ) @@ -827,12 +858,12 @@ def __init__(self) -> None: def decompress(self, data: bytes) -> bytes: if hasattr(self._obj, "decompress"): - return self._obj.decompress(data) - return self._obj.process(data) + return cast(bytes, self._obj.decompress(data)) + return cast(bytes, self._obj.process(data)) def flush(self) -> bytes: if hasattr(self._obj, "flush"): - return self._obj.flush() + return cast(bytes, self._obj.flush()) return b"" self.decompressor = BrotliDecoder() # type: Any @@ -897,7 +928,7 @@ def 
end_http_chunk_receiving(self) -> None: try: if not NO_EXTENSIONS: - from ._http_parser import ( # type: ignore + from ._http_parser import ( # type: ignore[import,no-redef] HttpRequestParser, HttpResponseParser, RawRequestMessage, diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index d0dee7f4519..ee877fd4def 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -9,7 +9,9 @@ import zlib from enum import IntEnum from struct import Struct -from typing import Any, Callable, List, Optional, Tuple, Union +from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast + +from typing_extensions import Final from .base_protocol import BaseProtocol from .helpers import NO_EXTENSIONS @@ -44,7 +46,7 @@ class WSCloseCode(IntEnum): BAD_GATEWAY = 1014 -ALLOWED_CLOSE_CODES = {int(i) for i in WSCloseCode} +ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} class WSMsgType(IntEnum): @@ -62,7 +64,7 @@ class WSMsgType(IntEnum): ERROR = 0x102 -WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" +WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" UNPACK_LEN2 = Struct("!H").unpack_from @@ -72,8 +74,8 @@ class WSMsgType(IntEnum): PACK_LEN2 = Struct("!BBH").pack PACK_LEN3 = Struct("!BBQ").pack PACK_CLOSE_CODE = Struct("!H").pack -MSG_SIZE = 2 ** 14 -DEFAULT_LIMIT = 2 ** 16 +MSG_SIZE: Final[int] = 2 ** 14 +DEFAULT_LIMIT: Final[int] = 2 ** 16 _WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"]) @@ -100,18 +102,18 @@ def __init__(self, code: int, message: str) -> None: super().__init__(code, message) def __str__(self) -> str: - return self.args[1] + return cast(str, self.args[1]) class WSHandshakeError(Exception): """WebSocket protocol handshake error.""" -native_byteorder = sys.byteorder +native_byteorder: Final[str] = sys.byteorder # Used by _websocket_mask_python -_XOR_TABLE = [bytes(a ^ b for a in range(256)) for b in range(256)] +_XOR_TABLE: Final[List[bytes]] = [bytes(a 
^ b for a in range(256)) for b in range(256)] def _websocket_mask_python(mask: bytes, data: bytearray) -> None: @@ -142,16 +144,16 @@ def _websocket_mask_python(mask: bytes, data: bytearray) -> None: _websocket_mask = _websocket_mask_python else: try: - from ._websocket import _websocket_mask_cython # type: ignore + from ._websocket import _websocket_mask_cython # type: ignore[import] _websocket_mask = _websocket_mask_cython except ImportError: # pragma: no cover _websocket_mask = _websocket_mask_python -_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xFF, 0xFF]) +_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF]) -_WS_EXT_RE = re.compile( +_WS_EXT_RE: Final[Pattern[str]] = re.compile( r"^(?:;\s*(?:" r"(server_no_context_takeover)|" r"(client_no_context_takeover)|" @@ -159,7 +161,7 @@ def _websocket_mask_python(mask: bytes, data: bytearray) -> None: r"(client_max_window_bits(?:=(\d+))?)))*$" ) -_WS_EXT_RE_SPLIT = re.compile(r"permessage-deflate([^,]+)?") +_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?") def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]: diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index ffec6a756f9..428a7929b1a 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -171,19 +171,25 @@ async def drain(self) -> None: await self._protocol._drain_helper() +def _safe_header(string: str) -> str: + if "\r" in string or "\n" in string: + raise ValueError( + "Newline or carriage return detected in headers. " + "Potential header injection attack." 
+ ) + return string + + def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes: - line = ( - status_line - + "\r\n" - + "".join([k + ": " + v + "\r\n" for k, v in headers.items()]) - ) - return line.encode("utf-8") + b"\r\n" + headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items()) + line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n" + return line.encode("utf-8") _serialize_headers = _py_serialize_headers try: - import aiohttp._http_writer as _http_writer # type: ignore + import aiohttp._http_writer as _http_writer # type: ignore[import] _c_serialize_headers = _http_writer._serialize_headers if not NO_EXTENSIONS: diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 00ba01af7be..b5e78c835e6 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -11,6 +11,7 @@ TYPE_CHECKING, Any, AsyncIterator, + Deque, Dict, Iterator, List, @@ -20,6 +21,7 @@ Tuple, Type, Union, + cast, ) from urllib.parse import parse_qsl, unquote, urlencode @@ -267,13 +269,13 @@ def __init__( self._length = int(length) if length is not None else None self._read_bytes = 0 # TODO: typeing.Deque is not supported by Python 3.5 - self._unread = deque() # type: Any + self._unread: Deque[bytes] = deque() self._prev_chunk = None # type: Optional[bytes] self._content_eof = 0 self._cache = {} # type: Dict[str, Any] def __aiter__(self) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore + return self # type: ignore[return-value] async def __anext__(self) -> bytes: part = await self.next() @@ -448,7 +450,7 @@ async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, An if not data: return None encoding = encoding or self.get_charset(default="utf-8") - return json.loads(data.decode(encoding)) + return cast(Dict[str, Any], json.loads(data.decode(encoding))) async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]: """Like read(), but assumes that body parts contains 
form @@ -585,7 +587,7 @@ def __init__( def __aiter__( self, ) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore + return self # type: ignore[return-value] async def __anext__( self, @@ -886,7 +888,7 @@ def append_payload(self, payload: Payload) -> Payload: if size is not None and not (encoding or te_encoding): payload.headers[CONTENT_LENGTH] = str(size) - self._parts.append((payload, encoding, te_encoding)) # type: ignore + self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] return payload def append_json( @@ -951,7 +953,7 @@ async def write(self, writer: Any, close_boundary: bool = True) -> None: w.enable_compression(encoding) if te_encoding: w.enable_encoding(te_encoding) - await part.write(w) # type: ignore + await part.write(w) # type: ignore[arg-type] await w.write_eof() else: await part.write(writer) diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 0e881e41eae..b88da2cd8ed 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -23,6 +23,7 @@ ) from multidict import CIMultiDict +from typing_extensions import Final from . import hdrs from .abc import AbstractStreamWriter @@ -52,8 +53,7 @@ "AsyncIterablePayload", ) -TOO_LARGE_BYTES_BODY = 2 ** 20 # 1 MB - +TOO_LARGE_BYTES_BODY: Final[int] = 2 ** 20 # 1 MB if TYPE_CHECKING: # pragma: no cover from typing import List @@ -89,6 +89,10 @@ def __call__(self, factory: Type["Payload"]) -> Type["Payload"]: return factory +PayloadType = Type["Payload"] +_PayloadRegistryItem = Tuple[PayloadType, Any] + + class PayloadRegistry: """Payload registry. 
@@ -96,12 +100,16 @@ class PayloadRegistry: """ def __init__(self) -> None: - self._first = [] # type: List[Tuple[Type[Payload], Any]] - self._normal = [] # type: List[Tuple[Type[Payload], Any]] - self._last = [] # type: List[Tuple[Type[Payload], Any]] + self._first = [] # type: List[_PayloadRegistryItem] + self._normal = [] # type: List[_PayloadRegistryItem] + self._last = [] # type: List[_PayloadRegistryItem] def get( - self, data: Any, *args: Any, _CHAIN: Any = chain, **kwargs: Any + self, + data: Any, + *args: Any, + _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain, + **kwargs: Any, ) -> "Payload": if isinstance(data, Payload): return data @@ -112,7 +120,7 @@ def get( raise LookupError() def register( - self, factory: Type["Payload"], type: Any, *, order: Order = Order.normal + self, factory: PayloadType, type: Any, *, order: Order = Order.normal ) -> None: if order is Order.try_first: self._first.append((factory, type)) @@ -277,6 +285,8 @@ def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: class IOBasePayload(Payload): + _value: IO[Any] + def __init__( self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any ) -> None: @@ -301,6 +311,8 @@ async def write(self, writer: AbstractStreamWriter) -> None: class TextIOPayload(IOBasePayload): + _value: TextIO + def __init__( self, value: TextIO, @@ -341,7 +353,12 @@ async def write(self, writer: AbstractStreamWriter) -> None: try: chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16) while chunk: - await writer.write(chunk.encode(self._encoding)) + data = ( + chunk.encode(encoding=self._encoding) + if self._encoding + else chunk.encode() + ) + await writer.write(data) chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16) finally: await loop.run_in_executor(None, self._value.close) diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index 1c237132af7..153db35276b 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ 
-2,6 +2,7 @@ import contextlib import inspect import warnings +from typing import Any, Awaitable, Callable, Dict, Generator, Optional, Type, Union import pytest @@ -28,8 +29,10 @@ except ImportError: # pragma: no cover tokio = None +AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]] -def pytest_addoption(parser): # type: ignore + +def pytest_addoption(parser): # type: ignore[no-untyped-def] parser.addoption( "--aiohttp-fast", action="store_true", @@ -50,7 +53,7 @@ def pytest_addoption(parser): # type: ignore ) -def pytest_fixture_setup(fixturedef): # type: ignore +def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def] """ Allow fixtures to be coroutines. Run coroutine fixtures in an event loop. """ @@ -71,7 +74,7 @@ def pytest_fixture_setup(fixturedef): # type: ignore fixturedef.argnames += ("request",) strip_request = True - def wrapper(*args, **kwargs): # type: ignore + def wrapper(*args, **kwargs): # type: ignore[no-untyped-def] request = kwargs["request"] if strip_request: del kwargs["request"] @@ -92,7 +95,7 @@ def wrapper(*args, **kwargs): # type: ignore # then advance it again in a finalizer gen = func(*args, **kwargs) - def finalizer(): # type: ignore + def finalizer(): # type: ignore[no-untyped-def] try: return _loop.run_until_complete(gen.__anext__()) except StopAsyncIteration: @@ -107,19 +110,19 @@ def finalizer(): # type: ignore @pytest.fixture -def fast(request): # type: ignore +def fast(request): # type: ignore[no-untyped-def] """--fast config option""" return request.config.getoption("--aiohttp-fast") @pytest.fixture -def loop_debug(request): # type: ignore +def loop_debug(request): # type: ignore[no-untyped-def] """--enable-loop-debug config option""" return request.config.getoption("--aiohttp-enable-loop-debug") @contextlib.contextmanager -def _runtime_warning_context(): # type: ignore +def _runtime_warning_context(): # type: ignore[no-untyped-def] """ Context manager which checks for 
RuntimeWarnings, specifically to avoid "coroutine 'X' was never awaited" warnings being missed. @@ -148,7 +151,7 @@ def _runtime_warning_context(): # type: ignore @contextlib.contextmanager -def _passthrough_loop_context(loop, fast=False): # type: ignore +def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def] """ setups and tears down a loop unless one is passed in via the loop argument when it's passed straight through. @@ -163,7 +166,7 @@ def _passthrough_loop_context(loop, fast=False): # type: ignore teardown_test_loop(loop, fast=fast) -def pytest_pycollect_makeitem(collector, name, obj): # type: ignore +def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def] """ Fix pytest collecting for coroutines. """ @@ -171,7 +174,7 @@ def pytest_pycollect_makeitem(collector, name, obj): # type: ignore return list(collector._genfunctions(name, obj)) -def pytest_pyfunc_call(pyfuncitem): # type: ignore +def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def] """ Run coroutines in an event loop instead of a normal function call. """ @@ -191,7 +194,7 @@ def pytest_pyfunc_call(pyfuncitem): # type: ignore return True -def pytest_generate_tests(metafunc): # type: ignore +def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def] if "loop_factory" not in metafunc.fixturenames: return @@ -207,7 +210,7 @@ def pytest_generate_tests(metafunc): # type: ignore if loops == "all": loops = "pyloop,uvloop?,tokio?" 
- factories = {} # type: ignore + factories = {} # type: ignore[var-annotated] for name in loops.split(","): required = not name.endswith("?") name = name.strip(" ?") @@ -226,7 +229,7 @@ def pytest_generate_tests(metafunc): # type: ignore @pytest.fixture -def loop(loop_factory, fast, loop_debug): # type: ignore +def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def] """Return an instance of the event loop.""" policy = loop_factory() asyncio.set_event_loop_policy(policy) @@ -238,8 +241,8 @@ def loop(loop_factory, fast, loop_debug): # type: ignore @pytest.fixture -def proactor_loop(): # type: ignore - policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore +def proactor_loop(): # type: ignore[no-untyped-def] + policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined] asyncio.set_event_loop_policy(policy) with loop_context(policy.new_event_loop) as _loop: @@ -248,20 +251,20 @@ def proactor_loop(): # type: ignore @pytest.fixture -def aiohttp_unused_port(): # type: ignore +def aiohttp_unused_port(): # type: ignore[no-untyped-def] """Return a port that is unused on the current host.""" return _unused_port @pytest.fixture -def aiohttp_server(loop): # type: ignore +def aiohttp_server(loop): # type: ignore[no-untyped-def] """Factory to create a TestServer instance, given an app. 
aiohttp_server(app, **kwargs) """ servers = [] - async def go(app, *, port=None, **kwargs): # type: ignore + async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def] server = TestServer(app, port=port) await server.start_server(**kwargs) servers.append(server) @@ -269,7 +272,7 @@ async def go(app, *, port=None, **kwargs): # type: ignore yield go - async def finalize(): # type: ignore + async def finalize() -> None: while servers: await servers.pop().close() @@ -277,14 +280,14 @@ async def finalize(): # type: ignore @pytest.fixture -def aiohttp_raw_server(loop): # type: ignore +def aiohttp_raw_server(loop): # type: ignore[no-untyped-def] """Factory to create a RawTestServer instance, given a web handler. aiohttp_raw_server(handler, **kwargs) """ servers = [] - async def go(handler, *, port=None, **kwargs): # type: ignore + async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def] server = RawTestServer(handler, port=port) await server.start_server(**kwargs) servers.append(server) @@ -292,7 +295,7 @@ async def go(handler, *, port=None, **kwargs): # type: ignore yield go - async def finalize(): # type: ignore + async def finalize() -> None: while servers: await servers.pop().close() @@ -300,7 +303,7 @@ async def finalize(): # type: ignore @pytest.fixture -def aiohttp_client_cls(): # type: ignore +def aiohttp_client_cls() -> Type[TestClient]: """ Client class to use in ``aiohttp_client`` factory. @@ -327,7 +330,9 @@ def test_login(aiohttp_client): @pytest.fixture -def aiohttp_client(loop, aiohttp_client_cls): # type: ignore +def aiohttp_client( + loop: asyncio.AbstractEventLoop, aiohttp_client_cls: Type[TestClient] +) -> Generator[AiohttpClient, None, None]: """Factory to create a TestClient instance. 
aiohttp_client(app, **kwargs) @@ -336,7 +341,12 @@ def aiohttp_client(loop, aiohttp_client_cls): # type: ignore """ clients = [] - async def go(__param, *, server_kwargs=None, **kwargs): # type: ignore + async def go( + __param: Union[Application, BaseTestServer], + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any + ) -> TestClient: if isinstance(__param, Application): server_kwargs = server_kwargs or {} server = TestServer(__param, **server_kwargs) @@ -352,7 +362,7 @@ async def go(__param, *, server_kwargs=None, **kwargs): # type: ignore yield go - async def finalize(): # type: ignore + async def finalize() -> None: while clients: await clients.pop().close() diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index f1689915bbb..e62e7f377b6 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -1,6 +1,6 @@ import asyncio import socket -from typing import Any, Dict, List +from typing import Any, Dict, List, Type, Union from .abc import AbstractResolver @@ -37,7 +37,7 @@ async def resolve( hosts = [] for family, _, proto, _, address in infos: - if family == socket.AF_INET6 and address[3]: # type: ignore + if family == socket.AF_INET6 and address[3]: # type: ignore[misc] # This is essential for link-local IPv6 addresses. # LL IPv6 is a VERY rare case. Strictly speaking, we should use # getnameinfo() unconditionally, but performance makes sense. 
@@ -101,7 +101,8 @@ async def resolve( return hosts async def close(self) -> None: - return self._resolver.cancel() + self._resolver.cancel() -DefaultResolver = AsyncResolver if aiodns_default else ThreadedResolver +_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]] +DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver diff --git a/aiohttp/streams.py b/aiohttp/streams.py index d8182b3d5f9..a077b81b82d 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -62,14 +62,16 @@ async def __anext__(self) -> Tuple[bytes, bool]: class AsyncStreamReaderMixin: def __aiter__(self) -> AsyncStreamIterator[bytes]: - return AsyncStreamIterator(self.readline) # type: ignore + return AsyncStreamIterator(self.readline) # type: ignore[attr-defined] def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: """Returns an asynchronous iterator that yields chunks of size n. Python-3.5 available for Python 3.5+ only """ - return AsyncStreamIterator(lambda: self.read(n)) # type: ignore + return AsyncStreamIterator( + lambda: self.read(n) # type: ignore[attr-defined,no-any-return] + ) def iter_any(self) -> AsyncStreamIterator[bytes]: """Returns an asynchronous iterator that yields all the available @@ -77,7 +79,7 @@ def iter_any(self) -> AsyncStreamIterator[bytes]: Python-3.5 available for Python 3.5+ only """ - return AsyncStreamIterator(self.readany) # type: ignore + return AsyncStreamIterator(self.readany) # type: ignore[attr-defined] def iter_chunks(self) -> ChunkTupleAsyncStreamIterator: """Returns an asynchronous iterator that yields chunks of data @@ -86,7 +88,7 @@ def iter_chunks(self) -> ChunkTupleAsyncStreamIterator: Python-3.5 available for Python 3.5+ only """ - return ChunkTupleAsyncStreamIterator(self) # type: ignore + return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type] class StreamReader(AsyncStreamReaderMixin): @@ -111,7 +113,7 @@ def __init__( limit: int, *, timer: Optional[BaseTimerContext] = None, - loop: 
asyncio.AbstractEventLoop + loop: asyncio.AbstractEventLoop, ) -> None: self._protocol = protocol self._low_water = limit diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 35f4e0a79ec..36a860b0171 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -2,7 +2,6 @@ import asyncio import contextlib -import functools import gc import inspect import ipaddress @@ -11,7 +10,17 @@ import sys from abc import ABC, abstractmethod from types import TracebackType -from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Optional, Type, Union +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterator, + List, + Optional, + Type, + Union, + cast, +) from unittest import mock from aiosignal import Signal @@ -47,7 +56,7 @@ if PY_38: from unittest import IsolatedAsyncioTestCase as TestCase else: - from asynctest import TestCase # type: ignore + from asynctest import TestCase # type: ignore[no-redef] REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" @@ -75,7 +84,7 @@ def unused_port() -> int: """Return a port that is unused on the current host.""" with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.bind(("127.0.0.1", 0)) - return s.getsockname()[1] + return cast(int, s.getsockname()[1]) class BaseTestServer(ABC): @@ -277,8 +286,8 @@ def server(self) -> BaseTestServer: return self._server @property - def app(self) -> Application: - return getattr(self._server, "app", None) + def app(self) -> Optional[Application]: + return cast(Optional[Application], getattr(self._server, "app", None)) @property def session(self) -> ClientSession: @@ -446,21 +455,6 @@ async def get_client(self, server: TestServer) -> TestClient: return TestClient(server) -def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any: - """A decorator dedicated to use with asynchronous methods of an - AioHTTPTestCase. - - Handles executing an asynchronous function, using - the self.loop of the AioHTTPTestCase. 
- """ - - @functools.wraps(func, *args, **kwargs) - def new_func(self: Any, *inner_args: Any, **inner_kwargs: Any) -> Any: - return self.loop.run_until_complete(func(self, *inner_args, **inner_kwargs)) - - return new_func - - _LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop] @@ -496,7 +490,16 @@ def setup_test_loop( asyncio.set_event_loop(loop) if sys.platform != "win32" and not skip_watcher: policy = asyncio.get_event_loop_policy() - watcher = asyncio.SafeChildWatcher() + watcher: asyncio.AbstractChildWatcher + try: # Python >= 3.8 + # Refs: + # * https://github.com/pytest-dev/pytest-xdist/issues/620 + # * https://stackoverflow.com/a/58614689/595220 + # * https://bugs.python.org/issue35621 + # * https://github.com/python/cpython/pull/14344 + watcher = asyncio.ThreadedChildWatcher() + except AttributeError: # Python < 3.8 + watcher = asyncio.SafeChildWatcher() watcher.attach_loop(loop) with contextlib.suppress(NotImplementedError): policy.set_child_watcher(watcher) @@ -600,7 +603,7 @@ def make_mocked_request( headers, raw_hdrs, closing, - False, + None, False, chunked, URL(path), diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index ab2d7035f2e..1b13a4dbd0f 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -1,6 +1,15 @@ import json import os -from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, Tuple, Union +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Iterable, + Mapping, + Tuple, + Union, +) from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr from yarl import URL @@ -14,6 +23,8 @@ _MultiDict = MultiDict[str] _MultiDictProxy = MultiDictProxy[str] from http.cookies import BaseCookie, Morsel + + from .web import Request, StreamResponse else: _CIMultiDict = CIMultiDict _CIMultiDictProxy = CIMultiDictProxy @@ -37,5 +48,6 @@ "BaseCookie[str]", ] +Handler = Callable[["Request"], Awaitable["StreamResponse"]] PathLike = Union[str, "os.PathLike[str]"] diff --git 
a/aiohttp/web.py b/aiohttp/web.py index eb23dd3ff72..5aef0c00e5f 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -6,16 +6,16 @@ from collections.abc import Iterable from importlib import import_module from typing import ( - Any as Any, - Awaitable as Awaitable, - Callable as Callable, + Any, + Awaitable, + Callable, Iterable as TypingIterable, - List as List, - Optional as Optional, - Set as Set, - Type as Type, - Union as Union, - cast as cast, + List, + Optional, + Set, + Type, + Union, + cast, ) from .abc import AbstractAccessLogger @@ -278,7 +278,7 @@ try: from ssl import SSLContext except ImportError: # pragma: no cover - SSLContext = Any # type: ignore + SSLContext = Any # type: ignore[misc,assignment] HostSequence = TypingIterable[str] @@ -304,7 +304,7 @@ async def _run_app( ) -> None: # An internal function to actually do all dirty job for application running if asyncio.iscoroutine(app): - app = await app # type: ignore + app = await app # type: ignore[misc] app = cast(Application, app) @@ -441,9 +441,7 @@ def _cancel_tasks( for task in to_cancel: task.cancel() - loop.run_until_complete( - asyncio.gather(*to_cancel, loop=loop, return_exceptions=True) - ) + loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) for task in to_cancel: if task.cancelled(): @@ -477,9 +475,11 @@ def run_app( handle_signals: bool = True, reuse_address: Optional[bool] = None, reuse_port: Optional[bool] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, ) -> None: """Run an app locally""" - loop = asyncio.get_event_loop() + if loop is None: + loop = asyncio.new_event_loop() loop.set_debug(debug) # Configure if and only if in debugging mode and using the default logger @@ -489,27 +489,29 @@ def run_app( if not access_log.hasHandlers(): access_log.addHandler(logging.StreamHandler()) - try: - main_task = loop.create_task( - _run_app( - app, - host=host, - port=port, - path=path, - sock=sock, - shutdown_timeout=shutdown_timeout, - 
keepalive_timeout=keepalive_timeout, - ssl_context=ssl_context, - print=print, - backlog=backlog, - access_log_class=access_log_class, - access_log_format=access_log_format, - access_log=access_log, - handle_signals=handle_signals, - reuse_address=reuse_address, - reuse_port=reuse_port, - ) + main_task = loop.create_task( + _run_app( + app, + host=host, + port=port, + path=path, + sock=sock, + shutdown_timeout=shutdown_timeout, + keepalive_timeout=keepalive_timeout, + ssl_context=ssl_context, + print=print, + backlog=backlog, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + handle_signals=handle_signals, + reuse_address=reuse_address, + reuse_port=reuse_port, ) + ) + + try: + asyncio.set_event_loop(loop) loop.run_until_complete(main_task) except (GracefulExit, KeyboardInterrupt): # pragma: no cover pass diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 17a72f339d9..2056d90063d 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -46,10 +46,11 @@ if TYPE_CHECKING: # pragma: no cover + from .typedefs import Handler + _AppSignal = Signal[Callable[["Application"], Awaitable[None]]] _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]] - _Handler = Callable[[Request], Awaitable[StreamResponse]] - _Middleware = Callable[[Request, _Handler], Awaitable[StreamResponse]] + _Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]] _Middlewares = FrozenList[_Middleware] _MiddlewaresHandlers = Sequence[_Middleware] _Subapps = List["Application"] @@ -367,7 +368,7 @@ def __bool__(self) -> bool: class CleanupError(RuntimeError): @property def exceptions(self) -> List[BaseException]: - return self.args[1] + return cast(List[BaseException], self.args[1]) if TYPE_CHECKING: # pragma: no cover diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 0737c4f42d7..1924f3fbfa3 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -9,14 
+9,19 @@ Any, Awaitable, Callable, + Iterator, List, Optional, + Tuple, Union, cast, ) +from typing_extensions import Final + from . import hdrs from .abc import AbstractStreamWriter +from .helpers import ETAG_ANY, ETag from .typedefs import LooseHeaders from .web_exceptions import ( HTTPNotModified, @@ -35,7 +40,7 @@ _T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] -NOSENDFILE = bool(os.environ.get("AIOHTTP_NOSENDFILE")) +NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE")) class FileResponse(StreamResponse): @@ -100,6 +105,30 @@ async def _sendfile( await super().write_eof() return writer + @staticmethod + def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool: + if len(etags) == 1 and etags[0].value == ETAG_ANY: + return True + return any(etag.value == etag_value for etag in etags if not etag.is_weak) + + async def _not_modified( + self, request: "BaseRequest", etag_value: str, last_modified: float + ) -> Optional[AbstractStreamWriter]: + self.set_status(HTTPNotModified.status_code) + self._length_check = False + self.etag = etag_value # type: ignore[assignment] + self.last_modified = last_modified # type: ignore[assignment] + # Delete any Content-Length headers provided by user. 
HTTP 304 + # should always have empty response body + return await super().prepare(request) + + async def _precondition_failed( + self, request: "BaseRequest" + ) -> Optional[AbstractStreamWriter]: + self.set_status(HTTPPreconditionFailed.status_code) + self.content_length = 0 + return await super().prepare(request) + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: filepath = self._path @@ -112,20 +141,35 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter gzip = True loop = asyncio.get_event_loop() - st = await loop.run_in_executor(None, filepath.stat) + st: os.stat_result = await loop.run_in_executor(None, filepath.stat) - modsince = request.if_modified_since - if modsince is not None and st.st_mtime <= modsince.timestamp(): - self.set_status(HTTPNotModified.status_code) - self._length_check = False - # Delete any Content-Length headers provided by user. HTTP 304 - # should always have empty response body - return await super().prepare(request) + etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" + last_modified = st.st_mtime + + # https://tools.ietf.org/html/rfc7232#section-6 + ifmatch = request.if_match + if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch): + return await self._precondition_failed(request) unmodsince = request.if_unmodified_since - if unmodsince is not None and st.st_mtime > unmodsince.timestamp(): - self.set_status(HTTPPreconditionFailed.status_code) - return await super().prepare(request) + if ( + unmodsince is not None + and ifmatch is None + and st.st_mtime > unmodsince.timestamp() + ): + return await self._precondition_failed(request) + + ifnonematch = request.if_none_match + if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch): + return await self._not_modified(request, etag_value, last_modified) + + modsince = request.if_modified_since + if ( + modsince is not None + and ifnonematch is None + and st.st_mtime <= 
modsince.timestamp() + ): + return await self._not_modified(request, etag_value, last_modified) if hdrs.CONTENT_TYPE not in self.headers: ct, encoding = mimetypes.guess_type(str(filepath)) @@ -211,12 +255,14 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter self.set_status(status) if should_set_ct: - self.content_type = ct # type: ignore + self.content_type = ct # type: ignore[assignment] if encoding: self.headers[hdrs.CONTENT_ENCODING] = encoding if gzip: self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING - self.last_modified = st.st_mtime # type: ignore + + self.etag = etag_value # type: ignore[assignment] + self.last_modified = st.st_mtime # type: ignore[assignment] self.content_length = count self.headers[hdrs.ACCEPT_RANGES] = "bytes" diff --git a/aiohttp/web_log.py b/aiohttp/web_log.py index 2e898080ea9..2672a8b1981 100644 --- a/aiohttp/web_log.py +++ b/aiohttp/web_log.py @@ -200,10 +200,10 @@ def log(self, request: BaseRequest, response: StreamResponse, time: float) -> No if key.__class__ is str: extra[key] = value else: - k1, k2 = key # type: ignore - dct = extra.get(k1, {}) # type: ignore - dct[k2] = value # type: ignore - extra[k1] = dct # type: ignore + k1, k2 = key # type: ignore[misc] + dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type] + dct[k2] = value # type: ignore[index,has-type] + extra[k1] = dct # type: ignore[has-type,assignment] self.logger.info(self._log_format % tuple(values), extra=extra) except Exception: diff --git a/aiohttp/web_middlewares.py b/aiohttp/web_middlewares.py index df2ee57dca0..43d3c056e6a 100644 --- a/aiohttp/web_middlewares.py +++ b/aiohttp/web_middlewares.py @@ -2,6 +2,7 @@ import warnings from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar +from .typedefs import Handler from .web_exceptions import HTTPMove, HTTPPermanentRedirect from .web_request import Request from .web_response import StreamResponse @@ -41,8 +42,7 @@ def middleware(f: _Func) -> _Func: 
return f -_Handler = Callable[[Request], Awaitable[StreamResponse]] -_Middleware = Callable[[Request, _Handler], Awaitable[StreamResponse]] +_Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]] def normalize_path_middleware( @@ -50,7 +50,7 @@ def normalize_path_middleware( append_slash: bool = True, remove_slash: bool = False, merge_slashes: bool = True, - redirect_class: Type[HTTPMove] = HTTPPermanentRedirect + redirect_class: Type[HTTPMove] = HTTPPermanentRedirect, ) -> _Middleware: """ Middleware factory which produces a middleware that normalizes @@ -85,7 +85,7 @@ def normalize_path_middleware( correct_configuration = not (append_slash and remove_slash) assert correct_configuration, "Cannot both remove and append slash" - async def impl(request: Request, handler: _Handler) -> StreamResponse: + async def impl(request: Request, handler: Handler) -> StreamResponse: if isinstance(request.match_info.route, SystemRoute): paths_to_check = [] if "?" in request.raw_path: @@ -108,6 +108,7 @@ async def impl(request: Request, handler: _Handler) -> StreamResponse: paths_to_check.append(merged_slashes[:-1]) for path in paths_to_check: + path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg resolves, request = await _check_request_resolves(request, path) if resolves: raise redirect_class(request.raw_path + query) @@ -118,7 +119,7 @@ async def impl(request: Request, handler: _Handler) -> StreamResponse: def _fix_request_current_app(app: "Application") -> _Middleware: - async def impl(request: Request, handler: _Handler) -> StreamResponse: + async def impl(request: Request, handler: Handler) -> StreamResponse: with request.match_info.set_current_app(app): return await handler(request) diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index d5f53335b7d..aab4f31f297 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -1,5 +1,6 @@ import asyncio import asyncio.streams +import dataclasses import traceback from collections 
import deque from contextlib import suppress @@ -20,7 +21,6 @@ cast, ) -import attr import yarl from .abc import AbstractAccessLogger, AbstractAsyncAccessLogger, AbstractStreamWriter @@ -65,7 +65,16 @@ ] ERROR = RawRequestMessage( - "UNKNOWN", "/", HttpVersion10, {}, {}, True, False, False, False, yarl.URL("/") + "UNKNOWN", + "/", + HttpVersion10, + {}, # type: ignore[arg-type] + {}, # type: ignore[arg-type] + True, + None, + False, + False, + yarl.URL("/"), ) @@ -96,7 +105,7 @@ async def log( self.access_logger.log(request, response, self._loop.time() - request_start) -@attr.s(auto_attribs=True, frozen=True, slots=True) +@dataclasses.dataclass(frozen=True) class _ErrInfo: status: int exc: BaseException diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index b95a5631a83..d4d941d26e7 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -18,6 +18,7 @@ Mapping, MutableMapping, Optional, + Pattern, Set, Tuple, Union, @@ -26,13 +27,17 @@ from urllib.parse import parse_qsl from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy +from typing_extensions import Final from yarl import URL from . 
import hdrs from .abc import AbstractStreamWriter from .helpers import ( _SENTINEL, + ETAG_ANY, + LIST_QUOTED_ETAG_RE, ChainMapProxy, + ETag, HeadersMixin, is_expected_content_type, reify, @@ -75,31 +80,33 @@ class FileField: headers: "CIMultiDictProxy[str]" -_TCHAR = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" +_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" # '-' at the end to prevent interpretation as range in a char class -_TOKEN = fr"[{_TCHAR}]+" +_TOKEN: Final[str] = fr"[{_TCHAR}]+" -_QDTEXT = r"[{}]".format( +_QDTEXT: Final[str] = r"[{}]".format( r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F))) ) # qdtext includes 0x5C to escape 0x5D ('\]') # qdtext excludes obs-text (because obsoleted, and encoding not specified) -_QUOTED_PAIR = r"\\[\t !-~]" +_QUOTED_PAIR: Final[str] = r"\\[\t !-~]" -_QUOTED_STRING = r'"(?:{quoted_pair}|{qdtext})*"'.format( +_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format( qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR ) -_FORWARDED_PAIR = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( +_FORWARDED_PAIR: Final[ + str +] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( token=_TOKEN, quoted_string=_QUOTED_STRING ) -_QUOTED_PAIR_REPLACE_RE = re.compile(r"\\([\t !-~])") +_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") # same pattern as _QUOTED_PAIR but contains a capture group -_FORWARDED_PAIR_RE = re.compile(_FORWARDED_PAIR) +_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR) ############################################################ # HTTP Request @@ -214,12 +221,14 @@ def clone( if method is not sentinel: dct["method"] = method if rel_url is not sentinel: - new_url: URL = URL(rel_url) # type: ignore + new_url: URL = URL(rel_url) # type: ignore[arg-type] dct["url"] = new_url dct["path"] = str(new_url) if headers is not sentinel: # a copy semantic - new_headers = 
CIMultiDictProxy(CIMultiDict(headers)) # type: ignore + new_headers = CIMultiDictProxy( + CIMultiDict(headers) # type: ignore[arg-type] + ) dct["headers"] = new_headers dct["raw_headers"] = tuple( (k.encode("utf-8"), v.encode("utf-8")) for k, v in new_headers.items() @@ -229,11 +238,11 @@ def clone( kwargs: Dict[str, str] = {} if scheme is not sentinel: - kwargs["scheme"] = scheme # type: ignore + kwargs["scheme"] = scheme # type: ignore[assignment] if host is not sentinel: - kwargs["host"] = host # type: ignore + kwargs["host"] = host # type: ignore[assignment] if remote is not sentinel: - kwargs["remote"] = remote # type: ignore + kwargs["remote"] = remote # type: ignore[assignment] return self.__class__( message, @@ -399,8 +408,7 @@ def host(self) -> str: host = self._message.headers.get(hdrs.HOST) if host is not None: return host - else: - return socket.getfqdn() + return socket.getfqdn() @reify def remote(self) -> Optional[str]: @@ -411,10 +419,11 @@ def remote(self) -> Optional[str]: - overridden value by .clone(remote=new_remote) call. 
- peername of opened socket """ + if self._transport_peername is None: + return None if isinstance(self._transport_peername, (list, tuple)): - return self._transport_peername[0] - else: - return self._transport_peername + return str(self._transport_peername[0]) + return str(self._transport_peername) @reify def url(self) -> URL: @@ -447,9 +456,9 @@ def raw_path(self) -> str: return self._message.path @reify - def query(self) -> "MultiDictProxy[str]": + def query(self) -> MultiDictProxy[str]: """A multidict with all the variables in the query string.""" - return self._rel_url.query + return MultiDictProxy(self._rel_url.query) @reify def query_string(self) -> str: @@ -494,6 +503,52 @@ def if_unmodified_since(self) -> Optional[datetime.datetime]: """ return self._http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE)) + @staticmethod + def _etag_values(etag_header: str) -> Iterator[ETag]: + """Extract `ETag` objects from raw header.""" + if etag_header == ETAG_ANY: + yield ETag( + is_weak=False, + value=ETAG_ANY, + ) + else: + for match in LIST_QUOTED_ETAG_RE.finditer(etag_header): + is_weak, value, garbage = match.group(2, 3, 4) + # Any symbol captured by 4th group means + # that the following sequence is invalid. + if garbage: + break + + yield ETag( + is_weak=bool(is_weak), + value=value, + ) + + @classmethod + def _if_match_or_none_impl( + cls, header_value: Optional[str] + ) -> Optional[Tuple[ETag, ...]]: + if not header_value: + return None + + return tuple(cls._etag_values(header_value)) + + @reify + def if_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. + """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH)) + + @reify + def if_none_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-None-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. 
+ """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH)) + @reify def if_range(self) -> Optional[datetime.datetime]: """The value of If-Range HTTP header, or None. @@ -673,6 +728,7 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": tmp.write(chunk) size += len(chunk) if 0 < max_size < size: + tmp.close() raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) @@ -763,6 +819,19 @@ def _finish(self) -> None: for fut in self._disconnection_waiters: fut.cancel() + if self._post is None or self.content_type != "multipart/form-data": + return + + # NOTE: Release file descriptors for the + # NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom` + # NOTE: instances of files sent within multipart request body + # NOTE: via HTTP POST request. + for file_name, file_field_object in self._post.items(): + if not isinstance(file_field_object, FileField): + continue + + file_field_object.file.close() + async def wait_for_disconnection(self) -> None: loop = asyncio.get_event_loop() fut = loop.create_future() # type: asyncio.Future[None] diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index ac3ee266243..634e38b0725 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -28,12 +28,16 @@ from . 
import hdrs, payload from .abc import AbstractStreamWriter from .helpers import ( + ETAG_ANY, PY_38, + QUOTED_ETAG_RE, CookieMixin, + ETag, HeadersMixin, populate_with_cookies, rfc822_formatted_time, sentinel, + validate_etag_value, ) from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11 from .payload import Payload @@ -53,7 +57,7 @@ if not PY_38: # allow samesite to be used in python < 3.8 # already permitted in python 3.8, see https://bugs.python.org/issue29613 - Morsel._reserved["samesite"] = "SameSite" # type: ignore + Morsel._reserved["samesite"] = "SameSite" # type: ignore[attr-defined] class ContentCoding(enum.Enum): @@ -124,8 +128,11 @@ def prepared(self) -> bool: return self._payload_writer is not None @property - def task(self) -> "asyncio.Task[None]": - return getattr(self._req, "task", None) + def task(self) -> "Optional[asyncio.Task[None]]": + if self._req: + return self._req.task + else: + return None @property def status(self) -> int: @@ -268,6 +275,43 @@ def last_modified( elif isinstance(value, str): self._headers[hdrs.LAST_MODIFIED] = value + @property + def etag(self) -> Optional[ETag]: + quoted_value = self._headers.get(hdrs.ETAG) + if not quoted_value: + return None + elif quoted_value == ETAG_ANY: + return ETag(value=ETAG_ANY) + match = QUOTED_ETAG_RE.fullmatch(quoted_value) + if not match: + return None + is_weak, value = match.group(1, 2) + return ETag( + is_weak=bool(is_weak), + value=value, + ) + + @etag.setter + def etag(self, value: Optional[Union[ETag, str]]) -> None: + if value is None: + self._headers.pop(hdrs.ETAG, None) + elif (isinstance(value, str) and value == ETAG_ANY) or ( + isinstance(value, ETag) and value.value == ETAG_ANY + ): + self._headers[hdrs.ETAG] = ETAG_ANY + elif isinstance(value, str): + validate_etag_value(value) + self._headers[hdrs.ETAG] = f'"{value}"' + elif isinstance(value, ETag) and isinstance(value.value, str): + validate_etag_value(value.value) + hdr_value = f'W/"{value.value}"' if 
value.is_weak else f'"{value.value}"' + self._headers[hdrs.ETAG] = hdr_value + else: + raise ValueError( + f"Unsupported etag type: {type(value)}. " + f"etag must be str, ETag or None" + ) + def _generate_content_type_header( self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE ) -> None: @@ -360,7 +404,7 @@ async def _prepare_headers(self) -> None: elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204): del headers[hdrs.CONTENT_LENGTH] - if self.status != 204: + if self.status not in (204, 304): headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") headers.setdefault(hdrs.DATE, rfc822_formatted_time()) headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) diff --git a/aiohttp/web_routedef.py b/aiohttp/web_routedef.py index 9e66de5453b..787d9cbdeca 100644 --- a/aiohttp/web_routedef.py +++ b/aiohttp/web_routedef.py @@ -4,7 +4,6 @@ from typing import ( TYPE_CHECKING, Any, - Awaitable, Callable, Dict, Iterator, @@ -18,7 +17,7 @@ from . import hdrs from .abc import AbstractView -from .typedefs import PathLike +from .typedefs import Handler, PathLike if TYPE_CHECKING: # pragma: no cover from .web_request import Request @@ -52,8 +51,7 @@ def register(self, router: UrlDispatcher) -> List[AbstractRoute]: pass # pragma: no cover -_SimpleHandler = Callable[[Request], Awaitable[StreamResponse]] -_HandlerType = Union[Type[AbstractView], _SimpleHandler] +_HandlerType = Union[Type[AbstractView], Handler] @dataclasses.dataclass(frozen=True, repr=False) @@ -170,7 +168,7 @@ def __getitem__(self, index: int) -> AbstractRouteDef: def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ... 
- def __getitem__(self, index): # type: ignore + def __getitem__(self, index): # type: ignore[no-untyped-def] return self._items[index] def __iter__(self) -> Iterator[AbstractRouteDef]: diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index 4bcb9a2d469..c5294ffe295 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -18,7 +18,7 @@ try: from ssl import SSLContext except ImportError: - SSLContext = object # type: ignore + SSLContext = object # type: ignore[misc,assignment] __all__ = ( @@ -177,7 +177,9 @@ def __init__( self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0 ) -> None: loop = asyncio.get_event_loop() - if not isinstance(loop, asyncio.ProactorEventLoop): # type: ignore + if not isinstance( + loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] + ): raise RuntimeError( "Named Pipes only available in proactor" "loop under windows" ) @@ -193,7 +195,9 @@ async def start(self) -> None: loop = asyncio.get_event_loop() server = self._runner.server assert server is not None - _server = await loop.start_serving_pipe(server, self._path) # type: ignore + _server = await loop.start_serving_pipe( # type: ignore[attr-defined] + server, self._path + ) self._server = _server[0] @@ -396,7 +400,7 @@ async def _make_server(self) -> Server: self._app.freeze() return Server( - self._app._handle, # type: ignore + self._app._handle, # type: ignore[arg-type] request_factory=self._make_request, **self._kwargs, ) diff --git a/aiohttp/web_server.py b/aiohttp/web_server.py index 8fdee1891cc..1c9fbf34ca8 100644 --- a/aiohttp/web_server.py +++ b/aiohttp/web_server.py @@ -19,7 +19,7 @@ def __init__( *, request_factory: Optional[_RequestFactory] = None, debug: Optional[bool] = None, - **kwargs: Any + **kwargs: Any, ) -> None: if debug is not None: warnings.warn( diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 32d5e4378b5..03f4acadd35 100644 --- a/aiohttp/web_urldispatcher.py +++ 
b/aiohttp/web_urldispatcher.py @@ -29,14 +29,14 @@ cast, ) -from typing_extensions import TypedDict -from yarl import URL, __version__ as yarl_version # type: ignore +from typing_extensions import Final, TypedDict +from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined] from . import hdrs from .abc import AbstractMatchInfo, AbstractRouter, AbstractView from .helpers import DEBUG, iscoroutinefunction from .http import HttpVersion11 -from .typedefs import PathLike +from .typedefs import Handler, PathLike from .web_exceptions import ( HTTPException, HTTPExpectationFailed, @@ -70,14 +70,17 @@ else: BaseDict = dict -YARL_VERSION = tuple(map(int, yarl_version.split(".")[:2])) +YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) -HTTP_METHOD_RE = re.compile(r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$") -ROUTE_RE = re.compile(r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})") -PATH_SEP = re.escape("/") +HTTP_METHOD_RE: Final[Pattern[str]] = re.compile( + r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$" +) +ROUTE_RE: Final[Pattern[str]] = re.compile( + r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})" +) +PATH_SEP: Final[str] = re.escape("/") -_WebHandler = Callable[[Request], Awaitable[StreamResponse]] _ExpectHandler = Callable[[Request], Awaitable[None]] _Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] @@ -152,7 +155,7 @@ class AbstractRoute(abc.ABC): def __init__( self, method: str, - handler: Union[_WebHandler, Type[AbstractView]], + handler: Union[Handler, Type[AbstractView]], *, expect_handler: Optional[_ExpectHandler] = None, resource: Optional[AbstractResource] = None, @@ -189,7 +192,7 @@ def method(self) -> str: return self._method @property - def handler(self) -> _WebHandler: + def handler(self) -> Handler: return self._handler @property @@ -222,7 +225,7 @@ def __init__(self, match_dict: Dict[str, str], route: AbstractRoute): self._frozen = False @property - def handler(self) -> _WebHandler: + def handler(self) -> 
Handler: return self._route.handler @property @@ -237,7 +240,7 @@ def expect_handler(self) -> _ExpectHandler: def http_exception(self) -> Optional[HTTPException]: return None - def get_info(self) -> _InfoDict: # type: ignore + def get_info(self) -> _InfoDict: # type: ignore[override] return self._route.get_info() @property @@ -317,7 +320,7 @@ def __init__(self, *, name: Optional[str] = None) -> None: def add_route( self, method: str, - handler: Union[Type[AbstractView], _WebHandler], + handler: Union[Type[AbstractView], Handler], *, expect_handler: Optional[_ExpectHandler] = None, ) -> "ResourceRoute": @@ -402,7 +405,7 @@ def raw_match(self, path: str) -> bool: def get_info(self) -> _InfoDict: return {"path": self._path} - def url_for(self) -> URL: # type: ignore + def url_for(self) -> URL: # type: ignore[override] return URL.build(path=self._path, encoded=True) def __repr__(self) -> str: @@ -550,7 +553,7 @@ def __init__( ), } - def url_for( # type: ignore + def url_for( # type: ignore[override] self, *, filename: Union[str, Path], @@ -602,7 +605,7 @@ def get_info(self) -> _InfoDict: "routes": self._routes, } - def set_options_route(self, handler: _WebHandler) -> None: + def set_options_route(self, handler: Handler) -> None: if "OPTIONS" in self._routes: raise RuntimeError("OPTIONS route was set already") self._routes["OPTIONS"] = ResourceRoute( @@ -859,7 +862,7 @@ class ResourceRoute(AbstractRoute): def __init__( self, method: str, - handler: Union[_WebHandler, Type[AbstractView]], + handler: Union[Handler, Type[AbstractView]], resource: AbstractResource, *, expect_handler: Optional[_ExpectHandler] = None, @@ -923,7 +926,9 @@ class View(AbstractView): async def _iter(self) -> StreamResponse: if self.request.method not in hdrs.METH_ALL: self._raise_allowed_methods() - method = getattr(self, self.request.method.lower(), None) + method: Callable[[], Awaitable[StreamResponse]] = getattr( + self, self.request.method.lower(), None + ) if method is None: 
self._raise_allowed_methods() resp = await method() @@ -1067,7 +1072,7 @@ def add_route( self, method: str, path: str, - handler: Union[_WebHandler, Type[AbstractView]], + handler: Union[Handler, Type[AbstractView]], *, name: Optional[str] = None, expect_handler: Optional[_ExpectHandler] = None, @@ -1109,15 +1114,13 @@ def add_static( self.register_resource(resource) return resource - def add_head(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: + def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method HEAD """ return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs) - def add_options( - self, path: str, handler: _WebHandler, **kwargs: Any - ) -> AbstractRoute: + def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method OPTIONS """ @@ -1126,7 +1129,7 @@ def add_options( def add_get( self, path: str, - handler: _WebHandler, + handler: Handler, *, name: Optional[str] = None, allow_head: bool = True, @@ -1141,29 +1144,25 @@ def add_get( resource.add_route(hdrs.METH_HEAD, handler, **kwargs) return resource.add_route(hdrs.METH_GET, handler, **kwargs) - def add_post(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: + def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method POST """ return self.add_route(hdrs.METH_POST, path, handler, **kwargs) - def add_put(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: + def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method PUT """ return self.add_route(hdrs.METH_PUT, path, handler, **kwargs) - def add_patch( - self, path: str, handler: _WebHandler, **kwargs: Any - ) -> AbstractRoute: + def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method 
PATCH """ return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs) - def add_delete( - self, path: str, handler: _WebHandler, **kwargs: Any - ) -> AbstractRoute: + def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method DELETE """ diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 6b295e773b1..2303faa2c6a 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -4,10 +4,11 @@ import dataclasses import hashlib import json -from typing import Any, Iterable, Optional, Tuple +from typing import Any, Iterable, Optional, Tuple, cast import async_timeout from multidict import CIMultiDict +from typing_extensions import Final from . import hdrs from .abc import AbstractStreamWriter @@ -38,7 +39,7 @@ "WSMsgType", ) -THRESHOLD_CONNLOST_ACCESS = 5 +THRESHOLD_CONNLOST_ACCESS: Final[int] = 5 @dataclasses.dataclass(frozen=True) @@ -105,10 +106,10 @@ def __init__( self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat - self._heartbeat_cb = None + self._heartbeat_cb: Optional[asyncio.TimerHandle] = None if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 - self._pong_response_cb = None + self._pong_response_cb: Optional[asyncio.TimerHandle] = None self._compress = compress self._max_msg_size = max_msg_size @@ -125,16 +126,18 @@ def _reset_heartbeat(self) -> None: self._cancel_heartbeat() if self._heartbeat is not None: + assert self._loop is not None self._heartbeat_cb = call_later( self._send_heartbeat, self._heartbeat, self._loop ) def _send_heartbeat(self) -> None: if self._heartbeat is not None and not self._closed: + assert self._loop is not None # fire-and-forget a task is not perfect but maybe ok for # sending ping. Otherwise we need a long-living heartbeat # task in the class. 
- self._loop.create_task(self._writer.ping()) # type: ignore + self._loop.create_task(self._writer.ping()) # type: ignore[union-attr] if self._pong_response_cb is not None: self._pong_response_cb.cancel() @@ -216,9 +219,9 @@ def _handshake( accept_val = base64.b64encode( hashlib.sha1(key.encode() + WS_KEY).digest() ).decode() - response_headers = CIMultiDict( # type: ignore + response_headers = CIMultiDict( # type: ignore[var-annotated] { - hdrs.UPGRADE: "websocket", # type: ignore + hdrs.UPGRADE: "websocket", # type: ignore[arg-type] hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: accept_val, } @@ -239,7 +242,12 @@ def _handshake( if protocol: response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol - return (response_headers, protocol, compress, notakeover) # type: ignore + return ( + response_headers, + protocol, + compress, + notakeover, + ) # type: ignore[return-value] def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]: self._loop = request._loop @@ -338,7 +346,7 @@ async def send_json( ) -> None: await self.send_str(dumps(data), compress=compress) - async def write_eof(self) -> None: # type: ignore + async def write_eof(self) -> None: # type: ignore[override] if self._eof_sent: return if self._payload_writer is None: @@ -471,13 +479,13 @@ async def receive_str(self, *, timeout: Optional[float] = None) -> str: msg.type, msg.data ) ) - return msg.data + return cast(str, msg.data) async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: msg = await self.receive(timeout) if msg.type != WSMsgType.BINARY: raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") - return msg.data + return cast(bytes, msg.data) async def receive_json( self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None diff --git a/aiohttp/worker.py b/aiohttp/worker.py index ba4d805d3e7..b164c801509 100644 --- a/aiohttp/worker.py +++ b/aiohttp/worker.py @@ -22,14 +22,14 @@ SSLContext = ssl.SSLContext except 
ImportError: # pragma: no cover - ssl = None # type: ignore - SSLContext = object # type: ignore + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] __all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker") -class GunicornWebWorker(base.Worker): +class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported] DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default @@ -97,7 +97,7 @@ async def _run(self) -> None: # If our parent changed then we shut down. pid = os.getpid() try: - while self.alive: # type: ignore + while self.alive: # type: ignore[has-type] self.notify() cnt = server.requests_count diff --git a/docs/Makefile b/docs/Makefile index 3837ff354b0..22eaead2649 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -2,7 +2,7 @@ # # You can set these variables from the command line. -SPHINXOPTS = +SPHINXOPTS = -W --keep-going -n SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build diff --git a/docs/_static/aiohttp-icon-128x128.png b/docs/_static/aiohttp-icon-128x128.png deleted file mode 100644 index e486a04e36e..00000000000 Binary files a/docs/_static/aiohttp-icon-128x128.png and /dev/null differ diff --git a/docs/_static/css/logo-adjustments.css b/docs/_static/css/logo-adjustments.css new file mode 100644 index 00000000000..b93746f65fb --- /dev/null +++ b/docs/_static/css/logo-adjustments.css @@ -0,0 +1,7 @@ +.sphinxsidebarwrapper>h1.logo { + display: none; +} + +.sphinxsidebarwrapper>p.logo>a>img.logo { + width: 65%; +} diff --git a/docs/abc.rst b/docs/abc.rst index 16e377e7c6c..e35bdd62c3d 100644 --- a/docs/abc.rst +++ b/docs/abc.rst @@ -19,21 +19,21 @@ aiohttp.web is built on top of few concepts: *application*, *router*, *router* is a *plugable* part: a library user may build a *router* from scratch, all other parts should work with new router seamlessly. 
-:class:`AbstractRouter` has the only mandatory method: -:meth:`AbstractRouter.resolve` coroutine. It must return an -:class:`AbstractMatchInfo` instance. +:class:`aiohttp.abc.AbstractRouter` has the only mandatory method: +:meth:`aiohttp.abc.AbstractRouter.resolve` coroutine. It must return an +:class:`aiohttp.abc.AbstractMatchInfo` instance. If the requested URL handler is found -:meth:`AbstractMatchInfo.handler` is a :term:`web-handler` for -requested URL and :attr:`AbstractMatchInfo.http_exception` is ``None``. +:meth:`aiohttp.abc.AbstractMatchInfo.handler` is a :term:`web-handler` for +requested URL and :attr:`aiohttp.abc.AbstractMatchInfo.http_exception` is ``None``. -Otherwise :attr:`AbstractMatchInfo.http_exception` is an instance of +Otherwise :attr:`aiohttp.abc.AbstractMatchInfo.http_exception` is an instance of :exc:`~aiohttp.web.HTTPException` like *404: NotFound* or *405: Method -Not Allowed*. :meth:`AbstractMatchInfo.handler` raises -:attr:`~AbstractMatchInfo.http_exception` on call. +Not Allowed*. :meth:`aiohttp.abc.AbstractMatchInfo.handler` raises +:attr:`~aiohttp.abc.AbstractMatchInfo.http_exception` on call. -.. class:: aiohttp.abc.AbstractRouter +.. class:: AbstractRouter Abstract router, :class:`aiohttp.web.Application` accepts it as *router* parameter and returns as @@ -49,12 +49,12 @@ Not Allowed*. :meth:`AbstractMatchInfo.handler` raises :attr:`aiohttp.web.Request.match_info` equals to ``None`` at resolving stage. - :return: :class:`AbstractMatchInfo` instance. + :return: :class:`aiohttp.abc.AbstractMatchInfo` instance. -.. class:: aiohttp.abc.AbstractMatchInfo +.. class:: AbstractMatchInfo - Abstract *match info*, returned by :meth:`AbstractRouter.resolve` call. + Abstract *match info*, returned by :meth:`aiohttp.abc.AbstractRouter.resolve` call. .. attribute:: http_exception @@ -100,9 +100,9 @@ attribute. Abstract Cookie Jar ------------------- -.. class:: aiohttp.abc.AbstractCookieJar +.. 
class:: AbstractCookieJar - The cookie jar instance is available as :attr:`ClientSession.cookie_jar`. + The cookie jar instance is available as :attr:`aiohttp.ClientSession.cookie_jar`. The jar contains :class:`~http.cookies.Morsel` items for storing internal cookie data. @@ -161,12 +161,12 @@ Abstract Cookie Jar .. versionadded:: 3.8 -Abstract Abstract Access Logger +Abstract Access Logger ------------------------------- -.. class:: aiohttp.abc.AbstractAccessLogger +.. class:: AbstractAccessLogger - An abstract class, base for all :class:`RequestHandler` + An abstract class, base for all :class:`aiohttp.web.RequestHandler` ``access_logger`` implementations Method ``log`` should be overridden. diff --git a/docs/built_with.rst b/docs/built_with.rst index 2a221522125..0a4c036adf5 100644 --- a/docs/built_with.rst +++ b/docs/built_with.rst @@ -18,11 +18,13 @@ You can also add a **Built with aiohttp** link somewhere in your project, pointing to ``_. +* `repo-peek `_ CLI tool to open a remote repo locally quickly. * `Molotov `_ Load testing tool. * `Arsenic `_ Async WebDriver. * `Home Assistant `_ Home Automation Platform. * `Backend.AI `_ Code execution API service. * `doh-proxy `_ DNS Over HTTPS Proxy. * `Mariner `_ Command-line torrent searcher. -* `DEEPaaS API `_ REST API for Machine learning, Deep learning and artificial intelligence applications. +* `DEEPaaS API `_ REST API for Machine learning, Deep learning and artificial intelligence applications. * `BentoML `_ Machine Learning model serving framework +* `salted `_ fast link check library (for HTML, Markdown, LaTeX, ...) with CLI diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 80c6bbb4357..7a2f4bef217 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -56,6 +56,17 @@ For *text/plain* :: await session.post(url, data='Привет, Мир!') +.. 
note:: + + ``Authorization`` header will be removed if you get redirected + to a different host or protocol, except the case when ``HTTP -> HTTPS`` + redirect is performed on the same host. + +.. versionchanged:: 4.0 + + Started keeping the ``Authorization`` header during ``HTTP -> HTTPS`` + redirects when the host remains the same. + Custom Cookies -------------- @@ -295,7 +306,7 @@ nature are installed to perform their job in each signal handle:: All signals take as a parameters first, the :class:`ClientSession` instance used by the specific request related to that signals and -second, a :class:`SimpleNamespace` instance called +second, a :class:`~types.SimpleNamespace` instance called ``trace_config_ctx``. The ``trace_config_ctx`` object can be used to share the state through to the different signals that belong to the same request and to the same :class:`TraceConfig` class, perhaps:: @@ -310,7 +321,7 @@ same request and to the same :class:`TraceConfig` class, perhaps:: The ``trace_config_ctx`` param is by default a -:class:`SimpleNampespace` that is initialized at the beginning of the +:class:`~types.SimpleNamespace` that is initialized at the beginning of the request flow. However, the factory used to create this object can be overwritten using the ``trace_config_ctx_factory`` constructor param of the :class:`TraceConfig` class. @@ -587,7 +598,7 @@ as it results in more compact code:: This approach can be successfully used to define numerous of session given certain requirements. It benefits from having a single location where :class:`aiohttp.ClientSession` -instances are created and where artifacts such as :class:`aiohttp.connector.BaseConnector` +instances are created and where artifacts such as :class:`aiohttp.BaseConnector` can be safely shared between sessions if needed. 
In the end all you have to do is to close all sessions after `yield` statement:: diff --git a/docs/client_quickstart.rst b/docs/client_quickstart.rst index e030ce92d38..7f7d44caad0 100644 --- a/docs/client_quickstart.rst +++ b/docs/client_quickstart.rst @@ -99,9 +99,9 @@ following code:: You can see that the URL has been correctly encoded by printing the URL. -For sending data with multiple values for the same key :class:`MultiDict` may be -used; the library support nested lists (``{'key': ['value1', 'value2']}``) -alternative as well. +For sending data with multiple values for the same key +:class:`~multidict.MultiDict` may be used; the library support nested lists +(``{'key': ['value1', 'value2']}``) alternative as well. It is also possible to pass a list of 2 item tuples as parameters, in that case you can specify multiple values for each key:: @@ -318,7 +318,7 @@ You can set the ``filename`` and ``content_type`` explicitly:: await session.post(url, data=data) If you pass a file object as data parameter, aiohttp will stream it to -the server automatically. Check :class:`~aiohttp.streams.StreamReader` +the server automatically. Check :class:`~aiohttp.StreamReader` for supported format information. .. seealso:: :ref:`aiohttp-multipart` diff --git a/docs/client_reference.rst b/docs/client_reference.rst index d7bf05f87e4..ed935a2da1a 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -88,14 +88,14 @@ The client session supports the context manager protocol for self closing. that generation. Note that ``Content-Length`` autogeneration can't be skipped. - Iterable of :class:`str` or :class:`~aiohttp.istr` (optional) + Iterable of :class:`str` or :class:`~multidict.istr` (optional) :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) :param version: supported HTTP version, ``HTTP 1.1`` by default. - :param cookie_jar: Cookie Jar, :class:`AbstractCookieJar` instance. 
+ :param cookie_jar: Cookie Jar, :class:`~aiohttp.abc.AbstractCookieJar` instance. By default every session instance has own private cookie jar for automatic cookies processing but user may redefine this behavior @@ -108,7 +108,7 @@ The client session supports the context manager protocol for self closing. :class:`aiohttp.DummyCookieJar` instance can be provided. - :param callable json_serialize: Json *serializer* callable. + :param collections.abc.Callable json_serialize: Json *serializer* callable. By default :func:`json.dumps` function. @@ -210,7 +210,7 @@ The client session supports the context manager protocol for self closing. .. attribute:: cookie_jar - The session cookies, :class:`~aiohttp.AbstractCookieJar` instance. + The session cookies, :class:`~aiohttp.abc.AbstractCookieJar` instance. Gives access to cookie jar's content and modifiers. @@ -261,7 +261,7 @@ The client session supports the context manager protocol for self closing. Set of headers for which autogeneration skipped. - :class:`frozenset` of :class:`str` or :class:`~aiohttp.istr` (optional) + :class:`frozenset` of :class:`str` or :class:`~multidict.istr` (optional) .. versionadded:: 3.7 @@ -293,7 +293,7 @@ The client session supports the context manager protocol for self closing. Should :meth:`ClientResponse.raise_for_status()` be called for each response - Either :class:`bool` or :class:`callable` + Either :class:`bool` or :class:`collections.abc.Callable` .. versionadded:: 3.7 @@ -351,8 +351,8 @@ The client session supports the context manager protocol for self closing. Allowed values are: - :class:`collections.abc.Mapping` e.g. :class:`dict`, - :class:`aiohttp.MultiDict` or - :class:`aiohttp.MultiDictProxy` + :class:`multidict.MultiDict` or + :class:`multidict.MultiDictProxy` - :class:`collections.abc.Iterable` e.g. :class:`tuple` or :class:`list` - :class:`str` with preferably url-encoded content @@ -386,7 +386,7 @@ The client session supports the context manager protocol for self closing. 
passed. Using ``skip_auto_headers`` parameter allows to skip that generation. - Iterable of :class:`str` or :class:`~aiohttp.istr` + Iterable of :class:`str` or :class:`~multidict.istr` (optional) :param aiohttp.BasicAuth auth: an object that represents HTTP @@ -492,7 +492,7 @@ The client session supports the context manager protocol for self closing. Use ``ssl=ssl_context`` - :param abc.Mapping proxy_headers: HTTP headers to send to the proxy if the + :param collections.abc.Mapping proxy_headers: HTTP headers to send to the proxy if the parameter proxy has been provided. .. versionadded:: 2.3 @@ -1082,14 +1082,14 @@ TCPConnector very rare cases. :param int family: TCP socket family, both IPv4 and IPv6 by default. - For *IPv4* only use :const:`socket.AF_INET`, - for *IPv6* only -- :const:`socket.AF_INET6`. + For *IPv4* only use :data:`socket.AF_INET`, + for *IPv6* only -- :data:`socket.AF_INET6`. *family* is ``0`` by default, that means both IPv4 and IPv6 are accepted. To specify only concrete version please pass - :const:`socket.AF_INET` or - :const:`socket.AF_INET6` explicitly. + :data:`socket.AF_INET` or + :data:`socket.AF_INET6` explicitly. :param ssl.SSLContext ssl_context: SSL context used for processing *HTTPS* requests (optional). @@ -1110,8 +1110,8 @@ TCPConnector .. attribute:: family - *TCP* socket family e.g. :const:`socket.AF_INET` or - :const:`socket.AF_INET6` + *TCP* socket family e.g. :data:`socket.AF_INET` or + :data:`socket.AF_INET6` Read-only property. @@ -1213,7 +1213,7 @@ Response object .. class:: ClientResponse - Client response returned by :meth:`ClientSession.request` and family. + Client response returned by :meth:`aiohttp.ClientSession.request` and family. User never creates the instance of ClientResponse class but gets it from API calls. @@ -1225,11 +1225,11 @@ Response object assert resp.status == 200 After exiting from ``async with`` block response object will be - *released* (see :meth:`release` coroutine). 
+ *released* (see :meth:`release` method). .. attribute:: version - Response's version, :class:`HttpVersion` instance. + Response's version, :class:`~aiohttp.protocol.HttpVersion` instance. .. attribute:: status @@ -1354,7 +1354,7 @@ Response object .. seealso:: :meth:`close`, :meth:`release`. - .. comethod:: release() + .. method:: release() It is not required to call `release` on the response object. When the client fully receives the payload, the @@ -1428,7 +1428,7 @@ Response object responses. Autodetection works pretty fine anyway. - :param callable loads: :func:`callable` used for loading *JSON* + :param collections.abc.Callable loads: :term:`callable` used for loading *JSON* data, :func:`json.loads` by default. :param str content_type: specify response's content-type, if content type @@ -1441,7 +1441,7 @@ Response object .. attribute:: request_info - A namedtuple with request URL and headers from :class:`ClientRequest` + A namedtuple with request URL and headers from :class:`~aiohttp.ClientRequest` object, :class:`aiohttp.RequestInfo` instance. .. method:: get_encoding() @@ -1562,7 +1562,7 @@ manually. single message, ``None`` for not overriding per-socket setting. - :param callable dumps: any :term:`callable` that accepts an object and + :param collections.abc.Callable dumps: any :term:`callable` that accepts an object and returns a JSON string (:func:`json.dumps` by default). @@ -1630,7 +1630,7 @@ manually. A :ref:`coroutine` that calls :meth:`receive_str` and loads the JSON string to a Python dict. - :param callable loads: any :term:`callable` that accepts + :param collections.abc.Callable loads: any :term:`callable` that accepts :class:`str` and returns :class:`dict` with parsed JSON (:func:`json.loads` by default). @@ -1688,12 +1688,29 @@ ClientTimeout .. versionadded:: 3.3 +ETag +^^^^ + +.. class:: ETag(name, is_weak=False) + + Represents `ETag` identifier. + + .. attribute:: value + + Value of corresponding etag without quotes. + + .. 
attribute:: is_weak + + Flag indicates that etag is weak (has `W/` prefix). + + .. versionadded:: 3.8 + RequestInfo ^^^^^^^^^^^ .. class:: RequestInfo() - A data class with request URL and headers from :class:`ClientRequest` + A data class with request URL and headers from :class:`~aiohttp.ClientRequest` object, available as :attr:`ClientResponse.request_info` attribute. .. attribute:: url @@ -1728,7 +1745,7 @@ BasicAuth Should be used for specifying authorization data in client API, - e.g. *auth* parameter for :meth:`ClientSession.request`. + e.g. *auth* parameter for :meth:`ClientSession.request() `. .. classmethod:: decode(auth_header, encoding='latin1') @@ -1780,7 +1797,7 @@ CookieJar The class implements :class:`collections.abc.Iterable`, :class:`collections.abc.Sized` and - :class:`aiohttp.AbstractCookieJar` interfaces. + :class:`aiohttp.abc.AbstractCookieJar` interfaces. Implements cookie storage adhering to RFC 6265. @@ -1900,7 +1917,7 @@ added with at least one optional argument to :meth:`add_field`_ and `official Nginx documentation `_. @@ -86,8 +86,8 @@ First configure HTTP server itself: } } -This config listens on port ``80`` for server named ``example.com`` -and redirects everything to ``aiohttp`` backend group. +This config listens on port ``80`` for a server named ``example.com`` +and redirects everything to the ``aiohttp`` backend group. Also it serves static files from ``/path/to/app/static`` path as ``example.com/static``. @@ -124,20 +124,20 @@ selection. .. note:: - Nginx is not the only existing *reverse proxy server* but the most + Nginx is not the only existing *reverse proxy server*, but it's the most popular one. Alternatives like HAProxy may be used as well. Supervisord ----------- -After configuring Nginx we need to start our aiohttp backends. Better -to use some tool for starting them automatically after system reboot +After configuring Nginx we need to start our aiohttp backends. 
It's best +to use some tool for starting them automatically after a system reboot or backend crash. -There are very many ways to do it: Supervisord, Upstart, Systemd, +There are many ways to do it: Supervisord, Upstart, Systemd, Gaffer, Circus, Runit etc. -Here we'll use `Supervisord `_ for example: +Here we'll use `Supervisord `_ as an example: .. code-block:: cfg @@ -159,7 +159,7 @@ Here we'll use `Supervisord `_ for example: aiohttp server -------------- -The last step is preparing aiohttp server for working with supervisord. +The last step is preparing the aiohttp server to work with supervisord. Assuming we have properly configured :class:`aiohttp.web.Application` and port is specified by command line, the task is trivial: @@ -196,17 +196,17 @@ aiohttp can be deployed using `Gunicorn pre-fork worker model. Gunicorn launches your app as worker processes for handling incoming requests. -In opposite to deployment with :ref:`bare Nginx -` the solution does not need to -manually run several aiohttp processes and use tool like supervisord -for monitoring it. But nothing is for free: running aiohttp +As opposed to deployment with :ref:`bare Nginx +`, this solution does not need to +manually run several aiohttp processes and use a tool like supervisord +to monitor them. But nothing is free: running aiohttp application under gunicorn is slightly slower. Prepare environment ------------------- -You firstly need to setup your deployment environment. This example is +You first need to setup your deployment environment. This example is based on `Ubuntu `_ 16.04. 
Create a directory for your application:: @@ -214,7 +214,7 @@ Create a directory for your application:: >> mkdir myapp >> cd myapp -Create Python virtual environment:: +Create a Python virtual environment:: >> python3 -m venv venv >> source venv/bin/activate diff --git a/docs/index.rst b/docs/index.rst index 78663bde26b..0f627bd170f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -148,7 +148,6 @@ Dependencies - Python 3.7+ - *async_timeout* -- *attrs* - *chardet* - *multidict* - *yarl* diff --git a/docs/multipart.rst b/docs/multipart.rst index 1be94ab07e7..a880a9a055f 100644 --- a/docs/multipart.rst +++ b/docs/multipart.rst @@ -152,9 +152,9 @@ will include the file's basename:: part = root.append(open(__file__, 'rb')) If you want to send a file with a different name, just handle the -:class:`Payload` instance which :meth:`MultipartWriter.append` will +:class:`~aiohttp.payload.Payload` instance which :meth:`MultipartWriter.append` will always return and set `Content-Disposition` explicitly by using -the :meth:`Payload.set_content_disposition` helper:: +the :meth:`Payload.set_content_disposition() ` helper:: part.set_content_disposition('attachment', filename='secret.txt') diff --git a/docs/multipart_reference.rst b/docs/multipart_reference.rst index 032ecc8b7aa..90fda21179d 100644 --- a/docs/multipart_reference.rst +++ b/docs/multipart_reference.rst @@ -7,7 +7,7 @@ Multipart reference .. class:: MultipartResponseWrapper(resp, stream) - Wrapper around the :class:`MultipartBodyReader` to take care about + Wrapper around the :class:`MultipartReader` to take care about underlying connection and close it when it needs in. @@ -135,7 +135,7 @@ Multipart reference Constructs reader instance from HTTP response. - :param response: :class:`~aiohttp.client.ClientResponse` instance + :param response: :class:`~aiohttp.ClientResponse` instance .. 
method:: at_eof() diff --git a/docs/new_router.rst b/docs/new_router.rst index a88b20838aa..dd0914982f2 100644 --- a/docs/new_router.rst +++ b/docs/new_router.rst @@ -45,7 +45,7 @@ User still may use wildcard for accepting all HTTP methods (maybe we will add something like ``resource.add_wildcard(handler)`` later). Since **names** belongs to **resources** now ``app.router['name']`` -returns a **resource** instance instead of :class:`aiohttp.web.Route`. +returns a **resource** instance instead of :class:`aiohttp.web.AbstractRoute`. **resource** has ``.url()`` method, so ``app.router['name'].url(parts={'a': 'b'}, query={'arg': 'param'})`` @@ -65,8 +65,8 @@ The refactoring is 99% compatible with previous implementation. 99% means all example and the most of current code works without modifications but we have subtle API backward incompatibles. -``app.router['name']`` returns a :class:`aiohttp.web.BaseResource` -instance instead of :class:`aiohttp.web.Route` but resource has the +``app.router['name']`` returns a :class:`aiohttp.web.AbstractResource` +instance instead of :class:`aiohttp.web.AbstractRoute` but resource has the same ``resource.url(...)`` most useful method, so end user should feel no difference. @@ -81,4 +81,4 @@ shortcut for:: return route ``app.router.register_route(...)`` is still supported, it creates -:class:`aiohttp.web.ResourceAdapter` for every call (but it's deprecated now). +``aiohttp.web.ResourceAdapter`` for every call (but it's deprecated now). 
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 859ca7b10b4..4a18a93b0b5 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -29,6 +29,7 @@ Dict Discord Django Dup +ETag Facebook HTTPException HttpProcessingError @@ -155,6 +156,7 @@ env environ eof epoll +etag facto fallback fallbacks diff --git a/docs/streams.rst b/docs/streams.rst index 617a1a26f1b..3f9da3494a2 100644 --- a/docs/streams.rst +++ b/docs/streams.rst @@ -7,7 +7,7 @@ Streaming API ``aiohttp`` uses streams for retrieving *BODIES*: -:attr:`aiohttp.web.Request.content` and +:attr:`aiohttp.web.BaseRequest.content` and :attr:`aiohttp.ClientResponse.content` are properties with stream API. @@ -16,7 +16,7 @@ Streaming API The reader from incoming stream. User should never instantiate streams manually but use existing - :attr:`aiohttp.web.Request.content` and + :attr:`aiohttp.web.BaseRequest.content` and :attr:`aiohttp.ClientResponse.content` properties for accessing raw BODY data. diff --git a/docs/testing.rst b/docs/testing.rst index f5895617523..59c1cbe439d 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -57,7 +57,7 @@ requests to this server. :class:`~aiohttp.test_utils.TestServer` runs :class:`aiohttp.web.Application` based server, :class:`~aiohttp.test_utils.RawTestServer` starts -:class:`aiohttp.web.WebServer` low level server. +:class:`aiohttp.web.Server` low level server. For performing HTTP requests to these servers you have to create a test client: :class:`~aiohttp.test_utils.TestClient` instance. 
@@ -290,7 +290,7 @@ Unittest To test applications with the standard library's unittest or unittest-based functionality, the AioHTTPTestCase is provided:: - from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop + from aiohttp.test_utils import AioHTTPTestCase from aiohttp import web class MyAppTestCase(AioHTTPTestCase): @@ -306,26 +306,11 @@ functionality, the AioHTTPTestCase is provided:: app.router.add_get('/', hello) return app - # the unittest_run_loop decorator can be used in tandem with - # the AioHTTPTestCase to simplify running - # tests that are asynchronous - @unittest_run_loop async def test_example(self): - resp = await self.client.request("GET", "/") - assert resp.status == 200 - text = await resp.text() - assert "Hello, world" in text - - # a vanilla example - def test_example_vanilla(self): - async def test_get_route(): - url = "/" - resp = await self.client.request("GET", url) - assert resp.status == 200 + async with self.client.request("GET", "/") as resp: + self.assertEqual(resp.status, 200) text = await resp.text() - assert "Hello, world" in text - - self.loop.run_until_complete(test_get_route()) + self.assertIn("Hello, world", text) .. class:: AioHTTPTestCase @@ -353,7 +338,7 @@ functionality, the AioHTTPTestCase is provided:: .. attribute:: app - The application returned by :meth:`get_app` + The application returned by :meth:`~aiohttp.test_utils.AioHTTPTestCase.get_application` (:class:`aiohttp.web.Application` instance). .. comethod:: get_client() @@ -408,25 +393,13 @@ functionality, the AioHTTPTestCase is provided:: .. note:: The ``TestClient``'s methods are asynchronous: you have to - execute function on the test client using asynchronous methods. - - A basic test class wraps every test method by - :func:`unittest_run_loop` decorator:: + execute functions on the test client using asynchronous methods.:: class TestA(AioHTTPTestCase): - @unittest_run_loop async def test_f(self): - resp = await self.client.get('/') - - -.. 
decorator:: unittest_run_loop: - - A decorator dedicated to use with asynchronous methods of an - :class:`AioHTTPTestCase`. - - Handles executing an asynchronous function, using - the :attr:`AioHTTPTestCase.loop` of the :class:`AioHTTPTestCase`. + async with self.client.get('/') as resp: + body = await resp.text() Patching unittest test cases ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -571,7 +544,7 @@ conditions that hard to reproduce on real server:: :type writer: aiohttp.StreamWriter :param transport: asyncio transport instance - :type transport: asyncio.transports.Transport + :type transport: asyncio.Transport :param payload: raw payload reader object :type payload: aiohttp.StreamReader @@ -650,8 +623,8 @@ Test server Runs given :class:`aiohttp.web.Application` instance on random TCP port. After creation the server is not started yet, use -:meth:`~aiohttp.test_utils.TestServer.start_server` for actual server -starting and :meth:`~aiohttp.test_utils.TestServer.close` for +:meth:`~aiohttp.test_utils.BaseTestServer.start_server` for actual server +starting and :meth:`~aiohttp.test_utils.BaseTestServer.close` for stopping/cleanup. Test server usually works in conjunction with @@ -687,7 +660,7 @@ for accessing to the server. .. attribute:: handler - :class:`aiohttp.web.WebServer` used for HTTP requests serving. + :class:`aiohttp.web.Server` used for HTTP requests serving. .. attribute:: server @@ -799,7 +772,7 @@ Test Client .. attribute:: app - An alias for :attr:`self.server.app`. return ``None`` if + An alias for ``self.server.app``. return ``None`` if ``self.server`` is not :class:`TestServer` instance(e.g. :class:`RawTestServer` instance for test low-level server). diff --git a/docs/third_party.rst b/docs/third_party.rst index b4d87b84459..48dd0c6d354 100644 --- a/docs/third_party.rst +++ b/docs/third_party.rst @@ -4,26 +4,26 @@ Third-Party libraries ===================== -aiohttp is not the library for making HTTP requests and creating WEB -server only. 
+aiohttp is not just a library for making HTTP requests and creating web +servers. -It is the grand basement for libraries built *on top* of aiohttp. +It is the foundation for libraries built *on top* of aiohttp. This page is a list of these tools. -Please feel free to add your open sourced library if it's not enlisted -yet by making Pull Request to https://github.com/aio-libs/aiohttp/ +Please feel free to add your open source library if it's not listed +yet by making a pull request to https://github.com/aio-libs/aiohttp/ -* Why do you might want to include your awesome library into the list? +* Why would you want to include your awesome library in this list? -* Just because the list increases your library visibility. People +* Because the list increases your library visibility. People will have an easy way to find it. Officially supported -------------------- -This list contains libraries which are supported by *aio-libs* team +This list contains libraries which are supported by the *aio-libs* team and located on https://github.com/aio-libs @@ -31,7 +31,7 @@ aiohttp extensions ^^^^^^^^^^^^^^^^^^ - `aiohttp-session `_ - provides sessions for :mod:`aiohttp.web`. + provides sessions for :mod:`aiohttp.web`. - `aiohttp-debugtoolbar `_ is a library for *debug toolbar* support for :mod:`aiohttp.web`. @@ -65,7 +65,7 @@ Database drivers - `aiopg `_ PostgreSQL async driver. -- `aiomysql `_ MySql async driver. +- `aiomysql `_ MySQL async driver. - `aioredis `_ Redis async driver. @@ -82,32 +82,66 @@ Other tools Approved third-party libraries ------------------------------ -The libraries are not part of ``aio-libs`` but they are proven to be very +These libraries are not part of ``aio-libs`` but they have proven to be very well written and highly recommended for usage. - `uvloop `_ Ultra fast implementation of asyncio event loop on top of ``libuv``. - We are highly recommending to use it instead of standard ``asyncio``. 
+ We highly recommend to use this instead of standard ``asyncio``. Database drivers ^^^^^^^^^^^^^^^^ - `asyncpg `_ Another - PostgreSQL async driver. It's much faster than ``aiopg`` but it is - not drop-in replacement -- the API is different. Anyway please take - a look on it -- the driver is really incredible fast. + PostgreSQL async driver. It's much faster than ``aiopg`` but is + not a drop-in replacement -- the API is different. But, please take + a look at it -- the driver is incredibly fast. +OpenAPI / Swagger extensions +---------------------------- + +Extensions bringing `OpenAPI `_ +support to aiohttp web servers. + +- `aiohttp-apispec `_ + Build and document REST APIs with ``aiohttp`` and ``apispec``. + +- `aiohttp_apiset `_ + Package to build routes using swagger specification. + +- `aiohttp-pydantic `_ + An ``aiohttp.View`` to validate the HTTP request's body, query-string, and + headers regarding function annotations and generate OpenAPI doc. Python 3.8+ + required. + +- `aiohttp-swagger `_ + Swagger API Documentation builder for aiohttp server. + +- `aiohttp-swagger3 `_ + Library for Swagger documentation builder and validating aiohttp requests + using swagger specification 3.0. + +- `aiohttp-swaggerify `_ + Library to automatically generate swagger2.0 definition for aiohttp endpoints. + +- `aio-openapi `_ + Asynchronous web middleware for aiohttp and serving Rest APIs with OpenAPI v3 + specification and with optional PostgreSQL database bindings. + +- `rororo `_ + Implement ``aiohttp.web`` OpenAPI 3 server applications with schema first + approach. Python 3.6+ required. Others ------ -The list of libraries which are exists but not enlisted in former categories. +Here is a list of other known libraries that do not belong in the former categories. -They may be perfect or not -- we don't know. +We cannot vouch for the quality of these libraries, use them at your own risk. 
Please add your library reference here first and after some time -period ask to raise the status. +ask to raise the status. - `pytest-aiohttp-client `_ Pytest fixture with simpler api, payload decoding and status code assertions. @@ -127,21 +161,9 @@ period ask to raise the status. - `gain `_ Web crawling framework based on asyncio for everyone. -- `aiohttp-swagger `_ - Swagger API Documentation builder for aiohttp server. - -- `aiohttp-swagger3 `_ - Library for Swagger documentation builder and validating aiohttp requests using swagger specification 3.0. - -- `aiohttp-swaggerify `_ - Library to automatically generate swagger2.0 definition for aiohttp endpoints. - - `aiohttp-validate `_ Simple library that helps you validate your API endpoints requests/responses with json schema. -- `aiohttp-pydantic `_ - An ``aiohttp.View`` to validate the HTTP request's body, query-string, and headers regarding function annotations and generate Open API doc. Python 3.8+ required. - - `raven-aiohttp `_ An aiohttp transport for raven-python (Sentry client). @@ -150,9 +172,6 @@ period ask to raise the status. popular web frameworks, including Flask, Django, Bottle, Tornado, Pyramid, webapp2, Falcon, and aiohttp. -- `aioauth-client `_ OAuth - client for aiohttp. - - `aiohttpretty `_ A simple asyncio compatible httpretty mock using aiohttp. @@ -164,9 +183,6 @@ period ask to raise the status. `_ A transmute implementation for aiohttp. -- `aiohttp_apiset `_ - Package to build routes using swagger specification. - - `aiohttp-login `_ Registration and authorization (including social) for aiohttp applications. @@ -245,9 +261,6 @@ period ask to raise the status. - `GINO `_ An asyncio ORM on top of SQLAlchemy core, delivered with an aiohttp extension. -- `aiohttp-apispec `_ - Build and document REST APIs with ``aiohttp`` and ``apispec``. - - `eider-py `_ Python implementation of the `Eider RPC protocol `_. @@ -264,10 +277,6 @@ period ask to raise the status. 
- `DBGR `_ Terminal based tool to test and debug HTTP APIs with ``aiohttp``. -- `rororo `_ - Implement ``aiohtp.web`` OpenAPI 3 server applications with schema first - approach. Python 3.6+ required. - - `aiohttp-middlewares `_ Collection of useful middlewares for ``aiohttp.web`` applications. Python 3.6+ required. diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index 0e86a211905..0476b5f1faf 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -436,7 +436,7 @@ the keyword-only ``middlewares`` parameter:: Internally, a single :ref:`request handler ` is constructed by applying the middleware chain to the original handler in reverse order, -and is called by the :class:`RequestHandler` as a regular *handler*. +and is called by the :class:`~aiohttp.web.RequestHandler` as a regular *handler*. Since *middlewares* are themselves coroutines, they may perform extra ``await`` calls when creating a new handler, e.g. call database etc. @@ -748,7 +748,7 @@ header:: Custom resource implementation ------------------------------ -To register custom resource use :meth:`UrlDispatcher.register_resource`. +To register custom resource use :meth:`~aiohttp.web.UrlDispatcher.register_resource`. Resource instance must implement `AbstractResource` interface. .. _aiohttp-web-app-runners: @@ -849,9 +849,9 @@ sources (e.g. ZeroMQ, Redis Pub/Sub, AMQP, etc.) to react to received messages within the application. For example the background task could listen to ZeroMQ on -:data:`zmq.SUB` socket, process and forward retrieved messages to +``zmq.SUB`` socket, process and forward retrieved messages to clients connected via WebSocket that are stored somewhere in the -application (e.g. in the :obj:`application['websockets']` list). +application (e.g. in the ``application['websockets']`` list). 
To run such short and long running background tasks aiohttp provides an ability to register :attr:`Application.on_startup` signal handler(s) that @@ -893,7 +893,7 @@ signal handlers as shown in the example below:: web.run_app(app) -The task :func:`listen_to_redis` will run forever. +The task ``listen_to_redis`` will run forever. To shut it down correctly :attr:`Application.on_cleanup` signal handler may be used to send a cancellation to it. diff --git a/docs/web_exceptions.rst b/docs/web_exceptions.rst index 14d522479db..989f1d90f52 100644 --- a/docs/web_exceptions.rst +++ b/docs/web_exceptions.rst @@ -479,13 +479,13 @@ HTTP exceptions for status code in range 400-499, e.g. ``raise web.HTTPNotFound( :exc:`HTTPClientError`. :param link: A link to a resource with information for blocking reason, - :class:`str` or :class:`URL` + :class:`str` or :class:`~yarl.URL` For other parameters see :exc:`HTTPException` constructor. .. attribute:: link - A :class:`URL` link to a resource with information for blocking reason, + A :class:`~yarl.URL` link to a resource with information for blocking reason, read-only property. diff --git a/docs/web_lowlevel.rst b/docs/web_lowlevel.rst index 1e245da36d6..456b8fea4cd 100644 --- a/docs/web_lowlevel.rst +++ b/docs/web_lowlevel.rst @@ -19,7 +19,7 @@ request and returns a response object. This is done by introducing :class:`aiohttp.web.Server` class which serves a *protocol factory* role for -:meth:`asyncio.AbstractEventLoop.create_server` and bridges data +:meth:`asyncio.loop.create_server` and bridges data stream to *web handler* and sends result back. 
diff --git a/docs/web_quickstart.rst b/docs/web_quickstart.rst index 4f9948ae18f..a9e080776df 100644 --- a/docs/web_quickstart.rst +++ b/docs/web_quickstart.rst @@ -137,7 +137,7 @@ requests on a *path* having **any** *HTTP method*:: app.add_routes([web.route('*', '/path', all_handler)]) The *HTTP method* can be queried later in the request handler using the -:attr:`Request.method` property. +:attr:`aiohttp.web.BaseRequest.method` property. By default endpoints added with ``GET`` method will accept ``HEAD`` requests and return the same response headers as they would @@ -355,7 +355,7 @@ Route tables look like Django way:: web.post('/post', handle_post), -The snippet calls :meth:`~aiohttp.web.UrlDispather.add_routes` to +The snippet calls :meth:`~aiohttp.web.UrlDispatcher.add_routes` to register a list of *route definitions* (:class:`aiohttp.web.RouteDef` instances) created by :func:`aiohttp.web.get` or :func:`aiohttp.web.post` functions. @@ -399,7 +399,7 @@ The container is a list-like object with additional decorators routes. After filling the container -:meth:`~aiohttp.web.UrlDispather.add_routes` is used for adding +:meth:`~aiohttp.web.UrlDispatcher.add_routes` is used for adding registered *route definitions* into application's router. .. seealso:: :ref:`aiohttp-web-route-table-def` reference. @@ -468,17 +468,17 @@ HTTP Forms HTTP Forms are supported out of the box. If form's method is ``"GET"`` (``
``) use -:attr:`Request.query` for getting form data. +:attr:`aiohttp.web.BaseRequest.query` for getting form data. To access form data with ``"POST"`` method use -:meth:`Request.post` or :meth:`Request.multipart`. +:meth:`aiohttp.web.BaseRequest.post` or :meth:`aiohttp.web.BaseRequest.multipart`. -:meth:`Request.post` accepts both +:meth:`aiohttp.web.BaseRequest.post` accepts both ``'application/x-www-form-urlencoded'`` and ``'multipart/form-data'`` form's data encoding (e.g. ````). It stores files data in temporary directory. If `client_max_size` is specified `post` raises `ValueError` exception. -For efficiency use :meth:`Request.multipart`, It is especially effective +For efficiency use :meth:`aiohttp.web.BaseRequest.multipart`, It is especially effective for uploading large files (:ref:`aiohttp-web-file-upload`). Values submitted by the following form: @@ -552,10 +552,10 @@ a container for the file as well as some of its metadata:: You might have noticed a big warning in the example above. The general issue is -that :meth:`Request.post` reads the whole payload in memory, +that :meth:`aiohttp.web.BaseRequest.post` reads the whole payload in memory, resulting in possible :abbr:`OOM (Out Of Memory)` errors. To avoid this, for multipart uploads, you -should use :meth:`Request.multipart` which returns a :ref:`multipart reader +should use :meth:`aiohttp.web.BaseRequest.multipart` which returns a :ref:`multipart reader `:: async def store_mp3_handler(request): diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 78d352f5adc..9b50a1c1412 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -118,7 +118,7 @@ and :ref:`aiohttp-web-signals` handlers. - Overridden value by :meth:`~BaseRequest.clone` call. - *Host* HTTP header - - :func:`socket.gtfqdn` + - :func:`socket.getfqdn` Read-only :class:`str` property. @@ -219,9 +219,9 @@ and :ref:`aiohttp-web-signals` handlers. .. attribute:: cookies - A multidict of all request's cookies. 
+ A read-only dictionary-like object containing the request's cookies. - Read-only :class:`~multidict.MultiDictProxy` lazy property. + Read-only :class:`~types.MappingProxyType` property. .. attribute:: content @@ -318,6 +318,26 @@ and :ref:`aiohttp-web-signals` handlers. .. versionadded:: 3.1 + .. attribute:: if_match + + Read-only property that returns :class:`~aiohttp.ETag` objects specified + in the *If-Match* header. + + Returns :class:`tuple` of :class:`~aiohttp.ETag` or ``None`` if + *If-Match* header is absent. + + .. versionadded:: 3.8 + + .. attribute:: if_none_match + + Read-only property that returns :class:`~aiohttp.ETag` objects specified + in the *If-None-Match* header. + + Returns :class:`tuple` of :class:`~aiohttp.ETag` or ``None`` if + *If-None-Match* header is absent. + + .. versionadded:: 3.8 + .. attribute:: if_range Read-only property that returns the date specified in the @@ -365,7 +385,7 @@ and :ref:`aiohttp-web-signals` handlers. .. note:: The method **does** store read data internally, subsequent - :meth:`~Request.read` call will return the same value. + :meth:`~aiohttp.web.BaseRequest.read` call will return the same value. .. comethod:: text() @@ -377,16 +397,16 @@ and :ref:`aiohttp-web-signals` handlers. .. note:: The method **does** store read data internally, subsequent - :meth:`~Request.text` call will return the same value. + :meth:`~aiohttp.web.BaseRequest.text` call will return the same value. .. comethod:: json(*, loads=json.loads, \ content_type='application/json') Read request body decoded as *json*. If request's content-type does not - match `content_type` parameter, :class:`web.HTTPBadRequest` get raised. + match `content_type` parameter, :exc:`aiohttp.web.HTTPBadRequest` gets raised. To disable content type check pass ``None`` value. 
- :param callable loads: any :term:`callable` that accepts + :param collections.abc.Callable loads: any :term:`callable` that accepts :class:`str` and returns :class:`dict` with parsed JSON (:func:`json.loads` by default). @@ -396,12 +416,12 @@ and :ref:`aiohttp-web-signals` handlers. .. note:: The method **does** store read data internally, subsequent - :meth:`~Request.json` call will return the same value. + :meth:`~aiohttp.web.BaseRequest.json` call will return the same value. .. comethod:: multipart() - Returns :class:`aiohttp.multipart.MultipartReader` which processes + Returns :class:`aiohttp.MultipartReader` which processes incoming *multipart* request. The method is just a boilerplate :ref:`coroutine ` @@ -440,7 +460,7 @@ and :ref:`aiohttp-web-signals` handlers. .. note:: The method **does** store read data internally, subsequent - :meth:`~Request.post` call will return the same value. + :meth:`~aiohttp.web.BaseRequest.post` call will return the same value. .. comethod:: release() @@ -450,7 +470,7 @@ and :ref:`aiohttp-web-signals` handlers. .. note:: - User code may never call :meth:`~Request.release`, all + User code may never call :meth:`~aiohttp.web.BaseRequest.release`, all required work will be processed by :mod:`aiohttp.web` internal machinery. @@ -611,7 +631,7 @@ StreamResponse .. attribute:: keep_alive - Read-only property, copy of :attr:`Request.keep_alive` by default. + Read-only property, copy of :attr:`aiohttp.web.BaseRequest.keep_alive` by default. Can be switched to ``False`` by :meth:`force_close` call. @@ -725,7 +745,7 @@ StreamResponse :param int version: a decimal integer, identifies to which version of the state management specification the cookie - conforms. (Optional, *version=1* by default) + conforms. 
(optional) :param str samesite: Asserts that a cookie must not be sent with cross-origin requests, providing some protection @@ -774,6 +794,20 @@ StreamResponse as an :class:`int` or a :class:`float` object, and the value ``None`` to unset the header. + .. attribute:: etag + + *ETag* header for outgoing response. + + This property accepts raw :class:`str` values, :class:`~aiohttp.ETag` + objects and the value ``None`` to unset the header. + + In case of :class:`str` input, etag is considered as strong by default. + + **Do not** use double quotes ``"`` in the etag value, + they will be added automatically. + + .. versionadded:: 3.8 + .. comethod:: prepare(request) :param aiohttp.web.Request request: HTTP request object, that the @@ -877,7 +911,7 @@ Response Setting :attr:`text` also recalculates :attr:`~StreamResponse.content_length` value and - :attr:`~StreamResponse.body` value + :attr:`~aiohttp.StreamResponse.body` value Resetting :attr:`text` (assigning ``None``) sets :attr:`~StreamResponse.content_length` to ``None`` too, dropping @@ -1102,7 +1136,7 @@ WebSocketResponse single message, ``None`` for not overriding per-socket setting. - :param callable dumps: any :term:`callable` that accepts an object and + :param collections.abc.Callable dumps: any :term:`callable` that accepts an object and returns a JSON string (:func:`json.dumps` by default). @@ -1201,7 +1235,7 @@ WebSocketResponse Can only be called by the request handling task. - :param callable loads: any :term:`callable` that accepts + :param collections.abc.Callable loads: any :term:`callable` that accepts :class:`str` and returns :class:`dict` with parsed JSON (:func:`json.loads` by default). @@ -1289,7 +1323,7 @@ properties for later access from a :ref:`handler` via the conn.execute("DELETE * FROM table") Although :class:`Application` is a :obj:`dict`-like object, it can't be -duplicated like one using :meth:`Application.copy`. +duplicated like one using :meth:`~aiohttp.web.Application.copy`. .. 
class:: Application(*, logger=, middlewares=(), \ handler_args=None, client_max_size=1024**2, \ @@ -1439,7 +1473,7 @@ duplicated like one using :meth:`Application.copy`. :param Application subapp: nested application. - :returns: a :class:`MatchedSubAppResource` instance. + :returns: a :class:`~aiohttp.web.MatchedSubAppResource` instance. .. method:: add_routes(routes_table) @@ -1493,8 +1527,8 @@ duplicated like one using :meth:`Application.copy`. based but traversal ones). For sake of that fact we have very trivial ABC for - :class:`AbstractRouter`: it should have only - :meth:`AbstractRouter.resolve` coroutine. + :class:`~aiohttp.abc.AbstractRouter`: it should have only + :meth:`aiohttp.abc.AbstractRouter.resolve` coroutine. No methods for adding routes or route reversing (getting URL by route name). All those are router implementation details (but, @@ -1506,7 +1540,7 @@ Server ^^^^^^ A protocol factory compatible with -:meth:`~asyncio.AbstreactEventLoop.create_server`. +:meth:`~asyncio.AbstractEventLoop.create_server`. .. class:: Server @@ -1533,11 +1567,11 @@ Router For dispatching URLs to :ref:`handlers` :mod:`aiohttp.web` uses *routers*. -Router is any object that implements :class:`AbstractRouter` interface. +Router is any object that implements :class:`~aiohttp.abc.AbstractRouter` interface. :mod:`aiohttp.web` provides an implementation called :class:`UrlDispatcher`. -:class:`Application` uses :class:`UrlDispatcher` as :meth:`router` by default. +:class:`Application` uses :class:`UrlDispatcher` as :meth:`~aiohttp.web.Application.router` by default. .. class:: UrlDispatcher() @@ -1595,13 +1629,13 @@ Router is any object that implements :class:`AbstractRouter` interface. :param str path: route path. Should be started with slash (``'/'``). - :param callable handler: route handler. + :param collections.abc.Callable handler: route handler. :param str name: optional route name. - :param coroutine expect_handler: optional *expect* header handler. 
+ :param collections.abc.Coroutine expect_handler: optional *expect* header handler. - :returns: new :class:`PlainRoute` or :class:`DynamicRoute` instance. + :returns: new :class:`AbstractRoute` instance. .. method:: add_routes(routes_table) @@ -1700,7 +1734,7 @@ Router is any object that implements :class:`AbstractRouter` interface. :param str name: optional route name. - :param coroutine expect_handler: optional *expect* header handler. + :param collections.abc.Coroutine expect_handler: optional *expect* header handler. :param int chunk_size: size of single chunk for file downloading, 256Kb by default. @@ -1720,23 +1754,23 @@ Router is any object that implements :class:`AbstractRouter` interface. :param bool append_version: flag for adding file version (hash) to the url query string, this value will be used as default when you call to - :meth:`StaticRoute.url` and - :meth:`StaticRoute.url_for` methods. + :meth:`~aiohttp.web.AbstractRoute.url` and + :meth:`~aiohttp.web.AbstractRoute.url_for` methods. - :returns: new :class:`StaticRoute` instance. + :returns: new :class:`~aiohttp.web.AbstractRoute` instance. .. comethod:: resolve(request) A :ref:`coroutine` that returns - :class:`AbstractMatchInfo` for *request*. + :class:`~aiohttp.abc.AbstractMatchInfo` for *request*. The method never raises exception, but returns - :class:`AbstractMatchInfo` instance with: + :class:`~aiohttp.abc.AbstractMatchInfo` instance with: - 1. :attr:`~AbstractMatchInfo.http_exception` assigned to + 1. :attr:`~aiohttp.abc.AbstractMatchInfo.http_exception` assigned to :exc:`HTTPException` instance. - 2. :attr:`~AbstractMatchInfo.handler` which raises + 2. :meth:`~aiohttp.abc.AbstractMatchInfo.handler` which raises :exc:`HTTPNotFound` or :exc:`HTTPMethodNotAllowed` on handler's execution if there is no registered route for *request*. @@ -1745,7 +1779,7 @@ Router is any object that implements :class:`AbstractRouter` interface. 
Used by internal machinery, end user unlikely need to call the method. - .. note:: The method uses :attr:`Request.raw_path` for pattern + .. note:: The method uses :attr:`aiohttp.web.BaseRequest.raw_path` for pattern matching against registered routes. .. method:: resources() @@ -1777,7 +1811,7 @@ Router is any object that implements :class:`AbstractRouter` interface. *all* named **resources**. The view maps every named resource's **name** to the - :class:`BaseResource` instance. It supports the usual + :class:`AbstractResource` instance. It supports the usual :obj:`dict`-like operations, except for any mutable operations (i.e. it's **read-only**):: @@ -1811,11 +1845,11 @@ unique *name* and at least one :term:`route`. finished. 4. Otherwise router tries next resource from the *routing table*. 5. If the end of *routing table* is reached and no *resource* / - *route* pair found the *router* returns special :class:`AbstractMatchInfo` - instance with :attr:`AbstractMatchInfo.http_exception` is not ``None`` + *route* pair found the *router* returns special :class:`~aiohttp.abc.AbstractMatchInfo` + instance with :attr:`aiohttp.abc.AbstractMatchInfo.http_exception` is not ``None`` but :exc:`HTTPException` with either *HTTP 404 Not Found* or *HTTP 405 Method Not Allowed* status code. - Registered :attr:`AbstractMatchInfo.handler` raises this exception on call. + Registered :meth:`~aiohttp.abc.AbstractMatchInfo.handler` raises this exception on call. User should never instantiate resource classes but give it by :meth:`UrlDispatcher.add_resource` call. @@ -1911,9 +1945,9 @@ Resource classes hierarchy:: The method should be unique for resource. - :param callable handler: route handler. + :param collections.abc.Callable handler: route handler. - :param coroutine expect_handler: optional *expect* header handler. + :param collections.abc.Coroutine expect_handler: optional *expect* header handler. :returns: new :class:`ResourceRoute` instance. 
@@ -2298,7 +2332,7 @@ A routes table definition used for describing routes by decorators .. class:: RouteTableDef() A sequence of :class:`RouteDef` instances (implements - :class:`abc.collections.Sequence` protocol). + :class:`collections.abc.Sequence` protocol). In addition to all standard :class:`list` methods the class provides also methods like ``get()`` and ``post()`` for adding new @@ -2382,12 +2416,12 @@ Matching result can be accessible from handler as :attr:`Request.match_info` attribute. In general the result may be any object derived from -:class:`AbstractMatchInfo` (:class:`UrlMappingMatchInfo` for default +:class:`~aiohttp.abc.AbstractMatchInfo` (:class:`UrlMappingMatchInfo` for default :class:`UrlDispatcher` router). .. class:: UrlMappingMatchInfo - Inherited from :class:`dict` and :class:`AbstractMatchInfo`. Dict + Inherited from :class:`dict` and :class:`~aiohttp.abc.AbstractMatchInfo`. Dict items are filled by matching info and is :term:`resource`\-specific. .. attribute:: expect_handler @@ -2400,7 +2434,7 @@ In general the result may be any object derived from .. attribute:: route - :class:`Route` instance for url matching. + :class:`AbstractRoute` instance for url matching. View @@ -2408,7 +2442,7 @@ View .. class:: View(request) - Inherited from :class:`AbstractView`. + Inherited from :class:`~aiohttp.abc.AbstractView`. Base class for class based views. Implementations should derive from :class:`View` and override methods for handling HTTP verbs like @@ -2485,7 +2519,7 @@ application on specific TCP or Unix socket, e.g.:: A :class:`list` of served sockets addresses. - See :meth:`socket.getsockname` for items type. + See :meth:`socket.getsockname() ` for items type. .. versionadded:: 3.3 @@ -2633,7 +2667,7 @@ application on specific TCP or Unix socket, e.g.:: :param int backlog: a number of unaccepted connections that the system will allow before refusing new - connections, see :meth:`socket.listen` for details. 
+ connections, see :meth:`socket.socket.listen` for details. ``128`` by default. @@ -2668,7 +2702,7 @@ application on specific TCP or Unix socket, e.g.:: :param int backlog: a number of unaccepted connections that the system will allow before refusing new - connections, see :meth:`socket.listen` for details. + connections, see :meth:`socket.socket.listen` for details. ``128`` by default. @@ -2692,7 +2726,7 @@ application on specific TCP or Unix socket, e.g.:: :param runner: a runner to serve. - :param sock: :class:`socket.socket` to listen. + :param sock: A :ref:`socket instance ` to listen to. :param float shutdown_timeout: a timeout for closing opened connections on :meth:`BaseSite.stop` @@ -2704,7 +2738,7 @@ application on specific TCP or Unix socket, e.g.:: :param int backlog: a number of unaccepted connections that the system will allow before refusing new - connections, see :meth:`socket.listen` for details. + connections, see :meth:`socket.socket.listen` for details. ``128`` by default. @@ -2713,8 +2747,8 @@ Utilities .. class:: FileField - A :class:`~collections.namedtuple` instance that is returned as - multidict value by :meth:`Request.POST` if field is uploaded file. + A :mod:`dataclass ` instance that is returned as + multidict value by :meth:`aiohttp.web.BaseRequest.post` if field is uploaded file. .. attribute:: name @@ -2781,7 +2815,7 @@ Utilities multiple domain sockets. Listening on Unix domain sockets is not supported by all operating systems. - :param socket sock: a preexisting socket object to accept connections on. + :param socket.socket sock: a preexisting socket object to accept connections on. A sequence of socket objects can be passed. 
:param int shutdown_timeout: a delay to wait for graceful server diff --git a/docs/websocket_utilities.rst b/docs/websocket_utilities.rst index 29dc8108e07..5b212544415 100644 --- a/docs/websocket_utilities.rst +++ b/docs/websocket_utilities.rst @@ -43,8 +43,8 @@ WebSocket utilities received a message that violates its policy. This is a generic status code that can be returned when there is no other more suitable status code (e.g., - :attr:`~WSCloseCode.unsupported_data` or - :attr:`~WSCloseCode.message_too_big`) or if there is a need to + :attr:`~aiohttp.WSCloseCode.UNSUPPORTED_DATA` or + :attr:`~aiohttp.WSCloseCode.MESSAGE_TOO_BIG`) or if there is a need to hide specific details about the policy. .. attribute:: MESSAGE_TOO_BIG diff --git a/docs/whats_new_1_1.rst b/docs/whats_new_1_1.rst index db71e10e8b1..7b5305efbf5 100644 --- a/docs/whats_new_1_1.rst +++ b/docs/whats_new_1_1.rst @@ -20,8 +20,8 @@ e.g. ``session.get('http://example.com')`` works as well as Internal API has been switched to :class:`yarl.URL`. :class:`aiohttp.CookieJar` accepts :class:`~yarl.URL` instances only. -On server side has added :class:`web.Request.url` and -:class:`web.Request.rel_url` properties for representing relative and +On server side has added :attr:`aiohttp.web.BaseRequest.url` and +:attr:`aiohttp.web.BaseRequest.rel_url` properties for representing relative and absolute request's URL. URL using is the recommended way, already existed properties for @@ -32,7 +32,7 @@ parameter. :class:`str` is still supported and will be supported forever. Reverse URL processing for *router* has been changed. -The main API is :class:`aiohttp.web.Request.url_for(name, **kwargs)` +The main API is ``aiohttp.web.Request.url_for`` which returns a :class:`yarl.URL` instance for named resource. It does not support *query args* but adding *args* is trivial: ``request.url_for('named_resource', param='a').with_query(arg='val')``. 
diff --git a/examples/background_tasks.py b/examples/background_tasks.py index 5ce9a03d54f..dab7756ab86 100755 --- a/examples/background_tasks.py +++ b/examples/background_tasks.py @@ -45,7 +45,7 @@ async def listen_to_redis(app: web.Application) -> None: async def start_background_tasks(app: web.Application) -> None: - app["redis_listener"] = app.loop.create_task(listen_to_redis(app)) + app["redis_listener"] = asyncio.create_task(listen_to_redis(app)) async def cleanup_background_tasks(app: web.Application) -> None: diff --git a/examples/client_json.py b/examples/client_json.py index e24e0690708..2024697e657 100755 --- a/examples/client_json.py +++ b/examples/client_json.py @@ -4,7 +4,7 @@ import aiohttp -async def fetch(session: aiohttp.ClientSession): +async def fetch(session: aiohttp.ClientSession) -> None: print("Query http://httpbin.org/get") async with session.get("http://httpbin.org/get") as resp: print(resp.status) diff --git a/examples/fake_server.py b/examples/fake_server.py index 9105d990ea4..065d2d779eb 100755 --- a/examples/fake_server.py +++ b/examples/fake_server.py @@ -6,9 +6,10 @@ from typing import Any, Dict, List, Union from aiohttp import ClientSession, TCPConnector, resolver, test_utils, web +from aiohttp.abc import AbstractResolver -class FakeResolver: +class FakeResolver(AbstractResolver): _LOCAL_HOST = {0: "127.0.0.1", socket.AF_INET: "127.0.0.1", socket.AF_INET6: "::1"} def __init__(self, fakes: Dict[str, int]) -> None: @@ -50,7 +51,7 @@ def __init__(self) -> None: web.get("/v2.7/me/friends", self.on_my_friends), ] ) - self.runner = None + self.runner = web.AppRunner(self.app) here = pathlib.Path(__file__) ssl_cert = here.parent / "server.crt" ssl_key = here.parent / "server.key" diff --git a/examples/web_cookies.py b/examples/web_cookies.py index bd3d5961c4b..6836569183f 100755 --- a/examples/web_cookies.py +++ b/examples/web_cookies.py @@ -3,6 +3,7 @@ """ from pprint import pformat +from typing import NoReturn from aiohttp import web @@ 
-22,13 +23,13 @@ async def root(request: web.Request) -> web.StreamResponse: return resp -async def login(request: web.Request) -> None: +async def login(request: web.Request) -> NoReturn: exc = web.HTTPFound(location="/") exc.set_cookie("AUTH", "secret") raise exc -async def logout(request: web.Request) -> None: +async def logout(request: web.Request) -> NoReturn: exc = web.HTTPFound(location="/") exc.del_cookie("AUTH") raise exc diff --git a/examples/web_rewrite_headers_middleware.py b/examples/web_rewrite_headers_middleware.py index 7fc569bce7a..149dc28285d 100755 --- a/examples/web_rewrite_headers_middleware.py +++ b/examples/web_rewrite_headers_middleware.py @@ -2,18 +2,15 @@ """ Example for rewriting response headers by middleware. """ -from typing import Awaitable, Callable - from aiohttp import web - -_WebHandler = Callable[[web.Request], Awaitable[web.StreamResponse]] +from aiohttp.typedefs import Handler async def handler(request: web.Request) -> web.StreamResponse: return web.Response(text="Everything is fine") -async def middleware(request: web.Request, handler: _WebHandler) -> web.StreamResponse: +async def middleware(request: web.Request, handler: Handler) -> web.StreamResponse: try: response = await handler(request) except web.HTTPException as exc: diff --git a/requirements/base.txt b/requirements/base.txt index 639544c15b4..68e007c65b2 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,14 +1,14 @@ -r multidict.txt # required c-ares will not build on windows and has build problems on Macos Python<3.7 -aiodns==2.0.0; sys_platform=="linux" or sys_platform=="darwin" and python_version>="3.7" +aiodns==3.0.0; sys_platform=="linux" or sys_platform=="darwin" and python_version>="3.7" aiosignal==1.1.2 async-timeout==4.0.0a3 asynctest==0.13.0; python_version<"3.8" Brotli==1.0.9 cchardet==2.1.7 -chardet==3.0.4 +chardet==4.0.0 frozenlist==1.1.1 -gunicorn==20.0.4 +gunicorn==20.1.0 typing_extensions==3.7.4.3 uvloop==0.14.0; 
platform_system!="Windows" and implementation_name=="cpython" and python_version<"3.9" # MagicStack/uvloop#14 yarl==1.6.3 diff --git a/requirements/cython.in b/requirements/cython.in new file mode 100644 index 00000000000..af82ec193ce --- /dev/null +++ b/requirements/cython.in @@ -0,0 +1,3 @@ +-r multidict.txt +cython==0.29.24 +typing_extensions==3.7.4.3 # required for parsing aiohttp/hdrs.py by tools/gen.py diff --git a/requirements/cython.txt b/requirements/cython.txt index e478589498f..c981d498131 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -1,2 +1,12 @@ --r multidict.txt -cython==0.29.21 +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --allow-unsafe requirements/cython.in +# +cython==0.29.24 + # via -r requirements/cython.in +multidict==5.1.0 + # via -r requirements/multidict.txt +typing_extensions==3.7.4.3 + # via -r requirements/cython.in diff --git a/requirements/dev.in b/requirements/dev.in new file mode 100644 index 00000000000..31b14be9997 --- /dev/null +++ b/requirements/dev.in @@ -0,0 +1,4 @@ +-r lint.txt +-r test.txt +-r doc.txt +cherry_picker==2.0.0; python_version>="3.6" diff --git a/requirements/dev.txt b/requirements/dev.txt index fc7aee6945c..59ca566d7fe 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,4 +1,298 @@ --r lint.txt --r test.txt --r doc.txt -cherry_picker==1.3.2; python_version>="3.6" +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --allow-unsafe requirements/dev.in +# +aiodns==3.0.0 ; sys_platform == "linux" or sys_platform == "darwin" and python_version >= "3.7" + # via -r requirements/base.txt +aiohttp-theme==0.1.6 + # via -r requirements/doc.txt +aiosignal==1.1.2 + # via -r requirements/base.txt +alabaster==0.7.12 + # via sphinx +appdirs==1.4.4 + # via + # -r requirements/lint.txt + # black + # virtualenv +async-timeout==4.0.0a3 + # via -r requirements/base.txt +attrs==20.3.0 + # via + # -r requirements/lint.txt + # 
flake8-pyi + # pytest +babel==2.9.0 + # via sphinx +black==21.7b0 ; implementation_name == "cpython" + # via -r requirements/lint.txt +blockdiag==2.0.1 + # via sphinxcontrib-blockdiag +brotli==1.0.9 + # via + # -r requirements/base.txt + # -r requirements/test.txt +cchardet==2.1.7 + # via -r requirements/base.txt +certifi==2020.12.5 + # via requests +cffi==1.14.4 + # via + # cryptography + # pycares +cfgv==3.2.0 + # via + # -r requirements/lint.txt + # pre-commit +chardet==4.0.0 + # via + # -r requirements/base.txt + # requests +cherry_picker==2.0.0 ; python_version >= "3.6" + # via -r requirements/dev.in +click==7.1.2 + # via + # -r requirements/lint.txt + # black + # cherry-picker + # click-default-group + # towncrier +click-default-group==1.2.2 + # via towncrier +coverage==5.5 + # via + # -r requirements/test.txt + # pytest-cov +cryptography==3.3.1 + # via + # pyjwt + # trustme +distlib==0.3.1 + # via + # -r requirements/lint.txt + # virtualenv +docutils==0.16 + # via sphinx +filelock==3.0.12 + # via + # -r requirements/lint.txt + # virtualenv +flake8==3.9.2 + # via + # -r requirements/lint.txt + # flake8-pyi +flake8-pyi==20.10.0 + # via -r requirements/lint.txt +freezegun==1.1.0 + # via -r requirements/test.txt +frozenlist==1.1.1 + # via + # -r requirements/base.txt + # aiosignal +funcparserlib==1.0.0a0 + # via + # -r requirements/doc.txt + # blockdiag +gidgethub==5.0.0 + # via cherry-picker +gunicorn==20.1.0 + # via -r requirements/base.txt +identify==1.5.14 + # via + # -r requirements/lint.txt + # pre-commit +idna==2.10 + # via + # requests + # trustme + # yarl +imagesize==1.2.0 + # via sphinx +incremental==17.5.0 + # via towncrier +iniconfig==1.1.1 + # via + # -r requirements/lint.txt + # pytest +isort==5.9.3 + # via -r requirements/lint.txt +jinja2==2.11.3 + # via + # sphinx + # towncrier +markupsafe==1.1.1 + # via jinja2 +mccabe==0.6.1 + # via + # -r requirements/lint.txt + # flake8 +multidict==5.1.0 + # via + # -r requirements/multidict.txt + # yarl 
+mypy==0.910 ; implementation_name == "cpython" + # via + # -r requirements/lint.txt + # -r requirements/test.txt +mypy-extensions==0.4.3 ; implementation_name == "cpython" + # via + # -r requirements/lint.txt + # -r requirements/test.txt + # black + # mypy +nodeenv==1.5.0 + # via + # -r requirements/lint.txt + # pre-commit +packaging==20.9 + # via + # -r requirements/lint.txt + # pytest + # sphinx +pathspec==0.8.1 + # via + # -r requirements/lint.txt + # black +pillow==8.3.2 + # via blockdiag +pluggy==0.13.1 + # via + # -r requirements/lint.txt + # pytest +pre-commit==2.15.0 + # via -r requirements/lint.txt +py==1.10.0 + # via + # -r requirements/lint.txt + # pytest +pycares==4.0.0 + # via aiodns +pycodestyle==2.7.0 + # via + # -r requirements/lint.txt + # flake8 +pycparser==2.20 + # via cffi +pyflakes==2.3.0 + # via + # -r requirements/lint.txt + # flake8 + # flake8-pyi +pygments==2.10.0 + # via + # -r requirements/doc.txt + # sphinx +pyjwt[crypto]==2.0.0 + # via gidgethub +pyparsing==2.4.7 + # via + # -r requirements/lint.txt + # packaging +pytest==6.2.2 + # via + # -r requirements/lint.txt + # -r requirements/test.txt + # pytest-cov + # pytest-mock +pytest-cov==2.12.1 + # via -r requirements/test.txt +pytest-mock==3.6.1 + # via -r requirements/test.txt +python-dateutil==2.8.1 + # via freezegun +pytz==2020.5 + # via babel +pyyaml==5.4.1 + # via + # -r requirements/lint.txt + # pre-commit +re-assert==1.1.0 + # via -r requirements/test.txt +regex==2020.11.13 + # via + # -r requirements/lint.txt + # black + # re-assert +requests==2.25.1 + # via + # cherry-picker + # sphinx +setuptools-git==1.2 + # via -r requirements/test.txt +six==1.15.0 + # via + # -r requirements/lint.txt + # cryptography + # python-dateutil + # virtualenv +snowballstemmer==2.0.0 + # via sphinx +sphinx==4.2.0 + # via + # -r requirements/doc.txt + # sphinxcontrib-asyncio + # sphinxcontrib-blockdiag +sphinxcontrib-applehelp==1.0.2 + # via sphinx +sphinxcontrib-asyncio==0.3.0 + # via -r 
requirements/doc.txt +sphinxcontrib-blockdiag==2.0.0 + # via -r requirements/doc.txt +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +toml==0.10.2 + # via + # -r requirements/lint.txt + # cherry-picker + # mypy + # pre-commit + # pytest + # pytest-cov + # towncrier +tomli==1.2.1 + # via + # -r requirements/lint.txt + # black +towncrier==21.3.0 + # via -r requirements/doc.txt +trustme==0.9.0 ; platform_machine != "i686" + # via -r requirements/test.txt +types-chardet==0.1.3 + # via + # -r requirements/lint.txt + # -r requirements/test.txt +typing-extensions==3.7.4.3 + # via + # -r requirements/base.txt + # -r requirements/lint.txt + # async-timeout + # mypy +uritemplate==3.0.1 + # via gidgethub +urllib3==1.26.5 + # via requests +virtualenv==20.4.2 + # via + # -r requirements/lint.txt + # pre-commit +webcolors==1.11.1 + # via blockdiag +yarl==1.6.3 + # via -r requirements/base.txt + +# The following packages are considered to be unsafe in a requirements file: +setuptools==51.3.1 + # via + # blockdiag + # gunicorn + # sphinx diff --git a/requirements/doc-spelling.in b/requirements/doc-spelling.in new file mode 100644 index 00000000000..59ecd6a18bc --- /dev/null +++ b/requirements/doc-spelling.in @@ -0,0 +1,2 @@ +-r doc.txt +sphinxcontrib-spelling==7.2.1; platform_system!="Windows" # We only use it in Travis CI diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 699f7e3f49e..e371d76e30a 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -1,2 +1,98 @@ --r doc.txt -sphinxcontrib-spelling==7.1.0; platform_system!="Windows" # We only use it in Travis CI +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --allow-unsafe requirements/doc-spelling.in +# +aiohttp-theme==0.1.6 + # via -r 
requirements/doc.txt +alabaster==0.7.12 + # via sphinx +babel==2.9.0 + # via sphinx +blockdiag==2.0.1 + # via sphinxcontrib-blockdiag +certifi==2020.12.5 + # via requests +chardet==4.0.0 + # via requests +click==7.1.2 + # via + # click-default-group + # towncrier +click-default-group==1.2.2 + # via towncrier +docutils==0.16 + # via sphinx +funcparserlib==1.0.0a0 + # via + # -r requirements/doc.txt + # blockdiag +idna==2.10 + # via requests +imagesize==1.2.0 + # via sphinx +incremental==17.5.0 + # via towncrier +jinja2==2.11.3 + # via + # sphinx + # towncrier +markupsafe==1.1.1 + # via jinja2 +packaging==20.9 + # via sphinx +pillow==8.3.2 + # via blockdiag +pyenchant==3.2.0 + # via sphinxcontrib-spelling +pygments==2.10.0 + # via + # -r requirements/doc.txt + # sphinx +pyparsing==2.4.7 + # via packaging +pytz==2021.1 + # via babel +requests==2.25.1 + # via sphinx +snowballstemmer==2.1.0 + # via sphinx +sphinx==4.2.0 + # via + # -r requirements/doc.txt + # sphinxcontrib-asyncio + # sphinxcontrib-blockdiag + # sphinxcontrib-spelling +sphinxcontrib-applehelp==1.0.2 + # via sphinx +sphinxcontrib-asyncio==0.3.0 + # via -r requirements/doc.txt +sphinxcontrib-blockdiag==2.0.0 + # via -r requirements/doc.txt +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +sphinxcontrib-spelling==7.2.1 ; platform_system != "Windows" + # via -r requirements/doc-spelling.in +toml==0.10.2 + # via towncrier +towncrier==21.3.0 + # via -r requirements/doc.txt +urllib3==1.26.5 + # via requests +webcolors==1.11.1 + # via blockdiag + +# The following packages are considered to be unsafe in a requirements file: +setuptools==53.0.0 + # via + # blockdiag + # sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index 09d666a9f2c..8d1cd5453d4 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -1,6 
+1,8 @@ aiohttp-theme==0.1.6 -pygments==2.7.3 -sphinx==3.3.1 +# Temp fix till updated: https://github.com/blockdiag/blockdiag/pull/148 +funcparserlib==1.0.0a0 +pygments==2.10.0 +sphinx==4.2.0 sphinxcontrib-asyncio==0.3.0 sphinxcontrib-blockdiag==2.0.0 -towncrier==19.2.0 +towncrier==21.3.0 diff --git a/requirements/lint.in b/requirements/lint.in new file mode 100644 index 00000000000..e76aeff67f3 --- /dev/null +++ b/requirements/lint.in @@ -0,0 +1,8 @@ +black==21.7b0; implementation_name=="cpython" +flake8==3.9.2 +flake8-pyi==20.10.0 +isort==5.9.3 +mypy==0.910; implementation_name=="cpython" +pre-commit==2.15.0 +pytest==6.2.2 +types-chardet==0.1.3 diff --git a/requirements/lint.txt b/requirements/lint.txt index bcae22d6763..5a9e0d5e6ad 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -1,6 +1,85 @@ -black==20.8b1; implementation_name=="cpython" -flake8==3.8.4 +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --allow-unsafe requirements/lint.in +# +appdirs==1.4.4 + # via + # black + # virtualenv +attrs==20.3.0 + # via + # flake8-pyi + # pytest +black==21.7b0 ; implementation_name == "cpython" + # via -r requirements/lint.in +cfgv==3.2.0 + # via pre-commit +click==7.1.2 + # via black +distlib==0.3.1 + # via virtualenv +filelock==3.0.12 + # via virtualenv +flake8==3.9.2 + # via + # -r requirements/lint.in + # flake8-pyi flake8-pyi==20.10.0 -isort==5.6.4 -mypy==0.790; implementation_name=="cpython" -pre-commit==2.9.3 + # via -r requirements/lint.in +identify==1.5.14 + # via pre-commit +iniconfig==1.1.1 + # via pytest +isort==5.9.3 + # via -r requirements/lint.in +mccabe==0.6.1 + # via flake8 +mypy==0.910 ; implementation_name == "cpython" + # via -r requirements/lint.in +mypy-extensions==0.4.3 + # via + # black + # mypy +nodeenv==1.5.0 + # via pre-commit +packaging==20.9 + # via pytest +pathspec==0.8.1 + # via black +pluggy==0.13.1 + # via pytest +pre-commit==2.15.0 + # via -r requirements/lint.in +py==1.10.0 + # via 
pytest +pycodestyle==2.7.0 + # via flake8 +pyflakes==2.3.0 + # via + # flake8 + # flake8-pyi +pyparsing==2.4.7 + # via packaging +pytest==6.2.2 + # via -r requirements/lint.in +pyyaml==5.4.1 + # via pre-commit +regex==2020.11.13 + # via black +six==1.15.0 + # via virtualenv +toml==0.10.2 + # via + # mypy + # pre-commit + # pytest +tomli==1.2.1 + # via black +types-chardet==0.1.3 + # via -r requirements/lint.in +typing-extensions==3.7.4.3 + # via mypy +virtualenv==20.4.2 + # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index 2804688b764..8ba2b11d792 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,14 +1,15 @@ -r base.txt Brotli==1.0.9 -coverage==5.3 -cryptography==3.2.1; platform_machine!="i686" and python_version<"3.9" # no 32-bit wheels; no python 3.9 wheels yet -freezegun==1.0.0 -mypy==0.790; implementation_name=="cpython" +coverage==5.5 +cryptography==3.3.1; platform_machine!="i686" and python_version<"3.9" # no 32-bit wheels; no python 3.9 wheels yet +freezegun==1.1.0 +mypy==0.910; implementation_name=="cpython" mypy-extensions==0.4.3; implementation_name=="cpython" -pytest==6.1.2 -pytest-cov==2.10.1 -pytest-mock==3.3.1 +pytest==6.2.2 +pytest-cov==2.12.1 +pytest-mock==3.6.1 re-assert==1.1.0 setuptools-git==1.2 -trustme==0.6.0; platform_machine!="i686" # no 32-bit wheels +trustme==0.9.0; platform_machine!="i686" # no 32-bit wheels +types-chardet==0.1.3 diff --git a/setup.cfg b/setup.cfg index b1cab9a2b6d..a3a33ad2ba0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -35,77 +35,33 @@ source = aiohttp, tests omit = site-packages [tool:pytest] -addopts = --cov=aiohttp -v -rxXs --durations 10 +addopts = + # show 10 slowest invocations: + --durations=10 + + # a bit of verbosity doesn't hurt: + -v + + # report all the things == -rxXs: + -ra + + # show values of the local vars in errors: + --showlocals + + # `pytest-cov`: + --cov=aiohttp filterwarnings = error ignore:module 'ssl' has no attribute 'OP_NO_COMPRESSION'. 
The Python interpreter is compiled against OpenSSL < 1.0.0. Ref. https.//docs.python.org/3/library/ssl.html#ssl.OP_NO_COMPRESSION:UserWarning + ignore:Exception ignored in. :pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception + ignore:The loop argument is deprecated:DeprecationWarning:asyncio junit_suite_name = aiohttp_test_suite norecursedirs = dist docs build .tox .eggs minversion = 3.8.2 testpaths = tests/ junit_family=xunit2 xfail_strict = true - -[mypy] -follow_imports = silent -strict_optional = True -warn_redundant_casts = True -warn_unused_ignores = True - -# uncomment next lines -# to enable strict mypy mode -# -check_untyped_defs = True -disallow_any_generics = True -disallow_untyped_defs = True - - -[mypy-pytest] -ignore_missing_imports = true - - -[mypy-uvloop] -ignore_missing_imports = true - - -[mypy-tokio] -ignore_missing_imports = true - - -[mypy-aiodns] -ignore_missing_imports = true - - -[mypy-gunicorn.config] -ignore_missing_imports = true - -[mypy-gunicorn.workers] -ignore_missing_imports = true - - -[mypy-brotli] -ignore_missing_imports = true - - -[mypy-chardet] -ignore_missing_imports = true - - -[mypy-cchardet] -ignore_missing_imports = true - - -[mypy-idna_ssl] -ignore_missing_imports = true - - -[mypy-asynctest] -ignore_missing_imports = true - - -[mypy-re_assert] -ignore_missing_imports = true - - -[mypy-trustme] -ignore_missing_imports = true diff --git a/setup.py b/setup.py index a9edd5d0c8d..54b548c7b44 100644 --- a/setup.py +++ b/setup.py @@ -50,7 +50,7 @@ raise RuntimeError("Unable to determine version.") install_requires = [ - "chardet>=2.0,<4.0", + "chardet>=2.0,<5.0", "multidict>=4.5,<7.0", "async_timeout>=4.0a2,<5.0", 'asynctest==0.13.0; python_version<"3.8"', diff --git a/tests/autobahn/.gitignore b/tests/autobahn/.gitignore new file mode 100644 index 00000000000..08ab34c5253 --- /dev/null +++ b/tests/autobahn/.gitignore @@ -0,0 +1 @@ +/reports diff --git a/tests/autobahn/Dockerfile.aiohttp 
b/tests/autobahn/Dockerfile.aiohttp new file mode 100644 index 00000000000..2d37683a1ad --- /dev/null +++ b/tests/autobahn/Dockerfile.aiohttp @@ -0,0 +1,7 @@ +FROM python:3.9.5 + +COPY ./ /src + +WORKDIR /src + +RUN pip install . diff --git a/tests/autobahn/client.py b/tests/autobahn/client.py deleted file mode 100644 index d4b109ce13c..00000000000 --- a/tests/autobahn/client.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python3 - -import asyncio - -import aiohttp - - -async def client(loop, url, name): - ws = await aiohttp.ws_connect(url + "/getCaseCount") - num_tests = int((await ws.receive()).data) - print("running %d cases" % num_tests) - await ws.close() - - for i in range(1, num_tests + 1): - print("running test case:", i) - text_url = url + "/runCase?case=%d&agent=%s" % (i, name) - ws = await aiohttp.ws_connect(text_url) - while True: - msg = await ws.receive() - - if msg.type == aiohttp.WSMsgType.TEXT: - await ws.send_str(msg.data) - elif msg.type == aiohttp.WSMsgType.BINARY: - await ws.send_bytes(msg.data) - elif msg.type == aiohttp.WSMsgType.CLOSE: - await ws.close() - break - else: - break - - url = url + "/updateReports?agent=%s" % name - ws = await aiohttp.ws_connect(url) - await ws.close() - - -async def run(loop, url, name): - try: - await client(loop, url, name) - except Exception: - import traceback - - traceback.print_exc() - - -if __name__ == "__main__": - loop = asyncio.get_event_loop() - try: - loop.run_until_complete(run(loop, "http://localhost:9001", "aiohttp")) - except KeyboardInterrupt: - pass - finally: - loop.close() diff --git a/tests/autobahn/client/client.py b/tests/autobahn/client/client.py new file mode 100644 index 00000000000..107c183070e --- /dev/null +++ b/tests/autobahn/client/client.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 + +import asyncio + +import aiohttp + + +async def client(url: str, name: str) -> None: + async with aiohttp.ClientSession() as session: + async with session.ws_connect(url + "/getCaseCount") as ws: + 
num_tests = int((await ws.receive()).data) + print("running %d cases" % num_tests) + + for i in range(1, num_tests + 1): + print("running test case:", i) + text_url = url + "/runCase?case=%d&agent=%s" % (i, name) + async with session.ws_connect(text_url) as ws: + async for msg in ws: + if msg.type == aiohttp.WSMsgType.TEXT: + await ws.send_str(msg.data) + elif msg.type == aiohttp.WSMsgType.BINARY: + await ws.send_bytes(msg.data) + else: + break + + url = url + "/updateReports?agent=%s" % name + async with session.ws_connect(url) as ws: + print("finally requesting %s" % url) + + +async def run(url: str, name: str) -> None: + try: + await client(url, name) + except Exception: + import traceback + + traceback.print_exc() + + +if __name__ == "__main__": + asyncio.run(run("http://autobahn:9001", "aiohttp")) diff --git a/tests/autobahn/client/docker-compose.yml b/tests/autobahn/client/docker-compose.yml new file mode 100644 index 00000000000..ac6a8bf3ab7 --- /dev/null +++ b/tests/autobahn/client/docker-compose.yml @@ -0,0 +1,17 @@ +version: "3.9" +services: + autobahn: + image: crossbario/autobahn-testsuite:0.8.2 + volumes: + - type: bind + source: ./fuzzingserver.json + target: /config/fuzzingserver.json + - type: bind + source: ../reports + target: /reports + + aiohttp: + image: aiohttp-autobahn_aiohttp + depends_on: + - autobahn + command: ["python", "tests/autobahn/client/client.py"] diff --git a/tests/autobahn/fuzzingserver.json b/tests/autobahn/client/fuzzingserver.json similarity index 100% rename from tests/autobahn/fuzzingserver.json rename to tests/autobahn/client/fuzzingserver.json diff --git a/tests/autobahn/docker-compose.yml b/tests/autobahn/docker-compose.yml new file mode 100644 index 00000000000..ea6b640810d --- /dev/null +++ b/tests/autobahn/docker-compose.yml @@ -0,0 +1,6 @@ +version: "3.9" +services: + aiohttp: + build: + context: ../.. 
+ dockerfile: tests/autobahn/Dockerfile.aiohttp diff --git a/tests/autobahn/fuzzingclient.json b/tests/autobahn/fuzzingclient.json deleted file mode 100644 index 31c39d21ac5..00000000000 --- a/tests/autobahn/fuzzingclient.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "options": {"failByDrop": false}, - "outdir": "./reports/servers", - - "servers": [{"agent": "AutobahnServer", "url": "ws://localhost:9001", "options": {"version": 18}}], - - "cases": ["*"], - "exclude-cases": ["12.*", "13.*"], - "exclude-agent-cases": {} -} diff --git a/tests/autobahn/run-tests.sh b/tests/autobahn/run-tests.sh new file mode 100755 index 00000000000..d48894d8cb8 --- /dev/null +++ b/tests/autobahn/run-tests.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +rm -rf $PWD/reports +mkdir $PWD/reports + +docker-compose -p aiohttp-autobahn build + +docker-compose -f $PWD/client/docker-compose.yml up --abort-on-container-exit +docker-compose -f $PWD/client/docker-compose.yml down + +docker-compose -f $PWD/server/docker-compose.yml up --abort-on-container-exit +docker-compose -f $PWD/server/docker-compose.yml down diff --git a/tests/autobahn/server/docker-compose.yml b/tests/autobahn/server/docker-compose.yml new file mode 100644 index 00000000000..8f12f2d19cc --- /dev/null +++ b/tests/autobahn/server/docker-compose.yml @@ -0,0 +1,18 @@ +version: "3.9" +services: + autobahn: + image: crossbario/autobahn-testsuite:0.8.2 + depends_on: + - aiohttp + volumes: + - type: bind + source: ./fuzzingclient.json + target: /config/fuzzingclient.json + - type: bind + source: ../reports + target: /reports + command: ["wstest", "--mode", "fuzzingclient", "--spec", "/config/fuzzingclient.json"] + + aiohttp: + image: aiohttp-autobahn_aiohttp + command: ["python", "tests/autobahn/server/server.py"] diff --git a/tests/autobahn/server/fuzzingclient.json b/tests/autobahn/server/fuzzingclient.json new file mode 100644 index 00000000000..e9bef9591dc --- /dev/null +++ b/tests/autobahn/server/fuzzingclient.json @@ -0,0 +1,16 @@ +{ + 
"options": { "failByDrop": false }, + "outdir": "./reports/servers", + + "servers": [ + { + "agent": "AutobahnServer", + "url": "ws://aiohttp:9001", + "options": { "version": 18 } + } + ], + + "cases": ["*"], + "exclude-cases": ["12.*", "13.*"], + "exclude-agent-cases": {} +} diff --git a/tests/autobahn/server.py b/tests/autobahn/server/server.py similarity index 56% rename from tests/autobahn/server.py rename to tests/autobahn/server/server.py index e5efd49b5b1..d4ca04b1d5f 100644 --- a/tests/autobahn/server.py +++ b/tests/autobahn/server/server.py @@ -1,16 +1,15 @@ #!/usr/bin/env python3 -import asyncio import logging -from aiohttp import web +from aiohttp import WSCloseCode, web -async def wshandler(request): +async def wshandler(request: web.Request) -> web.WebSocketResponse: ws = web.WebSocketResponse(autoclose=False) is_ws = ws.can_prepare(request) if not is_ws: - return web.HTTPBadRequest() + raise web.HTTPBadRequest() await ws.prepare(request) @@ -30,29 +29,20 @@ async def wshandler(request): return ws -async def main(loop): - app = web.Application() - app.router.add_route("GET", "/", wshandler) - - handler = app._make_handler() - srv = await loop.create_server(handler, "127.0.0.1", 9001) - print("Server started at http://127.0.0.1:9001") - return app, srv, handler - - -async def finish(app, srv, handler): - srv.close() - await handler.shutdown() - await srv.wait_closed() +async def on_shutdown(app: web.Application) -> None: + for ws in set(app["websockets"]): + await ws.close(code=WSCloseCode.GOING_AWAY, message="Server shutdown") if __name__ == "__main__": - loop = asyncio.get_event_loop() logging.basicConfig( level=logging.DEBUG, format="%(asctime)s %(levelname)s %(message)s" ) - app, srv, handler = loop.run_until_complete(main(loop)) + + app = web.Application() + app.router.add_route("GET", "/", wshandler) + app.on_shutdown.append(on_shutdown) try: - loop.run_forever() + web.run_app(app, port=9001) except KeyboardInterrupt: - 
loop.run_until_complete(finish(app, srv, handler)) + print("Server stopped at http://127.0.0.1:9001") diff --git a/tests/conftest.py b/tests/conftest.py index 8946be9010c..eda5c60b727 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -46,7 +46,7 @@ def tls_certificate_authority() -> Any: @pytest.fixture def tls_certificate(tls_certificate_authority: Any) -> Any: - return tls_certificate_authority.issue_server_cert( + return tls_certificate_authority.issue_cert( "localhost", "127.0.0.1", "::1", diff --git a/tests/sample.txt b/tests/sample.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/tests/sample.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 52d74d98324..79e007537cd 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -8,11 +8,13 @@ import json import pathlib import socket +import ssl from typing import Any from unittest import mock import pytest from multidict import MultiDict +from yarl import URL import aiohttp from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web @@ -2333,25 +2335,85 @@ async def test_creds_in_auth_and_url() -> None: await session.close() -async def test_drop_auth_on_redirect_to_other_host(aiohttp_server: Any) -> None: - async def srv1(request): - assert request.host == "host1.com" +@pytest.fixture +def create_server_for_url_and_handler( + aiohttp_server: Any, tls_certificate_authority: Any +): + def create(url: URL, srv: Any): + app = web.Application() + app.router.add_route("GET", url.path, srv) + + kwargs = {} + if url.scheme == "https": + cert = tls_certificate_authority.issue_cert( + url.host, "localhost", "127.0.0.1" + ) + ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + cert.configure_cert(ssl_ctx) + kwargs["ssl"] = ssl_ctx + return aiohttp_server(app, **kwargs) + + return create + + +@pytest.mark.parametrize( + ["url_from", "url_to", "is_drop_header_expected"], + [ + [ + "http://host1.com/path1", + "http://host2.com/path2", + True, + ], + ["http://host1.com/path1", 
"https://host1.com/path1", False], + ["https://host1.com/path1", "http://host1.com/path2", True], + ], + ids=( + "entirely different hosts", + "http -> https", + "https -> http", + ), +) +async def test_drop_auth_on_redirect_to_other_host( + create_server_for_url_and_handler: Any, + url_from: str, + url_to: str, + is_drop_header_expected: bool, +) -> None: + url_from, url_to = URL(url_from), URL(url_to) + + async def srv_from(request): + assert request.host == url_from.host assert request.headers["Authorization"] == "Basic dXNlcjpwYXNz" - raise web.HTTPFound("http://host2.com/path2") + raise web.HTTPFound(url_to) - async def srv2(request): - assert request.host == "host2.com" - assert "Authorization" not in request.headers + async def srv_to(request): + assert request.host == url_to.host + if is_drop_header_expected: + assert "Authorization" not in request.headers, "Header wasn't dropped" + else: + assert "Authorization" in request.headers, "Header was dropped" return web.Response() - app = web.Application() - app.router.add_route("GET", "/path1", srv1) - app.router.add_route("GET", "/path2", srv2) + server_from = await create_server_for_url_and_handler(url_from, srv_from) + server_to = await create_server_for_url_and_handler(url_to, srv_to) - server = await aiohttp_server(app) + assert ( + url_from.host != url_to.host or server_from.scheme != server_to.scheme + ), "Invalid test case, host or scheme must differ" + + protocol_port_map = { + "http": 80, + "https": 443, + } + etc_hosts = { + (url_from.host, protocol_port_map[server_from.scheme]): server_from, + (url_to.host, protocol_port_map[server_to.scheme]): server_to, + } class FakeResolver(AbstractResolver): async def resolve(self, host, port=0, family=socket.AF_INET): + server = etc_hosts[(host, port)] + return [ { "hostname": host, @@ -2366,14 +2428,17 @@ async def resolve(self, host, port=0, family=socket.AF_INET): async def close(self): pass - connector = aiohttp.TCPConnector(resolver=FakeResolver()) + 
connector = aiohttp.TCPConnector(resolver=FakeResolver(), ssl=False) + async with aiohttp.ClientSession(connector=connector) as client: resp = await client.get( - "http://host1.com/path1", auth=aiohttp.BasicAuth("user", "pass") + url_from, + auth=aiohttp.BasicAuth("user", "pass"), ) assert resp.status == 200 resp = await client.get( - "http://host1.com/path1", headers={"Authorization": "Basic dXNlcjpwYXNz"} + url_from, + headers={"Authorization": "Basic dXNlcjpwYXNz"}, ) assert resp.status == 200 diff --git a/tests/test_client_request.py b/tests/test_client_request.py index d2aea6a5990..cfe2a45edc7 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -580,6 +580,7 @@ async def test_content_type_auto_header_get(loop: Any, conn: Any) -> None: resp = await req.send(conn) assert "CONTENT-TYPE" not in req.headers resp.close() + await req.close() async def test_content_type_auto_header_form(loop: Any, conn: Any) -> None: @@ -686,6 +687,7 @@ async def test_pass_falsy_data_file(loop: Any, tmp_path: Any) -> None: ) assert req.headers.get("CONTENT-LENGTH", None) is not None await req.close() + testfile.close() # Elasticsearch API requires to send request body with GET-requests diff --git a/tests/test_client_response.py b/tests/test_client_response.py index 9b81e3ea6f2..136d7853424 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -49,6 +49,7 @@ async def test_http_processing_error(session: Any) -> None: await response.start(connection) assert info.value.request_info is request_info + response.close() def test_del(session: Any) -> None: @@ -1243,3 +1244,24 @@ def test_response_links_empty(loop: Any, session: Any) -> None: ) response._headers = CIMultiDict() assert response.links == {} + + +def test_response_not_closed_after_get_ok(mocker) -> None: + response = ClientResponse( + "get", + URL("http://del-cl-resp.org"), + request_info=mock.Mock(), + writer=mock.Mock(), + continue100=None, + timer=TimerNoop(), + 
traces=[], + loop=mock.Mock(), + session=mock.Mock(), + ) + response.status = 400 + response.reason = "Bad Request" + response._closed = False + spy = mocker.spy(response, "raise_for_status") + assert not response.ok + assert not response.closed + assert spy.call_count == 0 diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 859f5b38f06..effe011dbb6 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -31,7 +31,7 @@ async def make_conn(): proto = create_mocked_conn() conn._conns["a"] = [(proto, 123)] yield conn - conn.close() + loop.run_until_complete(conn.close()) @pytest.fixture @@ -293,7 +293,7 @@ async def test_connector(create_session: Any, loop: Any, mocker: Any) -> None: await session.close() assert connector.close.called - connector.close() + await connector.close() async def test_create_connector(create_session: Any, loop: Any, mocker: Any) -> None: @@ -327,7 +327,7 @@ async def make_sess(): ) -def test_detach(session: Any) -> None: +def test_detach(loop: Any, session: Any) -> None: conn = session.connector try: assert not conn.closed @@ -336,7 +336,7 @@ def test_detach(session: Any) -> None: assert session.closed assert not conn.closed finally: - conn.close() + loop.run_until_complete(conn.close()) async def test_request_closed_session(session: Any) -> None: @@ -510,6 +510,7 @@ async def handler(request): async def test_session_default_version(loop: Any) -> None: session = aiohttp.ClientSession() assert session.version == aiohttp.HttpVersion11 + await session.close() def test_proxy_str(session: Any, params: Any) -> None: @@ -627,6 +628,8 @@ async def test_request_tracing_exception() -> None: ) assert not on_request_end.called + await session.close() + async def test_request_tracing_interpose_headers( loop: Any, aiohttp_client: Any @@ -669,23 +672,37 @@ async def test_client_session_custom_attr() -> None: session = ClientSession() with pytest.raises(AttributeError): session.custom = None + await 
session.close() async def test_client_session_timeout_default_args(loop: Any) -> None: session1 = ClientSession() assert session1.timeout == client.DEFAULT_TIMEOUT + await session1.close() async def test_client_session_timeout_argument() -> None: session = ClientSession(timeout=500) assert session.timeout == 500 + await session.close() + + +async def test_client_session_timeout_zero() -> None: + timeout = client.ClientTimeout(total=10, connect=0, sock_connect=0, sock_read=0) + try: + async with ClientSession(timeout=timeout) as session: + await session.get("http://example.com") + except asyncio.TimeoutError: + pytest.fail("0 should disable timeout.") async def test_requote_redirect_url_default() -> None: session = ClientSession() assert session.requote_redirect_url + await session.close() async def test_requote_redirect_url_default_disable() -> None: session = ClientSession(requote_redirect_url=False) assert not session.requote_redirect_url + await session.close() diff --git a/tests/test_connector.py b/tests/test_connector.py index 765324cead8..12ab38cb5be 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -188,7 +188,7 @@ async def test_del_with_scheduled_cleanup(loop: Any) -> None: # obviously doesn't deletion because loop has a strong # reference to connector's instance method, isn't it? 
del conn - await asyncio.sleep(0.01, loop=loop) + await asyncio.sleep(0.01) gc.collect() assert not conns_impl @@ -646,7 +646,7 @@ def get_extra_info(param): conn._loop.create_connection = create_connection - await conn.connect(req, [], ClientTimeout()) + established_connection = await conn.connect(req, [], ClientTimeout()) assert ips == ips_tried assert os_error @@ -655,6 +655,8 @@ def get_extra_info(param): assert fingerprint_error assert connected + established_connection.close() + async def test_tcp_connector_resolve_host(loop: Any) -> None: conn = aiohttp.TCPConnector(use_dns_cache=True) @@ -1599,6 +1601,8 @@ async def test_connect_with_limit_cancelled(loop: Any) -> None: await asyncio.wait_for(conn.connect(req, None, ClientTimeout()), 0.01) connection.close() + await conn.close() + async def test_connect_with_capacity_release_waiters(loop: Any) -> None: def check_with_exc(err): @@ -2262,3 +2266,5 @@ async def allow_connection_and_add_dummy_waiter() -> None: await_connection_and_check_waiters(), allow_connection_and_add_dummy_waiter(), ) + + await connector.close() diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 859ef2728c8..e9b99e12170 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -6,6 +6,7 @@ import platform from math import isclose, modf from unittest import mock +from urllib.request import getproxies_environment import pytest from multidict import CIMultiDict, MultiDict @@ -515,6 +516,96 @@ def test_proxies_from_env_http_with_auth(mocker) -> None: assert proxy_auth.encoding == "latin1" +# --------------------- get_env_proxy_for_url ------------------------------ + + +@pytest.fixture +def proxy_env_vars(monkeypatch, request): + for schema in getproxies_environment().keys(): + monkeypatch.delenv(f"{schema}_proxy", False) + + for proxy_type, proxy_list in request.param.items(): + monkeypatch.setenv(proxy_type, proxy_list) + + return request.param + + +@pytest.mark.parametrize( + ("proxy_env_vars", "url_input", 
"expected_err_msg"), + ( + ( + {"no_proxy": "aiohttp.io"}, + "http://aiohttp.io/path", + r"Proxying is disallowed for `'aiohttp.io'`", + ), + ( + {"no_proxy": "aiohttp.io,proxy.com"}, + "http://aiohttp.io/path", + r"Proxying is disallowed for `'aiohttp.io'`", + ), + ( + {"http_proxy": "http://example.com"}, + "https://aiohttp.io/path", + r"No proxies found for `https://aiohttp.io/path` in the env", + ), + ( + {"https_proxy": "https://example.com"}, + "http://aiohttp.io/path", + r"No proxies found for `http://aiohttp.io/path` in the env", + ), + ( + {}, + "https://aiohttp.io/path", + r"No proxies found for `https://aiohttp.io/path` in the env", + ), + ( + {"https_proxy": "https://example.com"}, + "", + r"No proxies found for `` in the env", + ), + ), + indirect=["proxy_env_vars"], + ids=( + "url_matches_the_no_proxy_list", + "url_matches_the_no_proxy_list_multiple", + "url_scheme_does_not_match_http_proxy_list", + "url_scheme_does_not_match_https_proxy_list", + "no_proxies_are_set", + "url_is_empty", + ), +) +@pytest.mark.usefixtures("proxy_env_vars") +def test_get_env_proxy_for_url_negative(url_input, expected_err_msg) -> None: + url = URL(url_input) + with pytest.raises(LookupError, match=expected_err_msg): + helpers.get_env_proxy_for_url(url) + + +@pytest.mark.parametrize( + ("proxy_env_vars", "url_input"), + ( + ({"http_proxy": "http://example.com"}, "http://aiohttp.io/path"), + ({"https_proxy": "http://example.com"}, "https://aiohttp.io/path"), + ( + {"http_proxy": "http://example.com,http://proxy.org"}, + "http://aiohttp.io/path", + ), + ), + indirect=["proxy_env_vars"], + ids=( + "url_scheme_match_http_proxy_list", + "url_scheme_match_https_proxy_list", + "url_scheme_match_http_proxy_list_multiple", + ), +) +def test_get_env_proxy_for_url(proxy_env_vars, url_input) -> None: + url = URL(url_input) + proxy, proxy_auth = helpers.get_env_proxy_for_url(url) + proxy_list = proxy_env_vars[url.scheme + "_proxy"] + assert proxy == URL(proxy_list) + assert proxy_auth 
is None + + # ------------- set_result / set_exception ---------------------- diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 77c62addf6b..172d7bc30cf 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -530,6 +530,7 @@ def test_http_request_parser_two_slashes(parser: Any) -> None: assert msg.method == "GET" assert msg.path == "//path" + assert msg.url.path == "//path" assert msg.version == (1, 1) assert not msg.should_close assert msg.compression is None diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index ab62ffc31b5..3fb5531ca1d 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -5,6 +5,7 @@ from unittest import mock import pytest +from multidict import CIMultiDict from aiohttp import http from aiohttp.test_utils import make_mocked_coro @@ -272,3 +273,16 @@ async def test_drain_no_transport(protocol: Any, transport: Any, loop: Any) -> N msg._protocol.transport = None await msg.drain() assert not protocol._drain_helper.called + + +async def test_write_headers_prevents_injection( + protocol: Any, transport: Any, loop: Any +) -> None: + msg = http.StreamWriter(protocol, loop) + status_line = "HTTP/1.1 200 OK" + wrong_headers = CIMultiDict({"Set-Cookie: abc=123\r\nContent-Length": "256"}) + with pytest.raises(ValueError): + await msg.write_headers(status_line, wrong_headers) + wrong_headers = CIMultiDict({"Content-Length": "256\r\nSet-Cookie: abc=123"}) + with pytest.raises(ValueError): + await msg.write_headers(status_line, wrong_headers) diff --git a/tests/test_locks.py b/tests/test_locks.py index d2d3e8f141e..60a816ed647 100644 --- a/tests/test_locks.py +++ b/tests/test_locks.py @@ -19,7 +19,7 @@ async def c() -> Union[int, Exception]: return 1 t = loop.create_task(c()) - await asyncio.sleep(0, loop=loop) + await asyncio.sleep(0) e = Exception() ev.set(exc=e) assert (await t) == e @@ -32,7 +32,7 @@ async def c() -> int: return 1 t = loop.create_task(c()) - await 
asyncio.sleep(0, loop=loop) + await asyncio.sleep(0) ev.set() assert (await t) == 1 @@ -44,7 +44,7 @@ async def c() -> None: t1 = loop.create_task(c()) t2 = loop.create_task(c()) - await asyncio.sleep(0, loop=loop) + await asyncio.sleep(0) ev.cancel() ev.set() diff --git a/tests/test_loop.py b/tests/test_loop.py index 3211b756702..b72771a175a 100644 --- a/tests/test_loop.py +++ b/tests/test_loop.py @@ -6,14 +6,15 @@ import pytest from aiohttp import web -from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop +from aiohttp.helpers import PY_38 +from aiohttp.test_utils import AioHTTPTestCase, loop_context @pytest.mark.skipif( platform.system() == "Windows", reason="the test is not valid for Windows" ) async def test_subprocess_co(loop: Any) -> None: - assert threading.current_thread() is threading.main_thread() + assert PY_38 or threading.current_thread() is threading.main_thread() proc = await asyncio.create_subprocess_shell( "exit 0", stdin=asyncio.subprocess.DEVNULL, @@ -34,7 +35,6 @@ async def get_application(self) -> web.Application: async def on_startup_hook(self, app: Any) -> None: self.on_startup_called = True - @unittest_run_loop async def test_on_startup_hook(self) -> None: self.assertTrue(self.on_startup_called) @@ -44,3 +44,24 @@ def test_default_loop(self) -> None: def test_default_loop(loop: Any) -> None: assert asyncio.get_event_loop() is loop + + +@pytest.mark.xfail(not PY_38, reason="ThreadedChildWatcher is only available in 3.8+") +def test_setup_loop_non_main_thread() -> None: + child_exc = None + + def target() -> None: + try: + with loop_context() as loop: + assert asyncio.get_event_loop() is loop + loop.run_until_complete(test_subprocess_co(loop)) + except Exception as exc: + nonlocal child_exc + child_exc = exc + + # Ensures setup_test_loop can be called by pytest-xdist in non-main thread. 
+ t = threading.Thread(target=target) + t.start() + t.join() + + assert child_exc is None diff --git a/tests/test_payload.py b/tests/test_payload.py index 5f4fc998835..d3ca69861e0 100644 --- a/tests/test_payload.py +++ b/tests/test_payload.py @@ -34,7 +34,9 @@ class TestProvider: pass with pytest.raises(ValueError): - payload.register_payload(Payload, TestProvider, order=object()) # type: ignore + payload.register_payload( + Payload, TestProvider, order=object() # type: ignore[arg-type] + ) def test_payload_ctor() -> None: @@ -63,7 +65,7 @@ def test_bytes_payload_explicit_content_type() -> None: def test_bytes_payload_bad_type() -> None: with pytest.raises(TypeError): - payload.BytesPayload(object()) # type: ignore + payload.BytesPayload(object()) # type: ignore[arg-type] def test_bytes_payload_memoryview_correct_size() -> None: @@ -115,4 +117,4 @@ async def gen() -> AsyncIterator[bytes]: def test_async_iterable_payload_not_async_iterable() -> None: with pytest.raises(TypeError): - payload.AsyncIterablePayload(object()) # type: ignore + payload.AsyncIterablePayload(object()) # type: ignore[arg-type] diff --git a/tests/test_proxy.py b/tests/test_proxy.py index e4aa81f4c2e..c778f85f531 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -75,6 +75,8 @@ async def make_conn(): ssl=None, ) + conn.close() + @mock.patch("aiohttp.connector.ClientRequest") def test_proxy_headers(self, ClientRequestMock: Any) -> None: req = ClientRequest( @@ -115,6 +117,8 @@ async def make_conn(): ssl=None, ) + conn.close() + def test_proxy_auth(self) -> None: with self.assertRaises(ValueError) as ctx: ClientRequest( diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 81107e849b3..835f2c6714f 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -94,7 +94,7 @@ def test_run_app_http(patched_loop: Any) -> None: cleanup_handler = make_mocked_coro() app.on_cleanup.append(cleanup_handler) - web.run_app(app, print=stopper(patched_loop)) + web.run_app(app, 
print=stopper(patched_loop), loop=patched_loop) patched_loop.create_server.assert_called_with( mock.ANY, None, 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=None @@ -105,7 +105,7 @@ def test_run_app_http(patched_loop: Any) -> None: def test_run_app_close_loop(patched_loop: Any) -> None: app = web.Application() - web.run_app(app, print=stopper(patched_loop)) + web.run_app(app, print=stopper(patched_loop), loop=patched_loop) patched_loop.create_server.assert_called_with( mock.ANY, None, 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=None @@ -428,7 +428,7 @@ def test_run_app_mixed_bindings( patched_loop: Any, ) -> None: app = web.Application() - web.run_app(app, print=stopper(patched_loop), **run_app_kwargs) + web.run_app(app, print=stopper(patched_loop), **run_app_kwargs, loop=patched_loop) assert patched_loop.create_unix_server.mock_calls == expected_unix_server_calls assert patched_loop.create_server.mock_calls == expected_server_calls @@ -438,7 +438,9 @@ def test_run_app_https(patched_loop: Any) -> None: app = web.Application() ssl_context = ssl.create_default_context() - web.run_app(app, ssl_context=ssl_context, print=stopper(patched_loop)) + web.run_app( + app, ssl_context=ssl_context, print=stopper(patched_loop), loop=patched_loop + ) patched_loop.create_server.assert_called_with( mock.ANY, @@ -458,7 +460,9 @@ def test_run_app_nondefault_host_port( host = "127.0.0.1" app = web.Application() - web.run_app(app, host=host, port=port, print=stopper(patched_loop)) + web.run_app( + app, host=host, port=port, print=stopper(patched_loop), loop=patched_loop + ) patched_loop.create_server.assert_called_with( mock.ANY, host, port, ssl=None, backlog=128, reuse_address=None, reuse_port=None @@ -469,7 +473,7 @@ def test_run_app_multiple_hosts(patched_loop: Any) -> None: hosts = ("127.0.0.1", "127.0.0.2") app = web.Application() - web.run_app(app, host=hosts, print=stopper(patched_loop)) + web.run_app(app, host=hosts, print=stopper(patched_loop), 
loop=patched_loop) calls = map( lambda h: mock.call( @@ -488,7 +492,7 @@ def test_run_app_multiple_hosts(patched_loop: Any) -> None: def test_run_app_custom_backlog(patched_loop: Any) -> None: app = web.Application() - web.run_app(app, backlog=10, print=stopper(patched_loop)) + web.run_app(app, backlog=10, print=stopper(patched_loop), loop=patched_loop) patched_loop.create_server.assert_called_with( mock.ANY, None, 8080, ssl=None, backlog=10, reuse_address=None, reuse_port=None @@ -497,7 +501,13 @@ def test_run_app_custom_backlog(patched_loop: Any) -> None: def test_run_app_custom_backlog_unix(patched_loop: Any) -> None: app = web.Application() - web.run_app(app, path="/tmp/tmpsock.sock", backlog=10, print=stopper(patched_loop)) + web.run_app( + app, + path="/tmp/tmpsock.sock", + backlog=10, + print=stopper(patched_loop), + loop=patched_loop, + ) patched_loop.create_unix_server.assert_called_with( mock.ANY, "/tmp/tmpsock.sock", ssl=None, backlog=10 @@ -510,7 +520,7 @@ def test_run_app_http_unix_socket(patched_loop: Any, tmp_path: Any) -> None: sock_path = str(tmp_path / "socket.sock") printer = mock.Mock(wraps=stopper(patched_loop)) - web.run_app(app, path=sock_path, print=printer) + web.run_app(app, path=sock_path, print=printer, loop=patched_loop) patched_loop.create_unix_server.assert_called_with( mock.ANY, sock_path, ssl=None, backlog=128 @@ -525,7 +535,9 @@ def test_run_app_https_unix_socket(patched_loop: Any, tmp_path: Any) -> None: sock_path = str(tmp_path / "socket.sock") ssl_context = ssl.create_default_context() printer = mock.Mock(wraps=stopper(patched_loop)) - web.run_app(app, path=sock_path, ssl_context=ssl_context, print=printer) + web.run_app( + app, path=sock_path, ssl_context=ssl_context, print=printer, loop=patched_loop + ) patched_loop.create_unix_server.assert_called_with( mock.ANY, sock_path, ssl=ssl_context, backlog=128 @@ -539,7 +551,10 @@ def test_run_app_abstract_linux_socket(patched_loop: Any) -> None: sock_path = b"\x00" + 
uuid4().hex.encode("ascii") app = web.Application() web.run_app( - app, path=sock_path.decode("ascii", "ignore"), print=stopper(patched_loop) + app, + path=sock_path.decode("ascii", "ignore"), + print=stopper(patched_loop), + loop=patched_loop, ) patched_loop.create_unix_server.assert_called_with( @@ -556,7 +571,7 @@ def test_run_app_preexisting_inet_socket(patched_loop: Any, mocker: Any) -> None _, port = sock.getsockname() printer = mock.Mock(wraps=stopper(patched_loop)) - web.run_app(app, sock=sock, print=printer) + web.run_app(app, sock=sock, print=printer, loop=patched_loop) patched_loop.create_server.assert_called_with( mock.ANY, sock=sock, backlog=128, ssl=None @@ -574,7 +589,7 @@ def test_run_app_preexisting_inet6_socket(patched_loop: Any) -> None: port = sock.getsockname()[1] printer = mock.Mock(wraps=stopper(patched_loop)) - web.run_app(app, sock=sock, print=printer) + web.run_app(app, sock=sock, print=printer, loop=patched_loop) patched_loop.create_server.assert_called_with( mock.ANY, sock=sock, backlog=128, ssl=None @@ -593,7 +608,7 @@ def test_run_app_preexisting_unix_socket(patched_loop: Any, mocker: Any) -> None os.unlink(sock_path) printer = mock.Mock(wraps=stopper(patched_loop)) - web.run_app(app, sock=sock, print=printer) + web.run_app(app, sock=sock, print=printer, loop=patched_loop) patched_loop.create_server.assert_called_with( mock.ANY, sock=sock, backlog=128, ssl=None @@ -613,7 +628,7 @@ def test_run_app_multiple_preexisting_sockets(patched_loop: Any) -> None: _, port2 = sock2.getsockname() printer = mock.Mock(wraps=stopper(patched_loop)) - web.run_app(app, sock=(sock1, sock2), print=printer) + web.run_app(app, sock=(sock1, sock2), print=printer, loop=patched_loop) patched_loop.create_server.assert_has_calls( [ @@ -636,27 +651,29 @@ def test_run_app_multiple_preexisting_sockets(patched_loop: Any) -> None: def test_sigint() -> None: skip_if_on_windows() - proc = subprocess.Popen( - [sys.executable, "-u", "-c", _script_test_signal], 
stdout=subprocess.PIPE - ) - for line in proc.stdout: - if line.startswith(b"======== Running on"): - break - proc.send_signal(signal.SIGINT) - assert proc.wait() == 0 + with subprocess.Popen( + [sys.executable, "-u", "-c", _script_test_signal], + stdout=subprocess.PIPE, + ) as proc: + for line in proc.stdout: + if line.startswith(b"======== Running on"): + break + proc.send_signal(signal.SIGINT) + assert proc.wait() == 0 def test_sigterm() -> None: skip_if_on_windows() - proc = subprocess.Popen( - [sys.executable, "-u", "-c", _script_test_signal], stdout=subprocess.PIPE - ) - for line in proc.stdout: - if line.startswith(b"======== Running on"): - break - proc.terminate() - assert proc.wait() == 0 + with subprocess.Popen( + [sys.executable, "-u", "-c", _script_test_signal], + stdout=subprocess.PIPE, + ) as proc: + for line in proc.stdout: + if line.startswith(b"======== Running on"): + break + proc.terminate() + assert proc.wait() == 0 def test_startup_cleanup_signals_even_on_failure(patched_loop: Any) -> None: @@ -669,7 +686,7 @@ def test_startup_cleanup_signals_even_on_failure(patched_loop: Any) -> None: app.on_cleanup.append(cleanup_handler) with pytest.raises(RuntimeError): - web.run_app(app, print=stopper(patched_loop)) + web.run_app(app, print=stopper(patched_loop), loop=patched_loop) startup_handler.assert_called_once_with(app) cleanup_handler.assert_called_once_with(app) @@ -687,7 +704,7 @@ async def make_app(): app.on_cleanup.append(cleanup_handler) return app - web.run_app(make_app(), print=stopper(patched_loop)) + web.run_app(make_app(), print=stopper(patched_loop), loop=patched_loop) patched_loop.create_server.assert_called_with( mock.ANY, None, 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=None @@ -707,7 +724,13 @@ def test_run_app_default_logger(monkeypatch: Any, patched_loop: Any) -> None: mock_logger.configure_mock(**attrs) app = web.Application() - web.run_app(app, debug=True, print=stopper(patched_loop), access_log=mock_logger) + 
web.run_app( + app, + debug=True, + print=stopper(patched_loop), + access_log=mock_logger, + loop=patched_loop, + ) mock_logger.setLevel.assert_any_call(logging.DEBUG) mock_logger.hasHandlers.assert_called_with() assert isinstance(mock_logger.addHandler.call_args[0][0], logging.StreamHandler) @@ -724,7 +747,13 @@ def test_run_app_default_logger_setup_requires_debug(patched_loop: Any) -> None: mock_logger.configure_mock(**attrs) app = web.Application() - web.run_app(app, debug=False, print=stopper(patched_loop), access_log=mock_logger) + web.run_app( + app, + debug=False, + print=stopper(patched_loop), + access_log=mock_logger, + loop=patched_loop, + ) mock_logger.setLevel.assert_not_called() mock_logger.hasHandlers.assert_not_called() mock_logger.addHandler.assert_not_called() @@ -743,7 +772,13 @@ def test_run_app_default_logger_setup_requires_default_logger( mock_logger.configure_mock(**attrs) app = web.Application() - web.run_app(app, debug=True, print=stopper(patched_loop), access_log=mock_logger) + web.run_app( + app, + debug=True, + print=stopper(patched_loop), + access_log=mock_logger, + loop=patched_loop, + ) mock_logger.setLevel.assert_not_called() mock_logger.hasHandlers.assert_not_called() mock_logger.addHandler.assert_not_called() @@ -760,7 +795,13 @@ def test_run_app_default_logger_setup_only_if_unconfigured(patched_loop: Any) -> mock_logger.configure_mock(**attrs) app = web.Application() - web.run_app(app, debug=True, print=stopper(patched_loop), access_log=mock_logger) + web.run_app( + app, + debug=True, + print=stopper(patched_loop), + access_log=mock_logger, + loop=patched_loop, + ) mock_logger.setLevel.assert_not_called() mock_logger.hasHandlers.assert_called_with() mock_logger.addHandler.assert_not_called() @@ -777,7 +818,7 @@ async def on_startup(app): app.on_startup.append(on_startup) - web.run_app(app, print=stopper(patched_loop)) + web.run_app(app, print=stopper(patched_loop), loop=patched_loop) assert task.cancelled() @@ -795,7 +836,7 @@ 
async def on_startup(app): app.on_startup.append(on_startup) - web.run_app(app, print=stopper(patched_loop)) + web.run_app(app, print=stopper(patched_loop), loop=patched_loop) assert task.done() @@ -821,7 +862,7 @@ async def on_startup(app): exc_handler = mock.Mock() patched_loop.set_exception_handler(exc_handler) - web.run_app(app, print=stopper(patched_loop)) + web.run_app(app, print=stopper(patched_loop), loop=patched_loop) assert task.done() msg = { @@ -844,7 +885,12 @@ def base_runner_init_spy(self, *args, **kwargs): app = web.Application() monkeypatch.setattr(BaseRunner, "__init__", base_runner_init_spy) - web.run_app(app, keepalive_timeout=new_timeout, print=stopper(patched_loop)) + web.run_app( + app, + keepalive_timeout=new_timeout, + print=stopper(patched_loop), + loop=patched_loop, + ) def test_run_app_context_vars(patched_loop: Any): @@ -875,5 +921,5 @@ async def init(): count += 1 return app - web.run_app(init(), print=stopper(patched_loop)) + web.run_app(init(), print=stopper(patched_loop), loop=patched_loop) assert count == 3 diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 9ef27154846..2a540f104e3 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -16,7 +16,6 @@ TestServer as _TestServer, loop_context, make_mocked_request, - unittest_run_loop, ) _hello_world_str = "Hello, world" @@ -90,7 +89,6 @@ class TestAioHTTPTestCase(AioHTTPTestCase): def get_app(self): return _create_example_app() - @unittest_run_loop async def test_example_with_loop(self) -> None: request = await self.client.request("GET", "/") assert request.status == 200 diff --git a/tests/test_web_app.py b/tests/test_web_app.py index 91b4f69274c..12c40293793 100644 --- a/tests/test_web_app.py +++ b/tests/test_web_app.py @@ -7,6 +7,7 @@ from aiohttp import log, web from aiohttp.test_utils import make_mocked_coro +from aiohttp.typedefs import Handler async def test_app_ctor() -> None: @@ -137,7 +138,7 @@ def test_app_run_middlewares() -> None: 
root.freeze() assert root._run_middlewares is False - async def middleware(request, handler): + async def middleware(request, handler: Handler): return await handler(request) root = web.Application(middlewares=[middleware]) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index bc66565f972..6a3b7a3fee5 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -17,6 +17,7 @@ from aiohttp import FormData, HttpVersion10, HttpVersion11, TraceConfig, multipart, web from aiohttp.hdrs import CONTENT_LENGTH, CONTENT_TYPE, TRANSFER_ENCODING from aiohttp.test_utils import make_mocked_coro +from aiohttp.typedefs import Handler try: import ssl @@ -328,7 +329,8 @@ async def handler(request): fname = here / "data.unknown_mime_type" - resp = await client.post("/", data=[fname.open("rb")]) + with fname.open("rb") as fd: + resp = await client.post("/", data=[fd]) assert 200 == resp.status @@ -813,12 +815,15 @@ async def handler(request): async def test_response_with_file(aiohttp_client: Any, fname: Any) -> None: + outer_file_descriptor = None with fname.open("rb") as f: data = f.read() async def handler(request): - return web.Response(body=fname.open("rb")) + nonlocal outer_file_descriptor + outer_file_descriptor = fname.open("rb") + return web.Response(body=outer_file_descriptor) app = web.Application() app.router.add_get("/", handler) @@ -837,15 +842,21 @@ async def handler(request): assert resp.headers.get("Content-Length") == str(len(resp_data)) assert resp.headers.get("Content-Disposition") == expected_content_disposition + outer_file_descriptor.close() + async def test_response_with_file_ctype(aiohttp_client: Any, fname: Any) -> None: + outer_file_descriptor = None with fname.open("rb") as f: data = f.read() async def handler(request): + nonlocal outer_file_descriptor + outer_file_descriptor = fname.open("rb") + return web.Response( - body=fname.open("rb"), headers={"content-type": "text/binary"} + body=outer_file_descriptor, 
headers={"content-type": "text/binary"} ) app = web.Application() @@ -861,14 +872,19 @@ async def handler(request): assert resp.headers.get("Content-Length") == str(len(resp_data)) assert resp.headers.get("Content-Disposition") == expected_content_disposition + outer_file_descriptor.close() + async def test_response_with_payload_disp(aiohttp_client: Any, fname: Any) -> None: + outer_file_descriptor = None with fname.open("rb") as f: data = f.read() async def handler(request): - pl = aiohttp.get_payload(fname.open("rb")) + nonlocal outer_file_descriptor + outer_file_descriptor = fname.open("rb") + pl = aiohttp.get_payload(outer_file_descriptor) pl.set_content_disposition("inline", filename="test.txt") return web.Response(body=pl, headers={"content-type": "text/binary"}) @@ -884,6 +900,8 @@ async def handler(request): assert resp.headers.get("Content-Length") == str(len(resp_data)) assert resp.headers.get("Content-Disposition") == 'inline; filename="test.txt"' + outer_file_descriptor.close() + async def test_response_with_payload_stringio(aiohttp_client: Any, fname: Any) -> None: async def handler(request): @@ -1196,7 +1214,7 @@ async def handler(request): with pytest.warns(DeprecationWarning, match="Middleware decorator is deprecated"): @web.middleware - async def middleware(request, handler): + async def middleware(request, handler: Handler): order.append((1, request.app["name"])) resp = await handler(request) assert 200 == resp.status @@ -1336,7 +1354,7 @@ async def test_subapp_middleware_context( values = [] def show_app_context(appname): - async def middleware(request, handler): + async def middleware(request, handler: Handler): values.append("{}: {}".format(appname, request.app["my_value"])) return await handler(request) @@ -1489,6 +1507,7 @@ async def handler(request): assert ( "Maximum request body size 10 exceeded, " "actual body size 1024" in resp_text ) + data["file"].close() async def test_post_max_client_size_for_file(aiohttp_client: Any) -> None: @@ 
-1539,11 +1558,12 @@ async def handler(request): f = tmp_path / "foobar.txt" f.write_text("test", encoding="utf8") - data = {"file": f.open("rb")} - resp = await client.post("/", data=data) + with f.open("rb") as fd: + data = {"file": fd} + resp = await client.post("/", data=data) - assert 200 == resp.status - body = await resp.read() + assert 200 == resp.status + body = await resp.read() assert body == b"test" disp = multipart.parse_content_disposition(resp.headers["content-disposition"]) @@ -1618,12 +1638,15 @@ async def handler(request): app = web.Application() app.router.add_route("GET", "/", handler) server = await aiohttp_server(app) - resp = await aiohttp.ClientSession().get(server.make_url("/")) + session = aiohttp.ClientSession() + resp = await session.get(server.make_url("/")) async with resp: assert resp.status == 200 assert resp.connection is None assert resp.connection is None + await session.close() + async def test_response_context_manager_error(aiohttp_server: Any) -> None: async def handler(request): @@ -1644,6 +1667,8 @@ async def handler(request): assert len(session._connector._conns) == 1 + await session.close() + async def aiohttp_client_api_context_manager(aiohttp_server: Any): async def handler(request): diff --git a/tests/test_web_log.py b/tests/test_web_log.py index b834f87310b..fa5fb27f744 100644 --- a/tests/test_web_log.py +++ b/tests/test_web_log.py @@ -10,6 +10,7 @@ import aiohttp from aiohttp import web from aiohttp.abc import AbstractAccessLogger, AbstractAsyncAccessLogger +from aiohttp.typedefs import Handler from aiohttp.web_log import AccessLogger from aiohttp.web_response import Response @@ -232,7 +233,7 @@ async def test_contextvars_logger(aiohttp_server: Any, aiohttp_client: Any): async def handler(request): return web.Response() - async def middleware(request, handler): + async def middleware(request, handler: Handler): VAR.set("uuid") return await handler(request) diff --git a/tests/test_web_middleware.py 
b/tests/test_web_middleware.py index 842dc9777f3..cfd85b5f95e 100644 --- a/tests/test_web_middleware.py +++ b/tests/test_web_middleware.py @@ -5,13 +5,14 @@ from yarl import URL from aiohttp import web +from aiohttp.typedefs import Handler async def test_middleware_modifies_response(loop: Any, aiohttp_client: Any) -> None: async def handler(request): return web.Response(body=b"OK") - async def middleware(request, handler): + async def middleware(request, handler: Handler): resp = await handler(request) assert 200 == resp.status resp.set_status(201) @@ -32,7 +33,7 @@ async def test_middleware_handles_exception(loop: Any, aiohttp_client: Any) -> N async def handler(request): raise RuntimeError("Error text") - async def middleware(request, handler): + async def middleware(request, handler: Handler): with pytest.raises(RuntimeError) as ctx: await handler(request) return web.Response(status=501, text=str(ctx.value) + "[MIDDLEWARE]") @@ -59,7 +60,7 @@ async def handler2(request): middleware_annotation_seen_values = [] def make_middleware(num): - async def middleware(request, handler): + async def middleware(request, handler: Handler): middleware_annotation_seen_values.append( getattr(handler, "annotation", None) ) @@ -104,7 +105,7 @@ async def handler(request): middleware_annotation_seen_values = [] def make_middleware(num): - async def middleware(request, handler): + async def middleware(request, handler: Handler): annotation = getattr(handler, "annotation", None) if annotation is not None: middleware_annotation_seen_values.append(f"{annotation}/{num}") @@ -361,6 +362,38 @@ async def test_cannot_remove_and_add_slash(self) -> None: with pytest.raises(AssertionError): web.normalize_path_middleware(append_slash=True, remove_slash=True) + @pytest.mark.parametrize( + ["append_slash", "remove_slash"], + [ + (True, False), + (False, True), + (False, False), + ], + ) + async def test_open_redirects( + self, append_slash: bool, remove_slash: bool, aiohttp_client: Any + ) -> 
None: + async def handle(request: web.Request) -> web.StreamResponse: + pytest.fail( + msg="Security advisory 'GHSA-v6wp-4m6f-gcjg' test handler " + "matched unexpectedly", + pytrace=False, + ) + + app = web.Application( + middlewares=[ + web.normalize_path_middleware( + append_slash=append_slash, remove_slash=remove_slash + ) + ] + ) + app.add_routes([web.get("/", handle), web.get("/google.com", handle)]) + client = await aiohttp_client(app, server_kwargs={"skip_url_asserts": True}) + resp = await client.get("//google.com", allow_redirects=False) + assert resp.status == 308 + assert resp.headers["Location"] == "/google.com" + assert resp.url.query == URL("//google.com").query + async def test_bug_3669(aiohttp_client: Any): async def paymethod(request): @@ -386,7 +419,7 @@ async def view_handler(request): with pytest.warns(DeprecationWarning, match="Middleware decorator is deprecated"): @web.middleware - async def middleware(request, handler): + async def middleware(request, handler: Handler): resp = await handler(request) assert 200 == resp.status resp.set_status(201) @@ -407,24 +440,21 @@ async def handler(request): return web.Response(body=b"OK") class Middleware: - async def __call__(self, request, handler): + async def __call__(self, request, handler: Handler): resp = await handler(request) assert 200 == resp.status resp.set_status(201) resp.text = resp.text + "[new style middleware]" return resp - with pytest.warns(None) as warning_checker: - app = web.Application() - app.middlewares.append(Middleware()) - app.router.add_route("GET", "/", handler) - client = await aiohttp_client(app) - resp = await client.get("/") - assert 201 == resp.status - txt = await resp.text() - assert "OK[new style middleware]" == txt - - assert len(warning_checker) == 0 + app = web.Application() + app.middlewares.append(Middleware()) + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.get("/") + assert 201 == resp.status + txt = 
await resp.text() + assert "OK[new style middleware]" == txt async def test_new_style_middleware_method(loop: Any, aiohttp_client: Any) -> None: @@ -432,21 +462,18 @@ async def handler(request): return web.Response(body=b"OK") class Middleware: - async def call(self, request, handler): + async def call(self, request, handler: Handler): resp = await handler(request) assert 200 == resp.status resp.set_status(201) resp.text = resp.text + "[new style middleware]" return resp - with pytest.warns(None) as warning_checker: - app = web.Application() - app.middlewares.append(Middleware().call) - app.router.add_route("GET", "/", handler) - client = await aiohttp_client(app) - resp = await client.get("/") - assert 201 == resp.status - txt = await resp.text() - assert "OK[new style middleware]" == txt - - assert len(warning_checker) == 0 + app = web.Application() + app.middlewares.append(Middleware().call) + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.get("/") + assert 201 == resp.status + txt = await resp.text() + assert "OK[new style middleware]" == txt diff --git a/tests/test_web_request.py b/tests/test_web_request.py index 8a9de26907c..eeeba2b44db 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -17,6 +17,7 @@ from aiohttp.streams import StreamReader from aiohttp.test_utils import make_mocked_request from aiohttp.web import HTTPRequestEntityTooLarge, HTTPUnsupportedMediaType +from aiohttp.web_request import ETag @pytest.fixture @@ -648,6 +649,8 @@ async def test_multipart_formdata_file(protocol: Any) -> None: content = result["a_file"].file.read() assert content == b"\ff" + req._finish() + async def test_make_too_big_request_limit_None(protocol: Any) -> None: payload = StreamReader(protocol, 2 ** 16, loop=asyncio.get_event_loop()) @@ -820,3 +823,47 @@ async def invalid_handler_1(request): async with client.get("/1") as resp: assert 500 == resp.status + + +@pytest.mark.parametrize( + 
["header", "header_attr"], + [ + pytest.param("If-Match", "if_match"), + pytest.param("If-None-Match", "if_none_match"), + ], +) +@pytest.mark.parametrize( + ["header_val", "expected"], + [ + pytest.param( + '"67ab43", W/"54ed21", "7892,dd"', + ( + ETag(is_weak=False, value="67ab43"), + ETag(is_weak=True, value="54ed21"), + ETag(is_weak=False, value="7892,dd"), + ), + ), + pytest.param( + '"bfc1ef-5b2c2730249c88ca92d82d"', + (ETag(is_weak=False, value="bfc1ef-5b2c2730249c88ca92d82d"),), + ), + pytest.param( + '"valid-tag", "also-valid-tag",somegarbage"last-tag"', + ( + ETag(is_weak=False, value="valid-tag"), + ETag(is_weak=False, value="also-valid-tag"), + ), + ), + pytest.param( + '"ascii", "это точно не ascii", "ascii again"', + (ETag(is_weak=False, value="ascii"),), + ), + pytest.param( + "*", + (ETag(is_weak=False, value="*"),), + ), + ], +) +def test_etag_headers(header, header_attr, header_val, expected) -> None: + req = make_mocked_request("GET", "/", headers={header: header_val}) + assert getattr(req, header_attr) == expected diff --git a/tests/test_web_response.py b/tests/test_web_response.py index a80184029a5..5ff0aabbe6e 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -14,6 +14,8 @@ from re_assert import Matches from aiohttp import HttpVersion, HttpVersion10, HttpVersion11, hdrs +from aiohttp.helpers import ETag +from aiohttp.http_writer import _serialize_headers from aiohttp.payload import BytesPayload from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web import ContentCoding, Response, StreamResponse, json_response @@ -25,7 +27,7 @@ def make_request( headers: Any = CIMultiDict(), version: Any = HttpVersion11, on_response_prepare: Optional[Any] = None, - **kwargs: Any + **kwargs: Any, ): app = kwargs.pop("app", None) or mock.Mock() app._debug = False @@ -58,12 +60,7 @@ def write(chunk): buf.extend(chunk) async def write_headers(status_line, headers): - headers = ( - status_line - + "\r\n" - + 
"".join([k + ": " + v + "\r\n" for k, v in headers.items()]) - ) - headers = headers.encode("utf-8") + b"\r\n" + headers = _serialize_headers(status_line, headers) buf.extend(headers) async def write_eof(chunk=b""): @@ -257,6 +254,82 @@ def test_last_modified_reset() -> None: assert resp.last_modified is None +def test_etag_initial() -> None: + resp = StreamResponse() + assert resp.etag is None + + +def test_etag_string() -> None: + resp = StreamResponse() + value = "0123-kotik" + resp.etag = value + assert resp.etag == ETag(value=value) + assert resp.headers[hdrs.ETAG] == f'"{value}"' + + +@pytest.mark.parametrize( + ["etag", "expected_header"], + ( + (ETag(value="0123-weak-kotik", is_weak=True), 'W/"0123-weak-kotik"'), + (ETag(value="0123-strong-kotik", is_weak=False), '"0123-strong-kotik"'), + ), +) +def test_etag_class(etag, expected_header) -> None: + resp = StreamResponse() + resp.etag = etag + assert resp.etag == etag + assert resp.headers[hdrs.ETAG] == expected_header + + +def test_etag_any() -> None: + resp = StreamResponse() + resp.etag = "*" + assert resp.etag == ETag(value="*") + assert resp.headers[hdrs.ETAG] == "*" + + +@pytest.mark.parametrize( + "invalid_value", + ( + '"invalid"', + "повинен бути ascii", + ETag(value='"invalid"', is_weak=True), + ETag(value="bad ©®"), + ), +) +def test_etag_invalid_value_set(invalid_value) -> None: + resp = StreamResponse() + with pytest.raises(ValueError, match="is not a valid etag"): + resp.etag = invalid_value + + +@pytest.mark.parametrize( + "header", + ( + "forgotten quotes", + '"∀ x ∉ ascii"', + ), +) +def test_etag_invalid_value_get(header) -> None: + resp = StreamResponse() + resp.headers["ETag"] = header + assert resp.etag is None + + +@pytest.mark.parametrize("invalid", (123, ETag(value=123, is_weak=True))) +def test_etag_invalid_value_class(invalid) -> None: + resp = StreamResponse() + with pytest.raises(ValueError, match="Unsupported etag type"): + resp.etag = invalid + + +def test_etag_reset() -> None: 
+ resp = StreamResponse() + resp.etag = "*" + resp.etag = None + assert resp.etag is None + + async def test_start() -> None: req = make_request("GET", "/") resp = StreamResponse() diff --git a/tests/test_web_runner.py b/tests/test_web_runner.py index 16a7285e2ab..48ec3944337 100644 --- a/tests/test_web_runner.py +++ b/tests/test_web_runner.py @@ -211,3 +211,27 @@ async def mock_create_server(*args, **kwargs): assert server is runner.server assert host is None assert port == 8080 + + +def test_run_after_asyncio_run() -> None: + async def nothing(): + pass + + def spy(): + spy.called = True + + spy.called = False + + async def shutdown(): + spy() + raise web.GracefulExit() + + # asyncio.run() creates a new loop and closes it. + asyncio.run(nothing()) + + app = web.Application() + # create_task() will delay the function until app is run. + app.on_startup.append(lambda a: asyncio.create_task(shutdown())) + + web.run_app(app) + assert spy.called, "run_app() should work after asyncio.run()." diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index 5eafc3d2eb5..5462fadea87 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -15,7 +15,8 @@ def test_using_gzip_if_header_present_and_file_available(loop: Any) -> None: gz_filepath.open = mock.mock_open() gz_filepath.is_file.return_value = True gz_filepath.stat.return_value = mock.MagicMock() - gz_filepath.stat.st_size = 1024 + gz_filepath.stat.return_value.st_size = 1024 + gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 filepath = mock.Mock() filepath.name = "logo.png" @@ -23,7 +24,7 @@ def test_using_gzip_if_header_present_and_file_available(loop: Any) -> None: filepath.with_name.return_value = gz_filepath file_sender = FileResponse(filepath) - file_sender._sendfile = make_mocked_coro(None) # type: ignore + file_sender._sendfile = make_mocked_coro(None) # type: ignore[assignment] loop.run_until_complete(file_sender.prepare(request)) @@ -43,10 +44,11 @@ def 
test_gzip_if_header_not_present_and_file_available(loop: Any) -> None: filepath.open = mock.mock_open() filepath.with_name.return_value = gz_filepath filepath.stat.return_value = mock.MagicMock() - filepath.stat.st_size = 1024 + filepath.stat.return_value.st_size = 1024 + filepath.stat.return_value.st_mtime_ns = 1603733507222449291 file_sender = FileResponse(filepath) - file_sender._sendfile = make_mocked_coro(None) # type: ignore + file_sender._sendfile = make_mocked_coro(None) # type: ignore[assignment] loop.run_until_complete(file_sender.prepare(request)) @@ -66,10 +68,11 @@ def test_gzip_if_header_not_present_and_file_not_available(loop: Any) -> None: filepath.open = mock.mock_open() filepath.with_name.return_value = gz_filepath filepath.stat.return_value = mock.MagicMock() - filepath.stat.st_size = 1024 + filepath.stat.return_value.st_size = 1024 + filepath.stat.return_value.st_mtime_ns = 1603733507222449291 file_sender = FileResponse(filepath) - file_sender._sendfile = make_mocked_coro(None) # type: ignore + file_sender._sendfile = make_mocked_coro(None) # type: ignore[assignment] loop.run_until_complete(file_sender.prepare(request)) @@ -91,10 +94,11 @@ def test_gzip_if_header_present_and_file_not_available(loop: Any) -> None: filepath.open = mock.mock_open() filepath.with_name.return_value = gz_filepath filepath.stat.return_value = mock.MagicMock() - filepath.stat.st_size = 1024 + filepath.stat.return_value.st_size = 1024 + filepath.stat.return_value.st_mtime_ns = 1603733507222449291 file_sender = FileResponse(filepath) - file_sender._sendfile = make_mocked_coro(None) # type: ignore + file_sender._sendfile = make_mocked_coro(None) # type: ignore[assignment] loop.run_until_complete(file_sender.prepare(request)) @@ -109,10 +113,11 @@ def test_status_controlled_by_user(loop: Any) -> None: filepath.name = "logo.png" filepath.open = mock.mock_open() filepath.stat.return_value = mock.MagicMock() - filepath.stat.st_size = 1024 + filepath.stat.return_value.st_size = 
1024 + filepath.stat.return_value.st_mtime_ns = 1603733507222449291 file_sender = FileResponse(filepath, status=203) - file_sender._sendfile = make_mocked_coro(None) # type: ignore + file_sender._sendfile = make_mocked_coro(None) # type: ignore[assignment] loop.run_until_complete(file_sender.prepare(request)) diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index b2cb91566b1..3351ead3754 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -3,7 +3,7 @@ import pathlib import socket import zlib -from typing import Any +from typing import Any, Iterable import pytest @@ -36,15 +36,23 @@ def maker(*args, **kwargs): return maker -async def test_static_file_ok(aiohttp_client: Any, sender: Any) -> None: - filepath = pathlib.Path(__file__).parent / "data.unknown_mime_type" +@pytest.fixture +def app_with_static_route(sender: Any) -> web.Application: + filename = "data.unknown_mime_type" + filepath = pathlib.Path(__file__).parent / filename async def handler(request): return sender(filepath) app = web.Application() app.router.add_get("/", handler) - client = await aiohttp_client(app) + return app + + +async def test_static_file_ok( + aiohttp_client: Any, app_with_static_route: web.Application +) -> None: + client = await aiohttp_client(app_with_static_route) resp = await client.get("/") assert resp.status == 200 @@ -74,15 +82,10 @@ async def handler(request): await resp.release() -async def test_static_file_ok_string_path(aiohttp_client: Any, sender: Any) -> None: - filepath = pathlib.Path(__file__).parent / "data.unknown_mime_type" - - async def handler(request): - return sender(str(filepath)) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) +async def test_static_file_ok_string_path( + aiohttp_client: Any, app_with_static_route: web.Application +) -> None: + client = await aiohttp_client(app_with_static_route) resp = await client.get("/") 
assert resp.status == 200 @@ -215,16 +218,10 @@ async def handler(request): resp.close() -async def test_static_file_if_modified_since(aiohttp_client: Any, sender: Any) -> None: - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) +async def test_static_file_if_modified_since( + aiohttp_client: Any, app_with_static_route: web.Application +) -> None: + client = await aiohttp_client(app_with_static_route) resp = await client.get("/") assert 200 == resp.status @@ -236,22 +233,15 @@ async def handler(request): body = await resp.read() assert 304 == resp.status assert resp.headers.get("Content-Length") is None + assert resp.headers.get("Last-Modified") == lastmod assert b"" == body resp.close() async def test_static_file_if_modified_since_past_date( - aiohttp_client: Any, sender: Any + aiohttp_client: Any, app_with_static_route: web.Application ) -> None: - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) + client = await aiohttp_client(app_with_static_route) lastmod = "Mon, 1 Jan 1990 01:01:01 GMT" @@ -261,17 +251,9 @@ async def handler(request): async def test_static_file_if_modified_since_invalid_date( - aiohttp_client: Any, sender: Any + aiohttp_client: Any, app_with_static_route: web.Application ): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) + client = await aiohttp_client(app_with_static_route) lastmod = "not a valid HTTP-date" @@ -281,17 +263,9 @@ async def 
handler(request): async def test_static_file_if_modified_since_future_date( - aiohttp_client: Any, sender: Any + aiohttp_client: Any, app_with_static_route: web.Application ): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) + client = await aiohttp_client(app_with_static_route) lastmod = "Fri, 31 Dec 9999 23:59:59 GMT" @@ -299,13 +273,121 @@ async def handler(request): body = await resp.read() assert 304 == resp.status assert resp.headers.get("Content-Length") is None + assert resp.headers.get("Last-Modified") + assert b"" == body + resp.close() + + +@pytest.mark.parametrize("if_unmodified_since", ("", "Fri, 31 Dec 0000 23:59:59 GMT")) +async def test_static_file_if_match( + aiohttp_client: Any, + app_with_static_route: web.Application, + if_unmodified_since: str, +) -> None: + client = await aiohttp_client(app_with_static_route) + + resp = await client.get("/") + assert 200 == resp.status + original_etag = resp.headers.get("ETag") + + assert original_etag is not None + resp.close() + + headers = {"If-Match": original_etag, "If-Unmodified-Since": if_unmodified_since} + resp = await client.head("/", headers=headers) + body = await resp.read() + assert 200 == resp.status + assert resp.headers.get("ETag") + assert resp.headers.get("Last-Modified") + assert b"" == body + resp.close() + + +@pytest.mark.parametrize("if_unmodified_since", ("", "Fri, 31 Dec 0000 23:59:59 GMT")) +@pytest.mark.parametrize( + "etags,expected_status", + [ + (("*",), 200), + (('"example-tag"', 'W/"weak-tag"'), 412), + ], +) +async def test_static_file_if_match_custom_tags( + aiohttp_client: Any, + app_with_static_route: web.Application, + if_unmodified_since: str, + etags: Iterable[str], + expected_status: Iterable[int], +) -> None: + client = await aiohttp_client(app_with_static_route) + + 
if_match = ", ".join(etags) + headers = {"If-Match": if_match, "If-Unmodified-Since": if_unmodified_since} + resp = await client.head("/", headers=headers) + body = await resp.read() + assert expected_status == resp.status + assert b"" == body + resp.close() + + +@pytest.mark.parametrize("if_modified_since", ("", "Fri, 31 Dec 9999 23:59:59 GMT")) +@pytest.mark.parametrize( + "additional_etags", + ( + (), + ('"some-other-strong-etag"', 'W/"weak-tag"', "invalid-tag"), + ), +) +async def test_static_file_if_none_match( + aiohttp_client: Any, + app_with_static_route: web.Application, + if_modified_since: str, + additional_etags: Iterable[str], +) -> None: + client = await aiohttp_client(app_with_static_route) + + resp = await client.get("/") + assert 200 == resp.status + original_etag = resp.headers.get("ETag") + + assert resp.headers.get("Last-Modified") is not None + assert original_etag is not None + resp.close() + + etag = ",".join((original_etag, *additional_etags)) + + resp = await client.get( + "/", headers={"If-None-Match": etag, "If-Modified-Since": if_modified_since} + ) + body = await resp.read() + assert 304 == resp.status + assert resp.headers.get("Content-Length") is None + assert resp.headers.get("ETag") == original_etag + assert b"" == body + resp.close() + + +async def test_static_file_if_none_match_star( + aiohttp_client: Any, + app_with_static_route: web.Application, +) -> None: + client = await aiohttp_client(app_with_static_route) + + resp = await client.head("/", headers={"If-None-Match": "*"}) + body = await resp.read() + assert 304 == resp.status + assert resp.headers.get("Content-Length") is None + assert resp.headers.get("ETag") + assert resp.headers.get("Last-Modified") assert b"" == body resp.close() @pytest.mark.skipif(not ssl, reason="ssl not supported") async def test_static_file_ssl( - aiohttp_server: Any, ssl_ctx: Any, aiohttp_client: Any, client_ssl_ctx: Any + aiohttp_server: Any, + ssl_ctx: Any, + aiohttp_client: Any, + 
client_ssl_ctx: Any, ) -> None: dirname = pathlib.Path(__file__).parent filename = "data.unknown_mime_type" @@ -389,7 +471,7 @@ async def test_static_file_huge(aiohttp_client: Any, tmp_path: Any) -> None: async def test_static_file_range(aiohttp_client: Any, sender: Any) -> None: - filepath = pathlib.Path(__file__).parent.parent / "LICENSE.txt" + filepath = pathlib.Path(__file__).parent / "sample.txt" filesize = filepath.stat().st_size @@ -564,17 +646,9 @@ async def handler(request): async def test_static_file_if_unmodified_since_past_with_range( - aiohttp_client: Any, sender: Any + aiohttp_client: Any, app_with_static_route: web.Application ): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) + client = await aiohttp_client(app_with_static_route) lastmod = "Mon, 1 Jan 1990 01:01:01 GMT" @@ -586,17 +660,9 @@ async def handler(request): async def test_static_file_if_unmodified_since_future_with_range( - aiohttp_client: Any, sender: Any + aiohttp_client: Any, app_with_static_route: web.Application ): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) + client = await aiohttp_client(app_with_static_route) lastmod = "Fri, 31 Dec 9999 23:59:59 GMT" @@ -609,16 +675,10 @@ async def handler(request): resp.close() -async def test_static_file_if_range_past_with_range(aiohttp_client: Any, sender: Any): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) +async def 
test_static_file_if_range_past_with_range( + aiohttp_client: Any, app_with_static_route: web.Application +): + client = await aiohttp_client(app_with_static_route) lastmod = "Mon, 1 Jan 1990 01:01:01 GMT" @@ -628,16 +688,10 @@ async def handler(request): resp.close() -async def test_static_file_if_range_future_with_range(aiohttp_client: Any, sender: Any): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) +async def test_static_file_if_range_future_with_range( + aiohttp_client: Any, app_with_static_route: web.Application +): + client = await aiohttp_client(app_with_static_route) lastmod = "Fri, 31 Dec 9999 23:59:59 GMT" @@ -649,17 +703,9 @@ async def handler(request): async def test_static_file_if_unmodified_since_past_without_range( - aiohttp_client: Any, sender: Any + aiohttp_client: Any, app_with_static_route: web.Application ): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) + client = await aiohttp_client(app_with_static_route) lastmod = "Mon, 1 Jan 1990 01:01:01 GMT" @@ -669,17 +715,9 @@ async def handler(request): async def test_static_file_if_unmodified_since_future_without_range( - aiohttp_client: Any, sender: Any + aiohttp_client: Any, app_with_static_route: web.Application ): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) + client = await aiohttp_client(app_with_static_route) lastmod = "Fri, 31 Dec 9999 23:59:59 GMT" @@ -690,17 +728,9 @@ async def 
handler(request): async def test_static_file_if_range_past_without_range( - aiohttp_client: Any, sender: Any + aiohttp_client: Any, app_with_static_route: web.Application ): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) + client = await aiohttp_client(app_with_static_route) lastmod = "Mon, 1 Jan 1990 01:01:01 GMT" @@ -711,17 +741,9 @@ async def handler(request): async def test_static_file_if_range_future_without_range( - aiohttp_client: Any, sender: Any + aiohttp_client: Any, app_with_static_route: web.Application ): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) + client = await aiohttp_client(app_with_static_route) lastmod = "Fri, 31 Dec 9999 23:59:59 GMT" @@ -732,17 +754,9 @@ async def handler(request): async def test_static_file_if_unmodified_since_invalid_date( - aiohttp_client: Any, sender: Any + aiohttp_client: Any, app_with_static_route: web.Application ): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app) + client = await aiohttp_client(app_with_static_route) lastmod = "not a valid HTTP-date" @@ -751,16 +765,10 @@ async def handler(request): resp.close() -async def test_static_file_if_range_invalid_date(aiohttp_client: Any, sender: Any): - filename = "data.unknown_mime_type" - filepath = pathlib.Path(__file__).parent / filename - - async def handler(request): - return sender(filepath) - - app = web.Application() - app.router.add_get("/", handler) - client = 
await aiohttp_client(app) +async def test_static_file_if_range_invalid_date( + aiohttp_client: Any, app_with_static_route: web.Application +): + client = await aiohttp_client(app_with_static_route) lastmod = "not a valid HTTP-date" diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index f520c4dc98d..ae19dec148d 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -6,6 +6,7 @@ from unittest.mock import MagicMock import pytest +import yarl from aiohttp import web from aiohttp.web_urldispatcher import SystemRoute @@ -134,6 +135,7 @@ async def test_access_to_the_file_with_spaces( r = await client.get(url) assert r.status == 200 assert (await r.text()) == data + await r.release() async def test_access_non_existing_resource(tmp_path: Any, aiohttp_client: Any) -> None: @@ -457,3 +459,35 @@ async def test_static_absolute_url(aiohttp_client: Any, tmp_path: Any) -> None: client = await aiohttp_client(app) resp = await client.get("/static/" + str(file_path.resolve())) assert resp.status == 403 + + +@pytest.mark.xfail( + raises=AssertionError, + reason="Regression in v3.7: https://github.com/aio-libs/aiohttp/issues/5621", +) +@pytest.mark.parametrize( + ("route_definition", "urlencoded_path", "expected_http_resp_status"), + ( + ("/467,802,24834/hello", "/467%2C802%2C24834/hello", 200), + ("/{user_ids:([0-9]+)(,([0-9]+))*}/hello", "/467%2C802%2C24834/hello", 200), + ("/1%2C3/hello", "/1%2C3/hello", 404), + ), + ids=("urldecoded_route", "urldecoded_route_with_regex", "urlencoded_route"), +) +async def test_decoded_url_match( + aiohttp_client, + route_definition, + urlencoded_path, + expected_http_resp_status, +) -> None: + app = web.Application() + + async def handler(_): + return web.Response() + + app.router.add_get(route_definition, handler) + client = await aiohttp_client(app) + + r = await client.get(yarl.URL(urlencoded_path, encoded=True)) + assert r.status == expected_http_resp_status + await r.release() diff 
--git a/tools/gen.py b/tools/gen.py index fa916a8d925..d00780aa676 100755 --- a/tools/gen.py +++ b/tools/gen.py @@ -143,7 +143,7 @@ def gen(dct): out.write(HEADER) missing = set() gen_block(dct, "", set(), missing, out) - missing_labels = "\n".join(m for m in sorted(missing)) + missing_labels = "\n".join(sorted(missing)) out.write(FOOTER.format(missing=missing_labels)) return out