diff --git a/.cargo/config.toml b/.cargo/config.toml
index 3a7caaff78c0f..d6ef99b66f955 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -1,6 +1,6 @@
[alias]
dev = "run --package ruff_dev --bin ruff_dev"
-benchmark = "bench -p ruff_benchmark --"
+benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
[target.'cfg(all())']
rustflags = [
@@ -33,4 +33,5 @@ rustflags = [
"-Wclippy::rc_buffer",
"-Wclippy::rc_mutex",
"-Wclippy::rest_pat_in_fully_bound_structs",
+ "-Wunreachable_pub"
]
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 0000000000000..e46e1cc5c33f7
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,46 @@
+// For format details, see https://aka.ms/devcontainer.json. For config options, see the
+// README at: https://github.com/devcontainers/templates/tree/main/src/rust
+{
+ "name": "Ruff",
+ "image": "mcr.microsoft.com/devcontainers/rust:0-1-bullseye",
+ "mounts": [
+ {
+ "source": "devcontainer-cargo-cache-${devcontainerId}",
+ "target": "/usr/local/cargo",
+ "type": "volume"
+ }
+ ],
+ "customizations": {
+ "codespaces": {
+ "openFiles": [
+ "CONTRIBUTING.md"
+ ]
+ },
+ "vscode": {
+ "extensions": [
+ "ms-python.python",
+ "rust-lang.rust-analyzer",
+ "serayuzgur.crates",
+ "tamasfe.even-better-toml",
+ "Swellaby.vscode-rust-test-adapter",
+ "charliermarsh.ruff"
+ ],
+ "settings": {
+ "rust-analyzer.updates.askBeforeDownload": false
+ }
+ }
+ },
+ // Features to add to the dev container. More info: https://containers.dev/features.
+ "features": {
+ "ghcr.io/devcontainers/features/python": {
+ "installTools": false
+ }
+ },
+ // Use 'forwardPorts' to make a list of ports inside the container available locally.
+ // "forwardPorts": [],
+ "postCreateCommand": ".devcontainer/post-create.sh"
+ // Configure tool-specific properties.
+ // "customizations": {},
+ // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
+ // "remoteUser": "root"
+}
diff --git a/.devcontainer/post-create.sh b/.devcontainer/post-create.sh
new file mode 100755
index 0000000000000..0a81beacfdb6a
--- /dev/null
+++ b/.devcontainer/post-create.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+rustup default $(cat rust-toolchain)
+rustup component add clippy rustfmt
+cargo install cargo-insta
+cargo fetch
+
+pip install maturin pre-commit
diff --git a/.editorconfig b/.editorconfig
index 776563c1e1f50..11c29680d79e7 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -14,4 +14,7 @@ indent_size = 2
indent_size = 4
[*.snap]
-trim_trailing_whitespace = false
\ No newline at end of file
+trim_trailing_whitespace = false
+
+[*.md]
+max_line_length = 100
\ No newline at end of file
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000000..5b6455590f091
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,15 @@
+
+
+## Summary
+
+
+
+## Test Plan
+
+
diff --git a/.github/release.yml b/.github/release.yml
index ddaefcb7e5411..a27541985e3d8 100644
--- a/.github/release.yml
+++ b/.github/release.yml
@@ -1,5 +1,9 @@
# https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes#configuring-automatically-generated-release-notes
changelog:
+ exclude:
+ labels:
+ - internal
+ - documentation
categories:
- title: Breaking Changes
labels:
@@ -11,6 +15,7 @@ changelog:
- title: Settings
labels:
- configuration
+ - cli
- title: Bug Fixes
labels:
- bug
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 2a75bd59ada7b..b59061fd59652 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -15,6 +15,8 @@ env:
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
+ PACKAGE_NAME: ruff
+ PYTHON_VERSION: "3.7" # to build abi3 wheels
jobs:
cargo-fmt:
@@ -85,6 +87,22 @@ jobs:
name: ruff
path: target/debug/ruff
+ cargo-fuzz:
+ runs-on: ubuntu-latest
+ name: "cargo fuzz"
+ steps:
+ - uses: actions/checkout@v3
+ - name: "Install Rust toolchain"
+ run: rustup show
+ - uses: Swatinem/rust-cache@v2
+ with:
+ workspaces: "fuzz -> target"
+ - name: "Install cargo-fuzz"
+ uses: taiki-e/install-action@v2
+ with:
+ tool: cargo-fuzz@0.11
+ - run: cargo fuzz build -s none
+
cargo-test-wasm:
runs-on: ubuntu-latest
name: "cargo test (wasm)"
@@ -181,18 +199,30 @@ jobs:
- name: "Install cargo-udeps"
uses: taiki-e/install-action@cargo-udeps
- name: "Run cargo-udeps"
+ run: cargo +nightly-2023-03-30 udeps
+
+
+ python-package:
+ name: "python package"
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
+ with:
+ python-version: ${{ env.PYTHON_VERSION }}
+ architecture: x64
+ - name: "Prep README.md"
+ run: python scripts/transform_readme.py --target pypi
+ - name: "Build wheels"
+ uses: PyO3/maturin-action@v1
+ with:
+ manylinux: auto
+ args: --out dist
+ - name: "Test wheel"
run: |
- unused_dependencies=$(cargo +nightly-2023-03-30 udeps > unused.txt && cat unused.txt | cut -d $'\n' -f 2-)
- if [ -z "$unused_dependencies" ]; then
- echo "No unused dependencies found" > $GITHUB_STEP_SUMMARY
- exit 0
- else
- echo "Found unused dependencies" > $GITHUB_STEP_SUMMARY
- echo '```console' >> $GITHUB_STEP_SUMMARY
- echo "$unused_dependencies" >> $GITHUB_STEP_SUMMARY
- echo '```' >> $GITHUB_STEP_SUMMARY
- exit 1
- fi
+ pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
+ ruff --help
+ python -m ruff --help
pre-commit:
name: "pre-commit"
@@ -237,5 +267,7 @@ jobs:
run: python scripts/transform_readme.py --target mkdocs
- name: "Generate docs"
run: python scripts/generate_mkdocs.py
+ - name: "Check docs formatting"
+ run: python scripts/check_docs_formatted.py
- name: "Build docs"
run: mkdocs build --strict
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
index 8722d251007cc..cb8f646df5126 100644
--- a/.github/workflows/docs.yaml
+++ b/.github/workflows/docs.yaml
@@ -1,9 +1,9 @@
name: mkdocs
on:
- release:
- types: [published]
workflow_dispatch:
+ release:
+ types: [ published ]
jobs:
mkdocs:
diff --git a/.github/workflows/flake8-to-ruff.yaml b/.github/workflows/flake8-to-ruff.yaml
index c207e1884ac48..28fa643788fce 100644
--- a/.github/workflows/flake8-to-ruff.yaml
+++ b/.github/workflows/flake8-to-ruff.yaml
@@ -52,7 +52,7 @@ jobs:
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
- args: --release --universal2 --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
+ args: --release --target universal2-apple-darwin --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
- name: "Install built wheel - universal2"
run: |
pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
diff --git a/.github/workflows/playground.yaml b/.github/workflows/playground.yaml
index 40060bcbb7185..dcb582815ea7a 100644
--- a/.github/workflows/playground.yaml
+++ b/.github/workflows/playground.yaml
@@ -2,8 +2,8 @@ name: "[Playground] Release"
on:
workflow_dispatch:
- push:
- branches: [main]
+ release:
+ types: [ published ]
env:
CARGO_INCREMENTAL: 0
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 4d31e24540d8c..1240298e76174 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -3,7 +3,7 @@ name: "[ruff] Release"
on:
workflow_dispatch:
release:
- types: [published]
+ types: [ published ]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -18,6 +18,32 @@ env:
RUSTUP_MAX_RETRIES: 10
jobs:
+ sdist:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
+ with:
+ python-version: ${{ env.PYTHON_VERSION }}
+ - name: "Prep README.md"
+ run: python scripts/transform_readme.py --target pypi
+ - name: "Build sdist"
+ uses: PyO3/maturin-action@v1
+ with:
+ command: sdist
+ args: --out dist
+ - name: "Test sdist"
+ run: |
+ rustup default $(cat rust-toolchain)
+ pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
+ ruff --help
+ python -m ruff --help
+ - name: "Upload sdist"
+ uses: actions/upload-artifact@v3
+ with:
+ name: wheels
+ path: dist
+
macos-x86_64:
runs-on: macos-latest
steps:
@@ -32,10 +58,12 @@ jobs:
uses: PyO3/maturin-action@v1
with:
target: x86_64
- args: --release --out dist --sdist
- - name: "Install built wheel - x86_64"
+ args: --release --out dist
+ - name: "Test wheel - x86_64"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
+ ruff --help
+ python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
@@ -43,9 +71,9 @@ jobs:
path: dist
- name: "Archive binary"
run: |
- ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
- tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
- shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
+ ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
+ tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
+ shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
@@ -67,10 +95,12 @@ jobs:
- name: "Build wheels - universal2"
uses: PyO3/maturin-action@v1
with:
- args: --release --universal2 --out dist
- - name: "Install built wheel - universal2"
+ args: --release --target universal2-apple-darwin --out dist
+ - name: "Test wheel - universal2"
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall
+ ruff --help
+ python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
@@ -78,9 +108,9 @@ jobs:
path: dist
- name: "Archive binary"
run: |
- ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
- tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
- shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
+ ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
+ tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
+ shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
@@ -113,11 +143,13 @@ jobs:
with:
target: ${{ matrix.platform.target }}
args: --release --out dist
- - name: "Install built wheel"
+ - name: "Test wheel"
if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
shell: bash
run: |
python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
+ ruff --help
+ python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
@@ -126,9 +158,9 @@ jobs:
- name: "Archive binary"
shell: bash
run: |
- ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
- 7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
- sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
+ ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
+ 7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
+ sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
@@ -158,10 +190,12 @@ jobs:
target: ${{ matrix.target }}
manylinux: auto
args: --release --out dist
- - name: "Install built wheel"
+ - name: "Test wheel"
if: ${{ startsWith(matrix.target, 'x86_64') }}
run: |
pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall
+ ruff --help
+ python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
@@ -169,9 +203,9 @@ jobs:
path: dist
- name: "Archive binary"
run: |
- ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
- tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
- shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
+ ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
+ tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
+ shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
@@ -187,6 +221,9 @@ jobs:
platform:
- target: aarch64-unknown-linux-gnu
arch: aarch64
+ # see https://github.com/charliermarsh/ruff/issues/3791
+ # and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963
+ maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: armv7-unknown-linux-gnueabihf
arch: armv7
- target: s390x-unknown-linux-gnu
@@ -195,6 +232,7 @@ jobs:
arch: ppc64le
- target: powerpc64-unknown-linux-gnu
arch: ppc64
+
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
@@ -207,10 +245,11 @@ jobs:
with:
target: ${{ matrix.platform.target }}
manylinux: auto
+ docker-options: ${{ matrix.platform.maturin_docker_options }}
args: --release --out dist
- uses: uraimo/run-on-arch-action@v2
if: matrix.platform.arch != 'ppc64'
- name: Install built wheel
+ name: Test wheel
with:
arch: ${{ matrix.platform.arch }}
distro: ubuntu20.04
@@ -221,6 +260,7 @@ jobs:
pip3 install -U pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
+ ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
@@ -228,9 +268,9 @@ jobs:
path: dist
- name: "Archive binary"
run: |
- ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
- tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
- shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
+ ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
+ tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
+ shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
@@ -260,7 +300,7 @@ jobs:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
args: --release --out dist
- - name: "Install built wheel"
+ - name: "Test wheel"
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
with:
@@ -269,6 +309,8 @@ jobs:
run: |
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
+ ruff --help
+ python -m ruff --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
@@ -276,9 +318,9 @@ jobs:
path: dist
- name: "Archive binary"
run: |
- ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
- tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
- shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
+ ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
+ tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
+ shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
@@ -294,8 +336,10 @@ jobs:
platform:
- target: aarch64-unknown-linux-musl
arch: aarch64
+ maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16
- target: armv7-unknown-linux-musleabihf
arch: armv7
+
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
@@ -309,8 +353,9 @@ jobs:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --out dist
+ docker-options: ${{ matrix.platform.maturin_docker_options }}
- uses: uraimo/run-on-arch-action@v2
- name: Install built wheel
+ name: Test wheel
with:
arch: ${{ matrix.platform.arch }}
distro: alpine_latest
@@ -319,6 +364,7 @@ jobs:
apk add py3-pip
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
+ ruff check --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:
@@ -326,9 +372,9 @@ jobs:
path: dist
- name: "Archive binary"
run: |
- ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
- tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
- shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
+ ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
+ tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
+ shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"
uses: actions/upload-artifact@v3
with:
@@ -349,26 +395,48 @@ jobs:
- musllinux
- musllinux-cross
if: "startsWith(github.ref, 'refs/tags/')"
+ environment:
+ name: release
+ permissions:
+ # For pypi trusted publishing
+ id-token: write
+ # For GitHub release publishing
+ contents: write
steps:
- uses: actions/download-artifact@v3
with:
name: wheels
- - uses: actions/setup-python@v4
+ path: wheels
- name: "Publish to PyPi"
- env:
- TWINE_USERNAME: __token__
- TWINE_PASSWORD: ${{ secrets.RUFF_TOKEN }}
- run: |
- pip install --upgrade twine
- twine upload --skip-existing *
- - name: "Update pre-commit mirror"
- run: |
- curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.RUFF_PRE_COMMIT_PAT }}" -H "X-GitHub-Api-Version: 2022-11-28" https://api.github.com/repos/charliermarsh/ruff-pre-commit/dispatches --data '{"event_type": "pypi_release"}'
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ skip-existing: true
+ packages-dir: wheels
+ verbose: true
- uses: actions/download-artifact@v3
with:
name: binaries
path: binaries
- - name: Release
+ - name: "Publish to GitHub"
uses: softprops/action-gh-release@v1
with:
files: binaries/*
+
+  # After the release has been published, we update downstream repositories.
+  # This is a separate job: if it fails, the release itself is still fine; we just need to
+  # trigger the downstream workflows manually.
+ update-dependents:
+ name: Release
+ runs-on: ubuntu-latest
+ needs: release
+ steps:
+ - name: "Update pre-commit mirror"
+ uses: actions/github-script@v6
+ with:
+ github-token: ${{ secrets.RUFF_PRE_COMMIT_PAT }}
+ script: |
+ github.rest.actions.createWorkflowDispatch({
+ owner: 'astral-sh',
+ repo: 'ruff-pre-commit',
+ workflow_id: 'main.yml',
+ ref: 'main',
+ })
diff --git a/.gitignore b/.gitignore
index ef50cdef6f752..55de3ba557e24 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,11 @@
-# Local cache
-.ruff_cache
crates/ruff/resources/test/cpython
mkdocs.yml
.overrides
-github_search.jsonl
+ruff-old
+github_search*.jsonl
+schemastore
+.venv*
+scratch.py
###
# Rust.gitignore
diff --git a/.markdownlint.yaml b/.markdownlint.yaml
new file mode 100644
index 0000000000000..66c4aeca0e459
--- /dev/null
+++ b/.markdownlint.yaml
@@ -0,0 +1,14 @@
+# default to true for all rules
+default: true
+
+# MD033/no-inline-html
+MD033: false
+
+# MD041/first-line-h1
+MD041: false
+
+# MD013/line-length
+MD013:
+ line_length: 100
+ code_blocks: false
+ ignore_code_blocks: true
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index bb10740493b17..572edd2781a71 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,4 +1,12 @@
fail_fast: true
+
+exclude: |
+ (?x)^(
+ crates/ruff/resources/.*|
+ crates/ruff_python_formatter/resources/.*|
+ crates/ruff_python_formatter/src/snapshots/.*
+ )$
+
repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.12.1
@@ -17,14 +25,9 @@ repos:
rev: v0.33.0
hooks:
- id: markdownlint-fix
- args:
- - --disable
- - MD013 # line-length
- - MD033 # no-inline-html
- - --
- repo: https://github.com/crate-ci/typos
- rev: v1.14.8
+ rev: v1.14.12
hooks:
- id: typos
@@ -63,11 +66,6 @@ repos:
rev: 23.1.0
hooks:
- id: black
- exclude: |
- (?x)^(
- crates/ruff/resources/.*|
- crates/ruff_python_formatter/resources/.*
- )$
ci:
skip: [cargo-fmt, clippy, dev-generate-all]
diff --git a/BREAKING_CHANGES.md b/BREAKING_CHANGES.md
index 087c8fddbd01d..b935fed04127e 100644
--- a/BREAKING_CHANGES.md
+++ b/BREAKING_CHANGES.md
@@ -1,5 +1,25 @@
# Breaking Changes
+## 0.0.268
+
+### The `keep-runtime-typing` setting has been removed ([#4427](https://github.com/charliermarsh/ruff/pull/4427))
+
+Enabling the `keep-runtime-typing` option, located under the `pyupgrade` section, is equivalent
+to ignoring the `UP006` and `UP007` rules via Ruff's standard `ignore` mechanism. As there's no
+need for a dedicated setting to disable these rules, the `keep-runtime-typing` option has been
+removed.
+
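For users migrating, the equivalent configuration is to ignore the two rules directly. A minimal sketch of the replacement in `pyproject.toml` (assuming the usual `[tool.ruff]` table; consult the Ruff documentation for the current rule names):

```toml
[tool.ruff]
# Replaces the removed `keep-runtime-typing = true` (previously under the pyupgrade section):
# ignore the PEP 585 / PEP 604 annotation-upgrade rules instead.
ignore = ["UP006", "UP007"]
```
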
+## 0.0.267
+
+### `update-check` is no longer a valid configuration option ([#4313](https://github.com/charliermarsh/ruff/pull/4313))
+
+The `update-check` functionality was deprecated in [#2530](https://github.com/charliermarsh/ruff/pull/2530),
+in that the behavior itself was removed, and Ruff was changed to warn when that option was enabled.
+
+Now, Ruff will throw an error when `update-check` is provided via a configuration file (e.g.,
+`update-check = false`) or through the command-line, since it has no effect. Users should remove
+this option from their configuration.
+
## 0.0.265
### `--fix-only` now exits with a zero exit code, unless `--exit-non-zero-on-fix` is specified ([#4146](https://github.com/charliermarsh/ruff/pull/4146))
@@ -66,7 +86,8 @@ the intention of adding a stable public API in the future.
### `select`, `extend-select`, `ignore`, and `extend-ignore` have new semantics ([#2312](https://github.com/charliermarsh/ruff/pull/2312))
Previously, the interplay between `select` and its related options could lead to unexpected
-behavior. For example, `ruff --select E501 --ignore ALL` and `ruff --select E501 --extend-ignore ALL` behaved differently. (See [#2312](https://github.com/charliermarsh/ruff/pull/2312) for more
+behavior. For example, `ruff --select E501 --ignore ALL` and `ruff --select E501 --extend-ignore ALL`
+behaved differently. (See [#2312](https://github.com/charliermarsh/ruff/pull/2312) for more
examples.)
When Ruff determines the enabled rule set, it has to reconcile `select` and `ignore` from a variety
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index d8ce7afdeee82..a1be271990735 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -8,6 +8,7 @@ Welcome! We're happy to have you here. Thank you in advance for your contributio
- [Project Structure](#project-structure)
- [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
- [Rule naming convention](#rule-naming-convention)
+ - [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
- [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
- [MkDocs](#mkdocs)
- [Release Process](#release-process)
@@ -93,9 +94,11 @@ At time of writing, the repository includes the following crates:
- `crates/ruff`: library crate containing all lint rules and the core logic for running them.
- `crates/ruff_cli`: binary crate containing Ruff's command-line interface.
-- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g., `cargo dev generate-all`).
+- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g.,
+ `cargo dev generate-all`).
- `crates/ruff_macros`: library crate containing macros used by Ruff.
-- `crates/ruff_python`: library crate implementing Python-specific functionality (e.g., lists of standard library modules by versionb).
+- `crates/ruff_python`: library crate implementing Python-specific functionality (e.g., lists of
+ standard library modules by version).
- `crates/flake8_to_ruff`: binary crate for generating Ruff configuration from Flake8 configuration.
### Example: Adding a new lint rule
@@ -103,14 +106,20 @@ At time of writing, the repository includes the following crates:
At a high level, the steps involved in adding a new lint rule are as follows:
1. Determine a name for the new rule as per our [rule naming convention](#rule-naming-convention).
+
1. Create a file for your rule (e.g., `crates/ruff/src/rules/flake8_bugbear/rules/abstract_base_class.rs`).
+
1. In that file, define a violation struct. You can grep for `#[violation]` to see examples.
-1. Map the violation struct to a rule code in `crates/ruff/src/registry.rs` (e.g., `E402`).
-1. Define the logic for triggering the violation in `crates/ruff/src/checkers/ast.rs` (for AST-based
- checks), `crates/ruff/src/checkers/tokens.rs` (for token-based checks), `crates/ruff/src/checkers/lines.rs`
- (for text-based checks), or `crates/ruff/src/checkers/filesystem.rs` (for filesystem-based
- checks).
-1. Add a test fixture.
+
+1. Map the violation struct to a rule code in `crates/ruff/src/codes.rs` (e.g., `E402`).
+
+1. Define the logic for triggering the violation in `crates/ruff/src/checkers/ast/mod.rs` (for
+ AST-based checks), `crates/ruff/src/checkers/tokens.rs` (for token-based checks),
+ `crates/ruff/src/checkers/lines.rs` (for text-based checks), or
+ `crates/ruff/src/checkers/filesystem.rs` (for filesystem-based checks).
+
+1. Add proper [testing](#rule-testing-fixtures-and-snapshots) for your rule.
+
1. Update the generated files (documentation and generated code).
To define the violation, start by creating a dedicated file for your rule under the appropriate
@@ -125,18 +134,8 @@ collecting diagnostics as it goes.
If you need to inspect the AST, you can run `cargo dev print-ast` with a Python file. Grep
for the `Check::new` invocations to understand how other, similar rules are implemented.
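For orientation, the violation struct mentioned above is usually a small annotated struct plus a `Violation` implementation that supplies the user-facing message. The following is only a rough sketch; the exact macro and trait names are assumptions and should be confirmed by grepping the codebase for `#[violation]` as suggested above:

```rust
// Rough sketch of a violation definition (macro and trait names are assumptions;
// compare against an existing rule found via a grep for `#[violation]`).
use ruff_diagnostics::Violation;
use ruff_macros::{derive_message_formats, violation};

#[violation]
pub struct ModuleImportNotAtTopOfFile;

impl Violation for ModuleImportNotAtTopOfFile {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("Module level import not at top of file")
    }
}
```

The struct name becomes the rule's identifier, and the message is what users see in diagnostics.
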
-To add a test fixture, create a file under `crates/ruff/resources/test/fixtures/[linter]`, named to match
-the code you defined earlier (e.g., `crates/ruff/resources/test/fixtures/pycodestyle/E402.py`). This file should
-contain a variety of violations and non-violations designed to evaluate and demonstrate the behavior
-of your lint rule.
-
-Run `cargo dev generate-all` to generate the code for your new fixture. Then run Ruff
-locally with (e.g.) `cargo run -p ruff_cli -- check crates/ruff/resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402`.
-
-Once you're satisfied with the output, codify the behavior as a snapshot test by adding a new
-`test_case` macro in the relevant `crates/ruff/src/[linter]/mod.rs` file. Then, run `cargo test`.
-Your test will fail, but you'll be prompted to follow-up with `cargo insta review`. Accept the
-generated snapshot, then commit the snapshot file alongside the rest of your changes.
+Once you're satisfied with your code, add tests for your rule. See [rule testing](#rule-testing-fixtures-and-snapshots)
+for more details.
Finally, regenerate the documentation and generated code with `cargo dev generate-all`.
@@ -148,12 +147,44 @@ This implies that rule names:
- should state the bad thing being checked for
-- should not contain instructions on what you what you should use instead
+- should not contain instructions on what you should use instead
(these belong in the rule documentation and the `autofix_title` for rules that have autofix)
When re-implementing rules from other linters, this convention is given more importance than
preserving the original rule name.
+#### Rule testing: fixtures and snapshots
+
+To test rules, Ruff uses snapshots of Ruff's output for a given file (fixture). Generally, there
+will be one file per rule (e.g., `E402.py`), and each file will contain all necessary examples of
+both violations and non-violations. `cargo insta review` will generate a snapshot file containing
+Ruff's output for each fixture, which you can then commit alongside your changes.
+
+Once you've completed the code for the rule itself, you can define tests with the following steps:
+
+1. Add a Python file to `crates/ruff/resources/test/fixtures/[linter]` that contains the code you
+ want to test. The file name should match the rule name (e.g., `E402.py`), and it should include
+ examples of both violations and non-violations.
+
+1. Run Ruff locally against your file and verify the output is as expected. Once you're satisfied
+ with the output (you see the violations you expect, and no others), proceed to the next step.
+ For example, if you're adding a new rule named `E402`, you would run:
+
+ ```shell
+ cargo run -p ruff_cli -- check crates/ruff/resources/test/fixtures/pycodestyle/E402.py --no-cache
+ ```
+
+1. Add the test to the relevant `crates/ruff/src/rules/[linter]/mod.rs` file. If you're contributing
+ a rule to a pre-existing set, you should be able to find a similar example to pattern-match
+ against. If you're adding a new linter, you'll need to create a new `mod.rs` file (see,
+ e.g., `crates/ruff/src/rules/flake8_bugbear/mod.rs`).
+
+1. Run `cargo test`. Your test will fail, but you'll be prompted to follow up
+ with `cargo insta review`. Run `cargo insta review`, review and accept the generated snapshot,
+ then commit the snapshot file alongside the rest of your changes.
+
+1. Run `cargo test` again to ensure that your test passes.
+
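For the `mod.rs` registration step above, the test typically boils down to one `test_case` attribute per fixture plus a shared function that lints the fixture and snapshots the diagnostics. The sketch below is illustrative only; the helper names (`test_path`, `assert_messages!`, `Settings::for_rule`) are assumptions and should be checked against a neighbouring linter's `mod.rs`:

```rust
// Illustrative sketch of a snapshot test in crates/ruff/src/rules/pycodestyle/mod.rs.
// Helper names are assumptions; pattern-match against an existing linter's mod.rs.
use std::path::Path;

use anyhow::Result;
use test_case::test_case;

use crate::registry::Rule;
use crate::test::test_path;
use crate::{assert_messages, settings};

#[test_case(Rule::ModuleImportNotAtTopOfFile, Path::new("E402.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
    let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
    let diagnostics = test_path(
        Path::new("pycodestyle").join(path).as_path(),
        &settings::Settings::for_rule(rule_code),
    )?;
    assert_messages!(snapshot, diagnostics);
    Ok(())
}
```

Running `cargo test` then fails until the snapshot is accepted with `cargo insta review`, as the subsequent steps describe.
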
### Example: Adding a new configuration option
Ruff's user-facing settings live in a few different places.
@@ -184,6 +215,8 @@ Finally, regenerate the documentation and generated code with `cargo dev generat
To preview any changes to the documentation locally:
+1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
+
1. Install MkDocs and Material for MkDocs with:
```shell
diff --git a/Cargo.lock b/Cargo.lock
index 98183d8a3a4ef..89efc25897a14 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -14,17 +14,6 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
-[[package]]
-name = "ahash"
-version = "0.7.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
-dependencies = [
- "getrandom",
- "once_cell",
- "version_check",
-]
-
[[package]]
name = "aho-corasick"
version = "0.7.20"
@@ -43,6 +32,12 @@ dependencies = [
"memchr",
]
+[[package]]
+name = "android-tzdata"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+
[[package]]
name = "android_system_properties"
version = "0.1.5"
@@ -76,9 +71,9 @@ dependencies = [
[[package]]
name = "anstream"
-version = "0.3.0"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e579a7752471abc2a8268df8b20005e3eadd975f585398f17efcfd8d4927371"
+checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163"
dependencies = [
"anstyle",
"anstyle-parse",
@@ -115,9 +110,9 @@ dependencies = [
[[package]]
name = "anstyle-wincon"
-version = "1.0.0"
+version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4bcd8291a340dd8ac70e18878bc4501dd7b4ff970cfa21c207d36ece51ea88fd"
+checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188"
dependencies = [
"anstyle",
"windows-sys 0.48.0",
@@ -125,9 +120,9 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.70"
+version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4"
+checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
[[package]]
name = "argfile"
@@ -138,21 +133,6 @@ dependencies = [
"os_str_bytes",
]
-[[package]]
-name = "ascii"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16"
-
-[[package]]
-name = "ascii-canvas"
-version = "3.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6"
-dependencies = [
- "term",
-]
-
[[package]]
name = "assert_cmd"
version = "2.0.11"
@@ -160,7 +140,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86d6b683edf8d1119fe420a94f8a7e389239666aa72e65495d91c00462510151"
dependencies = [
"anstyle",
- "bstr 1.4.0",
+ "bstr",
"doc-comment",
"predicates",
"predicates-core",
@@ -200,21 +180,6 @@ dependencies = [
"serde",
]
-[[package]]
-name = "bit-set"
-version = "0.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
-dependencies = [
- "bit-vec",
-]
-
-[[package]]
-name = "bit-vec"
-version = "0.6.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
-
[[package]]
name = "bitflags"
version = "1.3.2"
@@ -223,26 +188,15 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
-version = "2.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c70beb79cbb5ce9c4f8e20849978f34225931f665bb49efa6982875a4d5facb3"
-
-[[package]]
-name = "bstr"
-version = "0.2.17"
+version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223"
-dependencies = [
- "lazy_static",
- "memchr",
- "regex-automata",
-]
+checksum = "6776fc96284a0bb647b615056fc496d1fe1644a7ab01829818a6d91cae888b84"
[[package]]
name = "bstr"
-version = "1.4.0"
+version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3d4260bcc2e8fc9df1eac4919a720effeb63a3f0952f5bf4944adfa18897f09"
+checksum = "a246e68bb43f6cd9db24bea052a53e40405417c5fb372e3d1a8a7f770a564ef5"
dependencies = [
"memchr",
"once_cell",
@@ -252,9 +206,9 @@ dependencies = [
[[package]]
name = "bumpalo"
-version = "3.12.1"
+version = "3.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b1ce199063694f33ffb7dd4e0ee620741495c32833cde5aa08f02a0bf96f0c8"
+checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "cachedir"
@@ -294,13 +248,13 @@ dependencies = [
[[package]]
name = "chrono"
-version = "0.4.24"
+version = "0.4.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b"
+checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5"
dependencies = [
+ "android-tzdata",
"iana-time-zone",
"js-sys",
- "num-integer",
"num-traits",
"time",
"wasm-bindgen",
@@ -309,9 +263,9 @@ dependencies = [
[[package]]
name = "ciborium"
-version = "0.2.0"
+version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0c137568cc60b904a7724001b35ce2630fd00d5d84805fbb608ab89509d788f"
+checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926"
dependencies = [
"ciborium-io",
"ciborium-ll",
@@ -320,15 +274,15 @@ dependencies = [
[[package]]
name = "ciborium-io"
-version = "0.2.0"
+version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "346de753af073cc87b52b2083a506b38ac176a44cfb05497b622e27be899b369"
+checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656"
[[package]]
name = "ciborium-ll"
-version = "0.2.0"
+version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "213030a2b5a4e0c0892b6652260cf6ccac84827b83a85a534e178e3906c4cf1b"
+checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b"
dependencies = [
"ciborium-io",
"half",
@@ -336,21 +290,9 @@ dependencies = [
[[package]]
name = "clap"
-version = "3.2.23"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5"
-dependencies = [
- "bitflags 1.3.2",
- "clap_lex 0.2.4",
- "indexmap",
- "textwrap",
-]
-
-[[package]]
-name = "clap"
-version = "4.2.4"
+version = "4.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "956ac1f6381d8d82ab4684768f89c0ea3afe66925ceadb4eeb3fc452ffc55d62"
+checksum = "b4ed2379f8603fa2b7509891660e802b88c70a79a6427a70abb5968054de2c28"
dependencies = [
"clap_builder",
"clap_derive",
@@ -359,24 +301,24 @@ dependencies = [
[[package]]
name = "clap_builder"
-version = "4.2.4"
+version = "4.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84080e799e54cff944f4b4a4b0e71630b0e0443b25b985175c7dddc1a859b749"
+checksum = "72394f3339a76daf211e57d4bcb374410f3965dcc606dd0e03738c7888766980"
dependencies = [
"anstream",
"anstyle",
"bitflags 1.3.2",
- "clap_lex 0.4.1",
+ "clap_lex",
"strsim",
]
[[package]]
name = "clap_complete"
-version = "4.2.1"
+version = "4.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a19591b2ab0e3c04b588a0e04ddde7b9eaa423646d1b4a8092879216bf47473"
+checksum = "7f6b5c519bab3ea61843a7923d074b04245624bb84a64a8c150f5deb014e388b"
dependencies = [
- "clap 4.2.4",
+ "clap",
]
[[package]]
@@ -385,7 +327,7 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "183495371ea78d4c9ff638bfc6497d46fed2396e4f9c50aebc1278a4a9919a3d"
dependencies = [
- "clap 4.2.4",
+ "clap",
"clap_complete",
"clap_complete_fig",
"clap_complete_nushell",
@@ -393,50 +335,41 @@ dependencies = [
[[package]]
name = "clap_complete_fig"
-version = "4.2.0"
+version = "4.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3af28956330989baa428ed4d3471b853715d445c62de21b67292e22cf8a41fa"
+checksum = "99fee1d30a51305a6c2ed3fc5709be3c8af626c9c958e04dd9ae94e27bcbce9f"
dependencies = [
- "clap 4.2.4",
+ "clap",
"clap_complete",
]
[[package]]
name = "clap_complete_nushell"
-version = "0.1.10"
+version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7fa41f5e6aa83bd151b70fd0ceaee703d68cd669522795dc812df9edad1252c"
+checksum = "5d02bc8b1a18ee47c4d2eec3fb5ac034dc68ebea6125b1509e9ccdffcddce66e"
dependencies = [
- "clap 4.2.4",
+ "clap",
"clap_complete",
]
[[package]]
name = "clap_derive"
-version = "4.2.0"
+version = "4.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f9644cd56d6b87dbe899ef8b053e331c0637664e9e21a33dfcdc36093f5c5c4"
+checksum = "59e9ef9a08ee1c0e1f2e162121665ac45ac3783b0f897db7244ae75ad9a8f65b"
dependencies = [
"heck",
"proc-macro2",
"quote",
- "syn 2.0.15",
-]
-
-[[package]]
-name = "clap_lex"
-version = "0.2.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
-dependencies = [
- "os_str_bytes",
+ "syn 2.0.18",
]
[[package]]
name = "clap_lex"
-version = "0.4.1"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1"
+checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
[[package]]
name = "clearscreen"
@@ -451,16 +384,6 @@ dependencies = [
"winapi",
]
-[[package]]
-name = "codespan-reporting"
-version = "0.11.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
-dependencies = [
- "termcolor",
- "unicode-width",
-]
-
[[package]]
name = "colorchoice"
version = "1.0.0"
@@ -486,14 +409,14 @@ checksum = "5458d9d1a587efaf5091602c59d299696a3877a439c8f6d461a2d3cce11df87a"
[[package]]
name = "console"
-version = "0.15.5"
+version = "0.15.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3d79fbe8970a77e3e34151cc13d3b3e248aa0faaecb9f6091fa07ebefe5ad60"
+checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8"
dependencies = [
"encode_unicode",
"lazy_static",
"libc",
- "windows-sys 0.42.0",
+ "windows-sys 0.45.0",
]
[[package]]
@@ -522,6 +445,12 @@ version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"
+[[package]]
+name = "countme"
+version = "3.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636"
+
[[package]]
name = "crc32fast"
version = "1.3.2"
@@ -533,19 +462,19 @@ dependencies = [
[[package]]
name = "criterion"
-version = "0.4.0"
+version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb"
+checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f"
dependencies = [
"anes",
- "atty",
"cast",
"ciborium",
- "clap 3.2.23",
+ "clap",
"criterion-plot",
+ "is-terminal",
"itertools",
- "lazy_static",
"num-traits",
+ "once_cell",
"oorandom",
"plotters",
"rayon",
@@ -626,50 +555,6 @@ dependencies = [
"syn 1.0.109",
]
-[[package]]
-name = "cxx"
-version = "1.0.94"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f61f1b6389c3fe1c316bf8a4dccc90a38208354b330925bce1f74a6c4756eb93"
-dependencies = [
- "cc",
- "cxxbridge-flags",
- "cxxbridge-macro",
- "link-cplusplus",
-]
-
-[[package]]
-name = "cxx-build"
-version = "1.0.94"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "12cee708e8962df2aeb38f594aae5d827c022b6460ac71a7a3e2c3c2aae5a07b"
-dependencies = [
- "cc",
- "codespan-reporting",
- "once_cell",
- "proc-macro2",
- "quote",
- "scratch",
- "syn 2.0.15",
-]
-
-[[package]]
-name = "cxxbridge-flags"
-version = "1.0.94"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7944172ae7e4068c533afbb984114a56c46e9ccddda550499caa222902c7f7bb"
-
-[[package]]
-name = "cxxbridge-macro"
-version = "1.0.94"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2345488264226bf682893e25de0769f3360aac9957980ec49361b083ddaa5bc5"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.15",
-]
-
[[package]]
name = "diff"
version = "0.1.13"
@@ -693,21 +578,11 @@ dependencies = [
[[package]]
name = "dirs"
-version = "5.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dece029acd3353e3a58ac2e3eb3c8d6c35827a892edc6cc4138ef9c33df46ecd"
-dependencies = [
- "dirs-sys 0.4.0",
-]
-
-[[package]]
-name = "dirs-next"
-version = "2.0.0"
+version = "5.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
+checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
dependencies = [
- "cfg-if",
- "dirs-sys-next",
+ "dirs-sys 0.4.1",
]
[[package]]
@@ -723,24 +598,14 @@ dependencies = [
[[package]]
name = "dirs-sys"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04414300db88f70d74c5ff54e50f9e1d1737d9a5b90f53fcf2e95ca2a9ab554b"
-dependencies = [
- "libc",
- "redox_users",
- "windows-sys 0.45.0",
-]
-
-[[package]]
-name = "dirs-sys-next"
-version = "0.1.2"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
+checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
dependencies = [
"libc",
+ "option-ext",
"redox_users",
- "winapi",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -767,15 +632,6 @@ version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
-[[package]]
-name = "ena"
-version = "0.14.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c533630cf40e9caa44bd91aadc88a75d75a4c3a12b4cfde353cbed41daa1e1f1"
-dependencies = [
- "log",
-]
-
[[package]]
name = "encode_unicode"
version = "0.3.6"
@@ -833,18 +689,12 @@ dependencies = [
"windows-sys 0.48.0",
]
-[[package]]
-name = "fixedbitset"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
-
[[package]]
name = "flake8-to-ruff"
-version = "0.0.265"
+version = "0.0.272"
dependencies = [
"anyhow",
- "clap 4.2.4",
+ "clap",
"colored",
"configparser",
"once_cell",
@@ -860,9 +710,9 @@ dependencies = [
[[package]]
name = "flate2"
-version = "1.0.25"
+version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
+checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743"
dependencies = [
"crc32fast",
"miniz_oxide",
@@ -899,10 +749,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4"
dependencies = [
"cfg-if",
- "js-sys",
"libc",
"wasi 0.11.0+wasi-snapshot-preview1",
- "wasm-bindgen",
]
[[package]]
@@ -918,7 +766,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc"
dependencies = [
"aho-corasick 0.7.20",
- "bstr 1.4.0",
+ "bstr",
"fnv",
"log",
"regex",
@@ -935,9 +783,6 @@ name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
-dependencies = [
- "ahash",
-]
[[package]]
name = "heck"
@@ -991,12 +836,11 @@ dependencies = [
[[package]]
name = "iana-time-zone-haiku"
-version = "0.1.1"
+version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca"
+checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
dependencies = [
- "cxx",
- "cxx-build",
+ "cc",
]
[[package]]
@@ -1044,6 +888,7 @@ checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown",
+ "serde",
]
[[package]]
@@ -1090,9 +935,9 @@ dependencies = [
[[package]]
name = "io-lifetimes"
-version = "1.0.10"
+version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220"
+checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2"
dependencies = [
"hermit-abi 0.3.1",
"libc",
@@ -1141,9 +986,9 @@ checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"
[[package]]
name = "js-sys"
-version = "0.3.61"
+version = "0.3.63"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730"
+checksum = "2f37a4a5928311ac501dee68b3c7613a1037d0edb30c8e5427bd832d55d1b790"
dependencies = [
"wasm-bindgen",
]
@@ -1168,37 +1013,11 @@ dependencies = [
"libc",
]
-[[package]]
-name = "lalrpop"
-version = "0.19.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f34313ec00c2eb5c3c87ca6732ea02dcf3af99c3ff7a8fb622ffb99c9d860a87"
-dependencies = [
- "ascii-canvas",
- "bit-set",
- "diff",
- "ena",
- "is-terminal",
- "itertools",
- "lalrpop-util",
- "petgraph",
- "pico-args",
- "regex",
- "regex-syntax 0.6.29",
- "string_cache",
- "term",
- "tiny-keccak",
- "unicode-xid",
-]
-
[[package]]
name = "lalrpop-util"
-version = "0.19.9"
+version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5c1f7869c94d214466c5fd432dfed12c379fd87786768d36455892d46b18edd"
-dependencies = [
- "regex",
-]
+checksum = "3f35c735096c0293d313e8f2a641627472b83d01b937177fe76e5e2708d31e0d"
[[package]]
name = "lazy_static"
@@ -1238,9 +1057,9 @@ dependencies = [
[[package]]
name = "libc"
-version = "0.2.142"
+version = "0.2.144"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a987beff54b60ffa6d51982e1aa1146bc42f19bd26be28b0586f252fccf5317"
+checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1"
[[package]]
name = "libcst"
@@ -1268,23 +1087,14 @@ dependencies = [
[[package]]
name = "libmimalloc-sys"
-version = "0.1.32"
+version = "0.1.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43a558e3d911bc3c7bfc8c78bc580b404d6e51c1cefbf656e176a94b49b0df40"
+checksum = "f4ac0e912c8ef1b735e92369695618dc5b1819f5a7bf3f167301a3ba1cea515e"
dependencies = [
"cc",
"libc",
]
-[[package]]
-name = "link-cplusplus"
-version = "1.0.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5"
-dependencies = [
- "cc",
-]
-
[[package]]
name = "linked-hash-map"
version = "0.5.6"
@@ -1293,37 +1103,15 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "linux-raw-sys"
-version = "0.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b085a4f2cde5781fc4b1717f2e86c62f5cda49de7ba99a7c2eae02b61c9064c"
-
-[[package]]
-name = "lock_api"
-version = "0.4.9"
+version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
-dependencies = [
- "autocfg",
- "scopeguard",
-]
+checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
[[package]]
name = "log"
-version = "0.4.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
-dependencies = [
- "cfg-if",
-]
-
-[[package]]
-name = "lz4_flex"
-version = "0.9.5"
+version = "0.4.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a8cbbb2831780bc3b9c15a41f5b49222ef756b6730a95f3decfdd15903eb5a3"
-dependencies = [
- "twox-hash",
-]
+checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de"
[[package]]
name = "matches"
@@ -1348,9 +1136,9 @@ dependencies = [
[[package]]
name = "mimalloc"
-version = "0.1.36"
+version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d88dad3f985ec267a3fcb7a1726f5cb1a7e8cad8b646e70a84f967210df23da"
+checksum = "4e2894987a3459f3ffb755608bd82188f8ed00d0ae077f1edea29c068d639d98"
dependencies = [
"libmimalloc-sys",
]
@@ -1363,23 +1151,23 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniz_oxide"
-version = "0.6.2"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
+checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
dependencies = [
"adler",
]
[[package]]
name = "mio"
-version = "0.8.6"
+version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9"
+checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2"
dependencies = [
"libc",
"log",
"wasi 0.11.0+wasi-snapshot-preview1",
- "windows-sys 0.45.0",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -1388,12 +1176,6 @@ version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "308d96db8debc727c3fd9744aac51751243420e46edf401010908da7f8d5e57c"
-[[package]]
-name = "new_debug_unreachable"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
-
[[package]]
name = "nextest-workspace-hack"
version = "0.1.0"
@@ -1430,9 +1212,9 @@ dependencies = [
[[package]]
name = "notify"
-version = "5.1.0"
+version = "5.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58ea850aa68a06e48fdb069c0ec44d0d64c8dbffa49bf3b6f7f0a901fdea1ba9"
+checksum = "729f63e1ca555a43fe3efa4f3efdf4801c479da85b432242a7b726f353c88486"
dependencies = [
"bitflags 1.3.2",
"crossbeam-channel",
@@ -1443,7 +1225,7 @@ dependencies = [
"libc",
"mio",
"walkdir",
- "windows-sys 0.42.0",
+ "windows-sys 0.45.0",
]
[[package]]
@@ -1457,15 +1239,6 @@ dependencies = [
"num-traits",
]
-[[package]]
-name = "num-complex"
-version = "0.4.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d"
-dependencies = [
- "num-traits",
-]
-
[[package]]
name = "num-integer"
version = "0.1.45"
@@ -1497,9 +1270,9 @@ dependencies = [
[[package]]
name = "once_cell"
-version = "1.17.1"
+version = "1.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
+checksum = "9670a07f94779e00908f3e686eab508878ebb390ba6e604d3a284c00e8d0487b"
[[package]]
name = "oorandom"
@@ -1507,6 +1280,12 @@ version = "11.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
+[[package]]
+name = "option-ext"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
+
[[package]]
name = "os_str_bytes"
version = "6.5.0"
@@ -1525,29 +1304,6 @@ dependencies = [
"winapi",
]
-[[package]]
-name = "parking_lot"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
-dependencies = [
- "lock_api",
- "parking_lot_core",
-]
-
-[[package]]
-name = "parking_lot_core"
-version = "0.9.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521"
-dependencies = [
- "cfg-if",
- "libc",
- "redox_syscall 0.2.16",
- "smallvec",
- "windows-sys 0.45.0",
-]
-
[[package]]
name = "paste"
version = "1.0.12"
@@ -1556,18 +1312,18 @@ checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79"
[[package]]
name = "path-absolutize"
-version = "3.0.14"
+version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f1d4993b16f7325d90c18c3c6a3327db7808752db8d208cea0acee0abd52c52"
+checksum = "43eb3595c63a214e1b37b44f44b0a84900ef7ae0b4c5efce59e123d246d7a0de"
dependencies = [
"path-dedot",
]
[[package]]
name = "path-dedot"
-version = "3.0.18"
+version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a81540d94551664b72b72829b12bd167c73c9d25fbac0e04fafa8023f7e4901"
+checksum = "9d55e486337acb9973cdea3ec5638c1b3bcb22e573b2b7b41969e0c744d5a15e"
dependencies = [
"once_cell",
]
@@ -1607,9 +1363,9 @@ checksum = "9fa00462b37ead6d11a82c9d568b26682d78e0477dc02d1966c013af80969739"
[[package]]
name = "pep440_rs"
-version = "0.3.5"
+version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aac177a025c60a4dd25d638bf33e746d1ead5f7123f6650f35b4394c7ce1a104"
+checksum = "fe1d15693a11422cfa7d401b00dc9ae9fb8edbfbcb711a77130663f4ddf67650"
dependencies = [
"lazy_static",
"regex",
@@ -1619,20 +1375,26 @@ dependencies = [
]
[[package]]
-name = "percent-encoding"
-version = "2.2.0"
+name = "pep508_rs"
+version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
+checksum = "969679a29dfdc8278a449f75b3dd45edf57e649bd59f7502429c2840751c46d8"
+dependencies = [
+ "once_cell",
+ "pep440_rs",
+ "regex",
+ "serde",
+ "thiserror",
+ "tracing",
+ "unicode-width",
+ "url",
+]
[[package]]
-name = "petgraph"
-version = "0.6.3"
+name = "percent-encoding"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4"
-dependencies = [
- "fixedbitset",
- "indexmap",
-]
+checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
[[package]]
name = "phf"
@@ -1640,7 +1402,7 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "928c6535de93548188ef63bb7c4036bd415cd8f36ad25af44b9789b2ee72a48c"
dependencies = [
- "phf_shared 0.11.1",
+ "phf_shared",
]
[[package]]
@@ -1650,7 +1412,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a56ac890c5e3ca598bbdeaa99964edb5b0258a583a9eb6ef4e89fc85d9224770"
dependencies = [
"phf_generator",
- "phf_shared 0.11.1",
+ "phf_shared",
]
[[package]]
@@ -1659,19 +1421,10 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1181c94580fa345f50f19d738aaa39c0ed30a600d95cb2d3e23f94266f14fbf"
dependencies = [
- "phf_shared 0.11.1",
+ "phf_shared",
"rand",
]
-[[package]]
-name = "phf_shared"
-version = "0.10.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096"
-dependencies = [
- "siphasher",
-]
-
[[package]]
name = "phf_shared"
version = "0.11.1"
@@ -1681,12 +1434,6 @@ dependencies = [
"siphasher",
]
-[[package]]
-name = "pico-args"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468"
-
[[package]]
name = "pin-project-lite"
version = "0.2.9"
@@ -1732,18 +1479,6 @@ dependencies = [
"syn 1.0.109",
]
-[[package]]
-name = "ppv-lite86"
-version = "0.2.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
-
-[[package]]
-name = "precomputed-hash"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
-
[[package]]
name = "predicates"
version = "3.0.3"
@@ -1810,13 +1545,26 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.56"
+version = "1.0.59"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
+checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b"
dependencies = [
"unicode-ident",
]
+[[package]]
+name = "pyproject-toml"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f04dbbb336bd88583943c7cd973a32fed323578243a7569f40cb0c7da673321b"
+dependencies = [
+ "indexmap",
+ "pep440_rs",
+ "pep508_rs",
+ "serde",
+ "toml",
+]
+
[[package]]
name = "quick-junit"
version = "0.3.2"
@@ -1842,37 +1590,19 @@ dependencies = [
[[package]]
name = "quote"
-version = "1.0.26"
+version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
+checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
dependencies = [
"proc-macro2",
]
-[[package]]
-name = "radium"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
-
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
- "libc",
- "rand_chacha",
- "rand_core",
-]
-
-[[package]]
-name = "rand_chacha"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
-dependencies = [
- "ppv-lite86",
"rand_core",
]
@@ -1881,9 +1611,6 @@ name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
-dependencies = [
- "getrandom",
-]
[[package]]
name = "rayon"
@@ -1938,13 +1665,13 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.8.1"
+version = "1.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af83e617f331cc6ae2da5443c602dfa5af81e517212d9d611a5b3ba1777b5370"
+checksum = "81ca098a9821bd52d6b24fd8b10bd081f47d39c22778cafaa75a2857a62c6390"
dependencies = [
"aho-corasick 1.0.1",
"memchr",
- "regex-syntax 0.7.1",
+ "regex-syntax",
]
[[package]]
@@ -1955,15 +1682,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
[[package]]
name = "regex-syntax"
-version = "0.6.29"
+version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
-
-[[package]]
-name = "regex-syntax"
-version = "0.7.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5996294f19bd3aae0453a862ad728f60e6600695733dd5df01da90c54363a3c"
+checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
[[package]]
name = "result-like"
@@ -2004,15 +1725,15 @@ dependencies = [
[[package]]
name = "ruff"
-version = "0.0.265"
+version = "0.0.272"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
- "bitflags 2.1.0",
+ "bitflags 2.3.1",
"chrono",
- "clap 4.2.4",
+ "clap",
"colored",
- "dirs 5.0.0",
+ "dirs 5.0.1",
"fern",
"glob",
"globset",
@@ -2032,19 +1753,22 @@ dependencies = [
"pathdiff",
"pep440_rs",
"pretty_assertions",
+ "pyproject-toml",
"quick-junit",
"regex",
"result-like",
"ruff_cache",
"ruff_diagnostics",
"ruff_macros",
+ "ruff_newlines",
"ruff_python_ast",
"ruff_python_semantic",
"ruff_python_stdlib",
"ruff_rustpython",
"ruff_text_size",
+ "ruff_textwrap",
"rustc-hash",
- "rustpython-common",
+ "rustpython-format",
"rustpython-parser",
"schemars",
"semver",
@@ -2056,11 +1780,11 @@ dependencies = [
"strum",
"strum_macros",
"test-case",
- "textwrap",
"thiserror",
"toml",
"typed-arena",
"unicode-width",
+ "unicode_names2",
]
[[package]]
@@ -2072,6 +1796,7 @@ dependencies = [
"once_cell",
"ruff",
"ruff_python_ast",
+ "ruff_python_formatter",
"rustpython-parser",
"serde",
"serde_json",
@@ -2093,7 +1818,7 @@ dependencies = [
[[package]]
name = "ruff_cli"
-version = "0.0.265"
+version = "0.0.272"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -2101,10 +1826,10 @@ dependencies = [
"assert_cmd",
"atty",
"bincode",
- "bitflags 2.1.0",
+ "bitflags 2.3.1",
"cachedir",
"chrono",
- "clap 4.2.4",
+ "clap",
"clap_complete_command",
"clearscreen",
"colored",
@@ -2122,15 +1847,16 @@ dependencies = [
"ruff_cache",
"ruff_diagnostics",
"ruff_python_ast",
+ "ruff_python_formatter",
"ruff_python_stdlib",
"ruff_text_size",
+ "ruff_textwrap",
"rustc-hash",
"serde",
"serde_json",
"shellexpand",
"similar",
"strum",
- "textwrap",
"tikv-jemallocator",
"ureq",
"walkdir",
@@ -2142,7 +1868,7 @@ name = "ruff_dev"
version = "0.0.0"
dependencies = [
"anyhow",
- "clap 4.2.4",
+ "clap",
"itertools",
"libcst",
"once_cell",
@@ -2151,13 +1877,13 @@ dependencies = [
"ruff",
"ruff_cli",
"ruff_diagnostics",
- "rustpython-common",
+ "ruff_textwrap",
+ "rustpython-format",
"rustpython-parser",
"schemars",
"serde_json",
"strum",
"strum_macros",
- "textwrap",
]
[[package]]
@@ -2180,10 +1906,19 @@ dependencies = [
"rustc-hash",
"schemars",
"serde",
+ "static_assertions",
"tracing",
"unicode-width",
]
+[[package]]
+name = "ruff_index"
+version = "0.0.0"
+dependencies = [
+ "ruff_macros",
+ "static_assertions",
+]
+
[[package]]
name = "ruff_macros"
version = "0.0.0"
@@ -2191,8 +1926,16 @@ dependencies = [
"itertools",
"proc-macro2",
"quote",
- "syn 2.0.15",
- "textwrap",
+ "ruff_textwrap",
+ "syn 2.0.18",
+]
+
+[[package]]
+name = "ruff_newlines"
+version = "0.0.0"
+dependencies = [
+ "memchr",
+ "ruff_text_size",
]
[[package]]
@@ -2200,7 +1943,8 @@ name = "ruff_python_ast"
version = "0.0.0"
dependencies = [
"anyhow",
- "bitflags 2.1.0",
+ "bitflags 2.3.1",
+ "insta",
"is-macro",
"itertools",
"log",
@@ -2208,11 +1952,11 @@ dependencies = [
"num-bigint",
"num-traits",
"once_cell",
- "regex",
- "ruff_rustpython",
+ "ruff_newlines",
"ruff_text_size",
"rustc-hash",
- "rustpython-common",
+ "rustpython-ast",
+ "rustpython-literal",
"rustpython-parser",
"serde",
"smallvec",
@@ -2223,18 +1967,18 @@ name = "ruff_python_formatter"
version = "0.0.0"
dependencies = [
"anyhow",
- "clap 4.2.4",
+ "clap",
+ "countme",
"insta",
"is-macro",
"itertools",
"once_cell",
"ruff_formatter",
+ "ruff_newlines",
"ruff_python_ast",
- "ruff_rustpython",
"ruff_testing_macros",
"ruff_text_size",
"rustc-hash",
- "rustpython-common",
"rustpython-parser",
"similar",
"test-case",
@@ -2244,9 +1988,11 @@ dependencies = [
name = "ruff_python_semantic"
version = "0.0.0"
dependencies = [
- "bitflags 2.1.0",
+ "bitflags 2.3.1",
"is-macro",
"nohash-hasher",
+ "num-traits",
+ "ruff_index",
"ruff_python_ast",
"ruff_python_stdlib",
"ruff_text_size",
@@ -2268,8 +2014,6 @@ name = "ruff_rustpython"
version = "0.0.0"
dependencies = [
"anyhow",
- "once_cell",
- "rustpython-common",
"rustpython-parser",
]
@@ -2280,25 +2024,32 @@ dependencies = [
"glob",
"proc-macro2",
"quote",
- "syn 2.0.15",
+ "syn 2.0.18",
]
[[package]]
name = "ruff_text_size"
version = "0.0.0"
-source = "git+https://github.com/charliermarsh/RustPython.git?rev=c3147d2c1524ebd0e90cf1c2938d770314fd5a5a#c3147d2c1524ebd0e90cf1c2938d770314fd5a5a"
+source = "git+https://github.com/astral-sh/RustPython-Parser.git?rev=7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd#7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd"
dependencies = [
"schemars",
"serde",
]
+[[package]]
+name = "ruff_textwrap"
+version = "0.0.0"
+dependencies = [
+ "ruff_newlines",
+ "ruff_text_size",
+]
+
[[package]]
name = "ruff_wasm"
version = "0.0.0"
dependencies = [
"console_error_panic_hook",
"console_log",
- "getrandom",
"js-sys",
"log",
"ruff",
@@ -2330,9 +2081,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustix"
-version = "0.37.13"
+version = "0.37.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f79bef90eb6d984c72722595b5b1348ab39275a5e5123faca6863bf07d75a4e0"
+checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d"
dependencies = [
"bitflags 1.3.2",
"errno",
@@ -2357,75 +2108,70 @@ dependencies = [
[[package]]
name = "rustpython-ast"
version = "0.2.0"
-source = "git+https://github.com/charliermarsh/RustPython.git?rev=c3147d2c1524ebd0e90cf1c2938d770314fd5a5a#c3147d2c1524ebd0e90cf1c2938d770314fd5a5a"
+source = "git+https://github.com/astral-sh/RustPython-Parser.git?rev=7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd#7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd"
dependencies = [
+ "is-macro",
"num-bigint",
- "ruff_text_size",
+ "rustpython-parser-core",
+ "static_assertions",
]
[[package]]
-name = "rustpython-common"
+name = "rustpython-format"
version = "0.2.0"
-source = "git+https://github.com/charliermarsh/RustPython.git?rev=c3147d2c1524ebd0e90cf1c2938d770314fd5a5a#c3147d2c1524ebd0e90cf1c2938d770314fd5a5a"
+source = "git+https://github.com/astral-sh/RustPython-Parser.git?rev=7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd#7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd"
dependencies = [
- "ascii",
- "bitflags 1.3.2",
- "bstr 0.2.17",
- "cfg-if",
- "getrandom",
- "hexf-parse",
+ "bitflags 2.3.1",
"itertools",
- "lexical-parse-float",
- "libc",
- "lock_api",
"num-bigint",
"num-traits",
- "once_cell",
- "radium",
- "rand",
- "siphasher",
- "unic-ucd-category",
- "volatile",
- "widestring",
+ "rustpython-literal",
]
[[package]]
-name = "rustpython-compiler-core"
+name = "rustpython-literal"
version = "0.2.0"
-source = "git+https://github.com/charliermarsh/RustPython.git?rev=c3147d2c1524ebd0e90cf1c2938d770314fd5a5a#c3147d2c1524ebd0e90cf1c2938d770314fd5a5a"
+source = "git+https://github.com/astral-sh/RustPython-Parser.git?rev=7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd#7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd"
dependencies = [
- "bitflags 1.3.2",
- "itertools",
- "lz4_flex",
- "num-bigint",
- "num-complex",
- "ruff_text_size",
+ "hexf-parse",
+ "is-macro",
+ "lexical-parse-float",
+ "num-traits",
+ "unic-ucd-category",
]
[[package]]
name = "rustpython-parser"
version = "0.2.0"
-source = "git+https://github.com/charliermarsh/RustPython.git?rev=c3147d2c1524ebd0e90cf1c2938d770314fd5a5a#c3147d2c1524ebd0e90cf1c2938d770314fd5a5a"
+source = "git+https://github.com/astral-sh/RustPython-Parser.git?rev=7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd#7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd"
dependencies = [
"anyhow",
+ "is-macro",
"itertools",
- "lalrpop",
"lalrpop-util",
"log",
"num-bigint",
"num-traits",
"phf",
"phf_codegen",
- "ruff_text_size",
"rustc-hash",
"rustpython-ast",
- "rustpython-compiler-core",
+ "rustpython-parser-core",
"tiny-keccak",
"unic-emoji-char",
"unic-ucd-ident",
"unicode_names2",
]
+[[package]]
+name = "rustpython-parser-core"
+version = "0.2.0"
+source = "git+https://github.com/astral-sh/RustPython-Parser.git?rev=7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd#7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd"
+dependencies = [
+ "is-macro",
+ "ruff_text_size",
+]
+
[[package]]
name = "rustversion"
version = "1.0.12"
@@ -2483,12 +2229,6 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
-[[package]]
-name = "scratch"
-version = "1.0.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1"
-
[[package]]
name = "sct"
version = "0.7.0"
@@ -2507,9 +2247,9 @@ checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
[[package]]
name = "serde"
-version = "1.0.160"
+version = "1.0.163"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c"
+checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2"
dependencies = [
"serde_derive",
]
@@ -2527,13 +2267,13 @@ dependencies = [
[[package]]
name = "serde_derive"
-version = "1.0.160"
+version = "1.0.163"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df"
+checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.15",
+ "syn 2.0.18",
]
[[package]]
@@ -2561,9 +2301,9 @@ dependencies = [
[[package]]
name = "serde_spanned"
-version = "0.6.1"
+version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4"
+checksum = "93107647184f6027e3b7dcb2e11034cf95ffa1e3a682c67951963ac69c1c007d"
dependencies = [
"serde",
]
@@ -2574,7 +2314,7 @@ version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da03fa3b94cc19e3ebfc88c4229c49d8f08cdbd1228870a45f0ffdf84988e14b"
dependencies = [
- "dirs 5.0.0",
+ "dirs 5.0.1",
]
[[package]]
@@ -2595,12 +2335,6 @@ version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
-[[package]]
-name = "smawk"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f67ad224767faa3c7d8b6d91985b78e70a1324408abcb1cfcc2be4c06bc06043"
-
[[package]]
name = "spin"
version = "0.5.2"
@@ -2613,19 +2347,6 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
-[[package]]
-name = "string_cache"
-version = "0.8.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b"
-dependencies = [
- "new_debug_unreachable",
- "once_cell",
- "parking_lot",
- "phf_shared 0.10.0",
- "precomputed-hash",
-]
-
[[package]]
name = "strsim"
version = "0.10.0"
@@ -2667,9 +2388,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.15"
+version = "2.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822"
+checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
dependencies = [
"proc-macro2",
"quote",
@@ -2698,26 +2419,6 @@ dependencies = [
"windows-sys 0.45.0",
]
-[[package]]
-name = "term"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f"
-dependencies = [
- "dirs-next",
- "rustversion",
- "winapi",
-]
-
-[[package]]
-name = "termcolor"
-version = "1.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6"
-dependencies = [
- "winapi-util",
-]
-
[[package]]
name = "terminfo"
version = "0.8.0"
@@ -2772,17 +2473,6 @@ dependencies = [
"test-case-core",
]
-[[package]]
-name = "textwrap"
-version = "0.16.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d"
-dependencies = [
- "smawk",
- "unicode-linebreak",
- "unicode-width",
-]
-
[[package]]
name = "thiserror"
version = "1.0.40"
@@ -2800,7 +2490,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.15",
+ "syn 2.0.18",
]
[[package]]
@@ -2880,9 +2570,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "toml"
-version = "0.7.3"
+version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b403acf6f2bb0859c93c7f0d967cb4a75a7ac552100f9322faf64dc047669b21"
+checksum = "d6135d499e69981f9ff0ef2167955a5333c35e36f6937d382974566b3d5b94ec"
dependencies = [
"serde",
"serde_spanned",
@@ -2892,18 +2582,18 @@ dependencies = [
[[package]]
name = "toml_datetime"
-version = "0.6.1"
+version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622"
+checksum = "5a76a9312f5ba4c2dec6b9161fdf25d87ad8a09256ccea5a556fef03c706a10f"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
-version = "0.19.8"
+version = "0.19.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13"
+checksum = "2380d56e8670370eee6566b0bfd4265f65b3f432e8c6d85623f728d4fa31f739"
dependencies = [
"indexmap",
"serde",
@@ -2919,6 +2609,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
dependencies = [
"cfg-if",
+ "log",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
@@ -2926,34 +2617,24 @@ dependencies = [
[[package]]
name = "tracing-attributes"
-version = "0.1.23"
+version = "0.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a"
+checksum = "0f57e3ca2a01450b1a921183a9c9cbfda207fd822cef4ccb00a65402cbba7a74"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.18",
]
[[package]]
name = "tracing-core"
-version = "0.1.30"
+version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a"
+checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
dependencies = [
"once_cell",
]
-[[package]]
-name = "twox-hash"
-version = "1.6.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
-dependencies = [
- "cfg-if",
- "static_assertions",
-]
-
[[package]]
name = "typed-arena"
version = "2.0.2"
@@ -3032,19 +2713,9 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
[[package]]
name = "unicode-ident"
-version = "1.0.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4"
-
-[[package]]
-name = "unicode-linebreak"
-version = "0.1.4"
+version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5faade31a542b8b35855fff6e8def199853b2da8da256da52f52f1316ee3137"
-dependencies = [
- "hashbrown",
- "regex",
-]
+checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
[[package]]
name = "unicode-normalization"
@@ -3061,12 +2732,6 @@ version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
-[[package]]
-name = "unicode-xid"
-version = "0.2.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
-
[[package]]
name = "unicode_names2"
version = "0.6.0"
@@ -3106,6 +2771,7 @@ dependencies = [
"form_urlencoded",
"idna",
"percent-encoding",
+ "serde",
]
[[package]]
@@ -3116,9 +2782,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "uuid"
-version = "1.3.1"
+version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b55a3fef2a1e3b3a00ce878640918820d3c51081576ac657d23af9fc7928fdb"
+checksum = "345444e32442451b267fc254ae85a209c64be56d2890e601a0c37ff0c3c5ecd2"
[[package]]
name = "version_check"
@@ -3126,12 +2792,6 @@ version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
-[[package]]
-name = "volatile"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8e76fae08f03f96e166d2dfda232190638c10e0383841252416f9cfe2ae60e6"
-
[[package]]
name = "wait-timeout"
version = "0.2.0"
@@ -3165,9 +2825,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-bindgen"
-version = "0.2.84"
+version = "0.2.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b"
+checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
@@ -3175,24 +2835,24 @@ dependencies = [
[[package]]
name = "wasm-bindgen-backend"
-version = "0.2.84"
+version = "0.2.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9"
+checksum = "19b04bc93f9d6bdee709f6bd2118f57dd6679cf1176a1af464fca3ab0d66d8fb"
dependencies = [
"bumpalo",
"log",
"once_cell",
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.18",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-futures"
-version = "0.4.34"
+version = "0.4.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454"
+checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e"
dependencies = [
"cfg-if",
"js-sys",
@@ -3202,9 +2862,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
-version = "0.2.84"
+version = "0.2.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5"
+checksum = "14d6b024f1a526bb0234f52840389927257beb670610081360e5a03c5df9c258"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -3212,28 +2872,28 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
-version = "0.2.84"
+version = "0.2.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6"
+checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.18",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
-version = "0.2.84"
+version = "0.2.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d"
+checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93"
[[package]]
name = "wasm-bindgen-test"
-version = "0.3.34"
+version = "0.3.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6db36fc0f9fb209e88fb3642590ae0205bb5a56216dabd963ba15879fe53a30b"
+checksum = "c9e636f3a428ff62b3742ebc3c70e254dfe12b8c2b469d688ea59cdd4abcf502"
dependencies = [
"console_error_panic_hook",
"js-sys",
@@ -3245,9 +2905,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-test-macro"
-version = "0.3.34"
+version = "0.3.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0734759ae6b3b1717d661fe4f016efcfb9828f5edb4520c18eaee05af3b43be9"
+checksum = "f18c1fad2f7c4958e7bcce014fa212f59a65d5e3721d0f77e6c0b27ede936ba3"
dependencies = [
"proc-macro2",
"quote",
@@ -3255,9 +2915,9 @@ dependencies = [
[[package]]
name = "web-sys"
-version = "0.3.61"
+version = "0.3.63"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97"
+checksum = "3bdd9ef4e984da1187bf8110c5cf5b845fbc87a23602cdf912386a76fcd3a7c2"
dependencies = [
"js-sys",
"wasm-bindgen",
@@ -3293,12 +2953,6 @@ dependencies = [
"once_cell",
]
-[[package]]
-name = "widestring"
-version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983"
-
[[package]]
name = "wild"
version = "2.1.0"
@@ -3348,21 +3002,6 @@ dependencies = [
"windows-targets 0.48.0",
]
-[[package]]
-name = "windows-sys"
-version = "0.42.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
-dependencies = [
- "windows_aarch64_gnullvm 0.42.2",
- "windows_aarch64_msvc 0.42.2",
- "windows_i686_gnu 0.42.2",
- "windows_i686_msvc 0.42.2",
- "windows_x86_64_gnu 0.42.2",
- "windows_x86_64_gnullvm 0.42.2",
- "windows_x86_64_msvc 0.42.2",
-]
-
[[package]]
name = "windows-sys"
version = "0.45.0"
@@ -3497,9 +3136,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
[[package]]
name = "winnow"
-version = "0.4.1"
+version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae8970b36c66498d8ff1d66685dc86b91b29db0c7739899012f63a63814b4b28"
+checksum = "61de7bac303dc551fe038e2b3cef0f571087a47571ea6e79a87692ac99b99699"
dependencies = [
"memchr",
]
diff --git a/Cargo.toml b/Cargo.toml
index cc5b1275c7306..8e477ffd6fa68 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -3,7 +3,7 @@ members = ["crates/*"]
[workspace.package]
edition = "2021"
-rust-version = "1.69"
+rust-version = "1.70"
homepage = "https://beta.ruff.rs/docs/"
documentation = "https://beta.ruff.rs/docs/"
repository = "https://github.com/charliermarsh/ruff"
@@ -11,7 +11,7 @@ authors = ["Charlie Marsh "]
[workspace.dependencies]
anyhow = { version = "1.0.69" }
-bitflags = { version = "2.1.0" }
+bitflags = { version = "2.3.1" }
chrono = { version = "0.4.23", default-features = false, features = ["clock"] }
clap = { version = "4.1.8", features = ["derive"] }
colored = { version = "2.0.0" }
@@ -24,16 +24,21 @@ is-macro = { version = "0.2.2" }
itertools = { version = "0.10.5" }
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "80e4c1399f95e5beb532fdd1e209ad2dbb470438" }
log = { version = "0.4.17" }
+memchr = "2.5.0"
nohash-hasher = { version = "0.2.0" }
+num-bigint = { version = "0.4.3" }
+num-traits = { version = "0.2.15" }
once_cell = { version = "1.17.1" }
path-absolutize = { version = "3.0.14" }
proc-macro2 = { version = "1.0.51" }
quote = { version = "1.0.23" }
regex = { version = "1.7.1" }
-ruff_text_size = { git = "https://github.com/charliermarsh/RustPython.git", rev = "c3147d2c1524ebd0e90cf1c2938d770314fd5a5a" }
rustc-hash = { version = "1.1.0" }
-rustpython-common = { git = "https://github.com/charliermarsh/RustPython.git", rev = "c3147d2c1524ebd0e90cf1c2938d770314fd5a5a" }
-rustpython-parser = { git = "https://github.com/charliermarsh/RustPython.git", rev = "c3147d2c1524ebd0e90cf1c2938d770314fd5a5a" }
+ruff_text_size = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd" }
+rustpython-ast = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd", default-features = false, features = ["all-nodes-with-ranges"]}
+rustpython-format = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd" }
+rustpython-literal = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd" }
+rustpython-parser = { git = "https://github.com/astral-sh/RustPython-Parser.git", rev = "7a3eedbf6fb4ea7068a1bf7fe0e97e963ea95ffd", default-features = false, features = ["full-lexer", "all-nodes-with-ranges"] }
schemars = { version = "0.8.12" }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = { version = "1.0.93", features = ["preserve_order"] }
@@ -44,7 +49,6 @@ strum = { version = "0.24.1", features = ["strum_macros"] }
strum_macros = { version = "0.24.3" }
syn = { version = "2.0.15" }
test-case = { version = "3.0.0" }
-textwrap = { version = "0.16.0" }
toml = { version = "0.7.2" }
[profile.release]
diff --git a/LICENSE b/LICENSE
index 590eda6e8b055..534e3357426bf 100644
--- a/LICENSE
+++ b/LICENSE
@@ -354,6 +354,37 @@ are:
SOFTWARE.
"""
+- flake8-todos, licensed as follows:
+ """
+ Copyright (c) 2019 EclecticIQ. All rights reserved.
+ Copyright (c) 2020 Gram . All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from this
+ software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ """
+
- flake8-unused-arguments, licensed as follows:
"""
MIT License
@@ -550,6 +581,30 @@ are:
THE SOFTWARE.
"""
+- flynt, licensed as follows:
+ """
+ MIT License
+
+ Copyright (c) 2019-2022 Ilya Kamenshchikov
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+ """
- isort, licensed as follows:
"""
@@ -759,6 +814,31 @@ are:
SOFTWARE.
"""
+- flake8-async, licensed as follows:
+ """
+ MIT License
+
+ Copyright (c) 2022 Cooper Lees
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+ """
+
- flake8-type-checking, licensed as follows:
"""
Copyright (c) 2021, Sondre Lillebø Gundersen
diff --git a/README.md b/README.md
index 1daff088453e7..daa96bbb7d7ad 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
# Ruff
-[](https://github.com/charliermarsh/ruff)
+[](https://github.com/charliermarsh/ruff)
[](https://pypi.python.org/pypi/ruff)
[](https://pypi.python.org/pypi/ruff)
[](https://pypi.python.org/pypi/ruff)
@@ -24,17 +24,18 @@ An extremely fast Python linter, written in Rust.
Linting the CPython codebase from scratch.
-- ⚡️ 10-100x faster than existing linters
-- 🐍 Installable via `pip`
-- 🛠️ `pyproject.toml` support
-- 🤝 Python 3.11 compatibility
-- 📦 Built-in caching, to avoid re-analyzing unchanged files
-- 🔧 Autofix support, for automatic error correction (e.g., automatically remove unused imports)
-- 📏 Over [500 built-in rules](https://beta.ruff.rs/docs/rules/)
-- ⚖️ [Near-parity](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-flake8) with the built-in Flake8 rule set
-- 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
-- ⌨️ First-party editor integrations for [VS Code](https://github.com/charliermarsh/ruff-vscode) and [more](https://github.com/charliermarsh/ruff-lsp)
-- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://beta.ruff.rs/docs/configuration/#pyprojecttoml-discovery)
+- ⚡️ 10-100x faster than existing linters
+- 🐍 Installable via `pip`
+- 🛠️ `pyproject.toml` support
+- 🤝 Python 3.11 compatibility
+- 📦 Built-in caching, to avoid re-analyzing unchanged files
+- 🔧 Autofix support, for automatic error correction (e.g., automatically remove unused imports)
+- 📏 Over [500 built-in rules](https://beta.ruff.rs/docs/rules/)
+- ⚖️ [Near-parity](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-flake8) with the
+ built-in Flake8 rule set
+- 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
+- ⌨️ First-party editor integrations for [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
+- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://beta.ruff.rs/docs/configuration/#pyprojecttoml-discovery)
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.
@@ -84,7 +85,8 @@ of [Conda](https://docs.conda.io/en/latest/):
[**Timothy Crosley**](https://twitter.com/timothycrosley/status/1606420868514877440),
creator of [isort](https://github.com/PyCQA/isort):
-> Just switched my first project to Ruff. Only one downside so far: it's so fast I couldn't believe it was working till I intentionally introduced some errors.
+> Just switched my first project to Ruff. Only one downside so far: it's so fast I couldn't believe
+> it was working till I intentionally introduced some errors.
[**Tim Abbott**](https://github.com/charliermarsh/ruff/issues/465#issuecomment-1317400028), lead
developer of [Zulip](https://github.com/zulip/zulip):
@@ -135,15 +137,15 @@ ruff check path/to/code/to/file.py # Lint `file.py`
Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:
```yaml
-- repo: https://github.com/charliermarsh/ruff-pre-commit
+- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
- rev: 'v0.0.265'
+ rev: v0.0.272
hooks:
- id: ruff
```
-Ruff can also be used as a [VS Code extension](https://github.com/charliermarsh/ruff-vscode) or
-alongside any other editor through the [Ruff LSP](https://github.com/charliermarsh/ruff-lsp).
+Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
+alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).
Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/chartboost/ruff-action):
@@ -183,6 +185,7 @@ exclude = [
".direnv",
".eggs",
".git",
+ ".git-rewrite",
".hg",
".mypy_cache",
".nox",
@@ -241,6 +244,8 @@ stylistic rules made obsolete by the use of an autoformatter, like
If you're just getting started with Ruff, **the default rule set is a great place to start**: it
catches a wide variety of common errors (like unused imports) with zero configuration.
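For illustration, a minimal, hypothetical `example.py` like the sketch below would be flagged by the default rule set (Pyflakes' `F401`, unused import) with no configuration at all:

```python
# example.py -- hypothetical file; `ruff check example.py` should report
# F401 ("`os` imported but unused") under the default rule set.
import os  # unused import, caught with zero configuration


def greet(name: str) -> str:
    """Return a greeting; `os` is never used, so Ruff flags the import."""
    return f"Hello, {name}!"
```

Running `ruff check --fix example.py` would then remove the unused import automatically, per the autofix support noted above.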
+
+
Beyond the defaults, Ruff re-implements some of the most popular Flake8 plugins and related code
quality tools, including:
@@ -248,6 +253,7 @@ quality tools, including:
- [eradicate](https://pypi.org/project/eradicate/)
- [flake8-2020](https://pypi.org/project/flake8-2020/)
- [flake8-annotations](https://pypi.org/project/flake8-annotations/)
+- [flake8-async](https://pypi.org/project/flake8-async)
- [flake8-bandit](https://pypi.org/project/flake8-bandit/) ([#1646](https://github.com/charliermarsh/ruff/issues/1646))
- [flake8-blind-except](https://pypi.org/project/flake8-blind-except/)
- [flake8-boolean-trap](https://pypi.org/project/flake8-boolean-trap/)
@@ -262,6 +268,7 @@ quality tools, including:
- [flake8-eradicate](https://pypi.org/project/flake8-eradicate/)
- [flake8-errmsg](https://pypi.org/project/flake8-errmsg/)
- [flake8-executable](https://pypi.org/project/flake8-executable/)
+- [flake8-future-annotations](https://pypi.org/project/flake8-future-annotations/)
- [flake8-gettext](https://pypi.org/project/flake8-gettext/)
- [flake8-implicit-str-concat](https://pypi.org/project/flake8-implicit-str-concat/)
- [flake8-import-conventions](https://github.com/joaopalmeiro/flake8-import-conventions)
@@ -278,20 +285,21 @@ quality tools, including:
- [flake8-simplify](https://pypi.org/project/flake8-simplify/)
- [flake8-super](https://pypi.org/project/flake8-super/)
- [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/)
+- [flake8-todos](https://pypi.org/project/flake8-todos/)
- [flake8-type-checking](https://pypi.org/project/flake8-type-checking/)
- [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
+- [flynt](https://pypi.org/project/flynt/) ([#2102](https://github.com/charliermarsh/ruff/issues/2102))
- [isort](https://pypi.org/project/isort/)
- [mccabe](https://pypi.org/project/mccabe/)
- [pandas-vet](https://pypi.org/project/pandas-vet/)
- [pep8-naming](https://pypi.org/project/pep8-naming/)
- [pydocstyle](https://pypi.org/project/pydocstyle/)
-- [pygrep-hooks](https://github.com/pre-commit/pygrep-hooks) ([#980](https://github.com/charliermarsh/ruff/issues/980))
+- [pygrep-hooks](https://github.com/pre-commit/pygrep-hooks)
+- [pylint-airflow](https://pypi.org/project/pylint-airflow/)
- [pyupgrade](https://pypi.org/project/pyupgrade/)
- [tryceratops](https://pypi.org/project/tryceratops/)
- [yesqa](https://pypi.org/project/yesqa/)
-
-
For a complete enumeration of the supported rules, see [_Rules_](https://beta.ruff.rs/docs/rules/).
## Contributing
@@ -341,13 +349,15 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Babel](https://github.com/python-babel/babel)
- [Bokeh](https://github.com/bokeh/bokeh)
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
+- [DVC](https://github.com/iterative/dvc)
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
-- [DVC](https://github.com/iterative/dvc)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Gradio](https://github.com/gradio-app/gradio)
- [Great Expectations](https://github.com/great-expectations/great_expectations)
-- Hugging Face ([Transformers](https://github.com/huggingface/transformers), [Datasets](https://github.com/huggingface/datasets), [Diffusers](https://github.com/huggingface/diffusers))
+- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
+ [Datasets](https://github.com/huggingface/datasets),
+ [Diffusers](https://github.com/huggingface/diffusers))
- [Hatch](https://github.com/pypa/hatch)
- [Home Assistant](https://github.com/home-assistant/core)
- [Ibis](https://github.com/ibis-project/ibis)
@@ -359,7 +369,9 @@ Ruff is used by a number of major open-source projects and companies, including:
- Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python-sdk))
- Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
- [MegaLinter](https://github.com/oxsecurity/megalinter)
-- Microsoft ([Semantic Kernel](https://github.com/microsoft/semantic-kernel), [ONNX Runtime](https://github.com/microsoft/onnxruntime))
+- Microsoft ([Semantic Kernel](https://github.com/microsoft/semantic-kernel),
+ [ONNX Runtime](https://github.com/microsoft/onnxruntime),
+ [LightGBM](https://github.com/microsoft/LightGBM))
- Netflix ([Dispatch](https://github.com/Netflix/dispatch))
- [Neon](https://github.com/neondatabase/neon)
- [ONNX](https://github.com/onnx/onnx)
@@ -371,8 +383,9 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Polars](https://github.com/pola-rs/polars)
- [PostHog](https://github.com/PostHog/posthog)
- Prefect ([Python SDK](https://github.com/PrefectHQ/prefect), [Marvin](https://github.com/PrefectHQ/marvin))
-- [Pydantic](https://github.com/pydantic/pydantic)
- [PyInstaller](https://github.com/pyinstaller/pyinstaller)
+- [PyTorch](https://github.com/pytorch/pytorch)
+- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [Pynecone](https://github.com/pynecone-io/pynecone)
- [Robyn](https://github.com/sansyrox/robyn)
@@ -382,7 +395,7 @@ Ruff is used by a number of major open-source projects and companies, including:
- [SciPy](https://github.com/scipy/scipy)
- [Sphinx](https://github.com/sphinx-doc/sphinx)
- [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
-- [Starlite](https://github.com/starlite-api/starlite)
+- [Litestar](https://litestar.dev/)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Vega-Altair](https://github.com/altair-viz/altair)
- WordPress ([Openverse](https://github.com/WordPress/openverse))
@@ -395,6 +408,34 @@ Ruff is used by a number of major open-source projects and companies, including:
- [meson-python](https://github.com/mesonbuild/meson-python)
- [nox](https://github.com/wntrblm/nox)
+### Show Your Support
+
+If you're using Ruff, consider adding the Ruff badge to your project's `README.md`:
+
+```md
+[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json)](https://github.com/charliermarsh/ruff)
+```
+
+...or `README.rst`:
+
+```rst
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+ :target: https://github.com/charliermarsh/ruff
+ :alt: Ruff
+```
+
+...or, as HTML:
+
+```html
+<a href="https://github.com/charliermarsh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json" alt="Ruff"></a>
+```
+
## License
MIT
+
+
diff --git a/_typos.toml b/_typos.toml
index 8f5f834f9b409..778ba59eaf71e 100644
--- a/_typos.toml
+++ b/_typos.toml
@@ -1,5 +1,5 @@
[files]
-extend-exclude = ["snapshots", "black"]
+extend-exclude = ["resources", "snapshots"]
[default.extend-words]
trivias = "trivias"
diff --git a/assets/badge/v2.json b/assets/badge/v2.json
new file mode 100644
index 0000000000000..9379c3479837e
--- /dev/null
+++ b/assets/badge/v2.json
@@ -0,0 +1,8 @@
+{
+ "label": "",
+ "message": "Ruff",
+ "logoSvg": " ",
+ "logoWidth": 10,
+ "labelColor": "grey",
+ "color": "#261230"
+}
diff --git a/assets/svg/Astral.svg b/assets/svg/Astral.svg
new file mode 100644
index 0000000000000..461be113154df
--- /dev/null
+++ b/assets/svg/Astral.svg
@@ -0,0 +1,24 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/crates/flake8_to_ruff/Cargo.toml b/crates/flake8_to_ruff/Cargo.toml
index dd78fb485a6fa..05109e74955e9 100644
--- a/crates/flake8_to_ruff/Cargo.toml
+++ b/crates/flake8_to_ruff/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
-version = "0.0.265"
+version = "0.0.272"
edition = { workspace = true }
rust-version = { workspace = true }
diff --git a/crates/flake8_to_ruff/pyproject.toml b/crates/flake8_to_ruff/pyproject.toml
index 16173f7d0d130..b7278f549aeef 100644
--- a/crates/flake8_to_ruff/pyproject.toml
+++ b/crates/flake8_to_ruff/pyproject.toml
@@ -26,7 +26,7 @@ requires-python = ">=3.7"
repository = "https://github.com/charliermarsh/ruff#subdirectory=crates/flake8_to_ruff"
[build-system]
-requires = ["maturin>=0.14,<0.15"]
+requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"
[tool.maturin]
diff --git a/crates/ruff/Cargo.toml b/crates/ruff/Cargo.toml
index c6c45a038ded9..77cfba42c27cf 100644
--- a/crates/ruff/Cargo.toml
+++ b/crates/ruff/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "ruff"
-version = "0.0.265"
+version = "0.0.272"
authors.workspace = true
edition.workspace = true
rust-version.workspace = true
@@ -17,11 +17,13 @@ name = "ruff"
ruff_cache = { path = "../ruff_cache" }
ruff_diagnostics = { path = "../ruff_diagnostics", features = ["serde"] }
ruff_macros = { path = "../ruff_macros" }
+ruff_newlines = { path = "../ruff_newlines" }
ruff_python_ast = { path = "../ruff_python_ast", features = ["serde"] }
ruff_python_semantic = { path = "../ruff_python_semantic" }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
ruff_rustpython = { path = "../ruff_rustpython" }
ruff_text_size = { workspace = true }
+ruff_textwrap = { path = "../ruff_textwrap" }
annotate-snippets = { version = "0.9.1", features = ["color"] }
anyhow = { workspace = true }
@@ -41,8 +43,8 @@ libcst = { workspace = true }
log = { workspace = true }
natord = { version = "1.0.9" }
nohash-hasher = { workspace = true }
-num-bigint = { version = "0.4.3" }
-num-traits = { version = "0.2.15" }
+num-bigint = { workspace = true }
+num-traits = { workspace = true }
once_cell = { workspace = true }
path-absolutize = { workspace = true, features = [
"once_cell_cache",
@@ -50,13 +52,14 @@ path-absolutize = { workspace = true, features = [
] }
pathdiff = { version = "0.2.1" }
pep440_rs = { version = "0.3.1", features = ["serde"] }
+pyproject-toml = { version = "0.6.0" }
quick-junit = { version = "0.3.2" }
regex = { workspace = true }
result-like = { version = "0.4.6" }
rustc-hash = { workspace = true }
-rustpython-common = { workspace = true }
+rustpython-format = { workspace = true }
rustpython-parser = { workspace = true }
-schemars = { workspace = true }
+schemars = { workspace = true, optional = true }
semver = { version = "1.0.16" }
serde = { workspace = true }
serde_json = { workspace = true }
@@ -65,11 +68,11 @@ shellexpand = { workspace = true }
smallvec = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
-textwrap = { workspace = true }
thiserror = { version = "1.0.38" }
toml = { workspace = true }
typed-arena = { version = "2.0.2" }
unicode-width = { version = "0.1.10" }
+unicode_names2 = { version = "0.6.0", git = "https://github.com/youknowone/unicode_names2.git", rev = "4ce16aa85cbcdd9cc830410f1a72ef9a235f2fde" }
[dev-dependencies]
insta = { workspace = true }
@@ -80,5 +83,5 @@ colored = { workspace = true, features = ["no-color"] }
[features]
default = []
-logical_lines = []
+schemars = ["dep:schemars"]
jupyter_notebook = []
diff --git a/crates/ruff/resources/test/fixtures/airflow/AIR001.py b/crates/ruff/resources/test/fixtures/airflow/AIR001.py
new file mode 100644
index 0000000000000..6e8bffcd9754d
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/airflow/AIR001.py
@@ -0,0 +1,16 @@
+from airflow.operators import PythonOperator
+
+
+def my_callable():
+ pass
+
+
+my_task = PythonOperator(task_id="my_task", callable=my_callable)
+my_task_2 = PythonOperator(callable=my_callable, task_id="my_task_2")
+
+incorrect_name = PythonOperator(task_id="my_task")
+incorrect_name_2 = PythonOperator(callable=my_callable, task_id="my_task_2")
+
+from my_module import MyClass
+
+incorrect_name = MyClass(task_id="my_task")
diff --git a/crates/ruff/resources/test/fixtures/flake8_annotations/annotation_presence.py b/crates/ruff/resources/test/fixtures/flake8_annotations/annotation_presence.py
index 3258eaf736f37..d37f178bbb938 100644
--- a/crates/ruff/resources/test/fixtures/flake8_annotations/annotation_presence.py
+++ b/crates/ruff/resources/test/fixtures/flake8_annotations/annotation_presence.py
@@ -1,4 +1,5 @@
from typing import Any, Type
+from typing_extensions import override
# Error
def foo(a, b):
@@ -94,6 +95,31 @@ def foo(self: "Foo", a: int, *params: Any, **options: str) -> int:
def foo(self: "Foo", a: int, *params: str, **options: Any) -> int:
pass
+ # ANN401
+ @override
+ def foo(self: "Foo", a: Any, *params: str, **options: str) -> int:
+ pass
+
+ # ANN401
+ @override
+ def foo(self: "Foo", a: int, *params: str, **options: str) -> Any:
+ pass
+
+ # ANN401
+ @override
+ def foo(self: "Foo", a: int, *params: Any, **options: Any) -> int:
+ pass
+
+ # ANN401
+ @override
+ def foo(self: "Foo", a: int, *params: Any, **options: str) -> int:
+ pass
+
+ # ANN401
+ @override
+ def foo(self: "Foo", a: int, *params: str, **options: Any) -> int:
+ pass
+
# OK
@classmethod
def foo(cls: Type["Foo"], a: int, b: int) -> int:
diff --git a/crates/ruff/resources/test/fixtures/flake8_async/ASYNC100.py b/crates/ruff/resources/test/fixtures/flake8_async/ASYNC100.py
new file mode 100644
index 0000000000000..532273a7b4676
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_async/ASYNC100.py
@@ -0,0 +1,23 @@
+import urllib.request
+import requests
+import httpx
+
+
+async def foo():
+ urllib.request.urlopen("http://example.com/foo/bar").read()
+
+
+async def foo():
+ requests.get()
+
+
+async def foo():
+ httpx.get()
+
+
+async def foo():
+ requests.post()
+
+
+async def foo():
+ httpx.post()
diff --git a/crates/ruff/resources/test/fixtures/flake8_async/ASYNC101.py b/crates/ruff/resources/test/fixtures/flake8_async/ASYNC101.py
new file mode 100644
index 0000000000000..32fbaeb9aabbe
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_async/ASYNC101.py
@@ -0,0 +1,31 @@
+import os
+import subprocess
+import time
+
+
+async def foo():
+ open("foo")
+
+
+async def foo():
+ time.sleep(1)
+
+
+async def foo():
+ subprocess.run("foo")
+
+
+async def foo():
+ subprocess.call("foo")
+
+
+async def foo():
+ subprocess.foo(0)
+
+
+async def foo():
+ os.wait4(10)
+
+
+async def foo():
+ os.wait(12)
diff --git a/crates/ruff/resources/test/fixtures/flake8_async/ASYNC102.py b/crates/ruff/resources/test/fixtures/flake8_async/ASYNC102.py
new file mode 100644
index 0000000000000..7912bcc9decab
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_async/ASYNC102.py
@@ -0,0 +1,13 @@
+import os
+
+
+async def foo():
+ os.popen()
+
+
+async def foo():
+ os.spawnl()
+
+
+async def foo():
+ os.fspath("foo")
diff --git a/crates/ruff/resources/test/fixtures/flake8_bandit/S601.py b/crates/ruff/resources/test/fixtures/flake8_bandit/S601.py
new file mode 100644
index 0000000000000..1a76018616e35
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_bandit/S601.py
@@ -0,0 +1,3 @@
+import paramiko
+
+paramiko.exec_command('something; really; unsafe')
diff --git a/crates/ruff/resources/test/fixtures/flake8_bandit/S608.py b/crates/ruff/resources/test/fixtures/flake8_bandit/S608.py
index 4592f46e4f515..b4c2aba044a88 100644
--- a/crates/ruff/resources/test/fixtures/flake8_bandit/S608.py
+++ b/crates/ruff/resources/test/fixtures/flake8_bandit/S608.py
@@ -74,8 +74,8 @@ def query40():
def query41():
return (
- "SELECT *"
- "FROM table"
+ "SELECT * "
+ "FROM table "
f"WHERE var = {var}"
)
@@ -84,7 +84,7 @@ def query41():
query43 = cursor.execute(f"SELECT * FROM table WHERE var = {var}")
query44 = cursor.execute("SELECT * FROM table WHERE var = {}".format(var))
query45 = cursor.executemany("SELECT * FROM table WHERE var = %s" % var, [])
-
+
# # pass
query = "SELECT * FROM table WHERE id = 1"
query = "DELETE FROM table WHERE id = 1"
@@ -93,3 +93,12 @@ def query41():
cursor.execute('SELECT * FROM table WHERE id = %s', var)
cursor.execute('SELECT * FROM table WHERE id = 1')
cursor.executemany('SELECT * FROM table WHERE id = %s', [var, var2])
+
+# # INSERT without INTO (e.g. MySQL and derivatives)
+query = "INSERT table VALUES (%s)" % (var,)
+
+# # REPLACE (e.g. MySQL and derivatives, SQLite)
+query = "REPLACE INTO table VALUES (%s)" % (var,)
+query = "REPLACE table VALUES (%s)" % (var,)
+
+query = "Deselect something that is not SQL even though it has a ' from ' somewhere in %s." % "there"
diff --git a/crates/ruff/resources/test/fixtures/flake8_bandit/S609.py b/crates/ruff/resources/test/fixtures/flake8_bandit/S609.py
new file mode 100644
index 0000000000000..848eb4a2fce97
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_bandit/S609.py
@@ -0,0 +1,8 @@
+import os
+import subprocess
+
+os.popen("chmod +w foo*")
+subprocess.Popen("/bin/chown root: *", shell=True)
+subprocess.Popen(["/usr/local/bin/rsync", "*", "some_where:"], shell=True)
+subprocess.Popen("/usr/local/bin/rsync * no_injection_here:")
+os.system("tar cf foo.tar bar/*")
diff --git a/crates/ruff/resources/test/fixtures/flake8_boolean_trap/FBT.py b/crates/ruff/resources/test/fixtures/flake8_boolean_trap/FBT.py
index 44195828903da..581668e9320c3 100644
--- a/crates/ruff/resources/test/fixtures/flake8_boolean_trap/FBT.py
+++ b/crates/ruff/resources/test/fixtures/flake8_boolean_trap/FBT.py
@@ -57,12 +57,16 @@ def used(do):
{}.deploy(True, False)
getattr(someobj, attrname, False)
mylist.index(True)
+bool(False)
int(True)
str(int(False))
cfg.get("hello", True)
cfg.getint("hello", True)
cfg.getfloat("hello", True)
cfg.getboolean("hello", True)
+os.set_blocking(0, False)
+g_action.set_enabled(True)
+settings.set_enable_developer_extras(True)
class Registry:
@@ -80,3 +84,6 @@ def foo(self, value: bool) -> None:
# FBT001: Boolean positional arg in function definition
def foo(self, value: bool) -> None:
pass
+
+ def foo(self) -> None:
+ object.__setattr__(self, "flag", True)
diff --git a/crates/ruff/resources/test/fixtures/flake8_bugbear/B006_B008.py b/crates/ruff/resources/test/fixtures/flake8_bugbear/B006_B008.py
index 0fa3d6dc5bc6f..297e3ca49e837 100644
--- a/crates/ruff/resources/test/fixtures/flake8_bugbear/B006_B008.py
+++ b/crates/ruff/resources/test/fixtures/flake8_bugbear/B006_B008.py
@@ -1,6 +1,7 @@
import collections
import datetime as dt
from decimal import Decimal
+from fractions import Fraction
import logging
import operator
from pathlib import Path
@@ -158,12 +159,37 @@ def float_infinity_literal(value=float("1e999")):
pass
-# But don't allow standard floats
-def float_int_is_wrong(value=float(3)):
+# Allow standard floats
+def float_int_okay(value=float(3)):
pass
-def float_str_not_inf_or_nan_is_wrong(value=float("3.14")):
+def float_str_not_inf_or_nan_okay(value=float("3.14")):
+ pass
+
+
+# Allow immutable str() value
+def str_okay(value=str("foo")):
+ pass
+
+
+# Allow immutable bool() value
+def bool_okay(value=bool("bar")):
+ pass
+
+
+# Allow immutable int() value
+def int_okay(value=int("12")):
+ pass
+
+
+# Allow immutable complex() value
+def complex_okay(value=complex(1,2)):
+ pass
+
+
+# Allow immutable Fraction() value
+def fraction_okay(value=Fraction(1,2)):
pass
diff --git a/crates/ruff/resources/test/fixtures/flake8_bugbear/B007.py b/crates/ruff/resources/test/fixtures/flake8_bugbear/B007.py
index 6df22e0027c0d..6c855a7989c5c 100644
--- a/crates/ruff/resources/test/fixtures/flake8_bugbear/B007.py
+++ b/crates/ruff/resources/test/fixtures/flake8_bugbear/B007.py
@@ -73,7 +73,18 @@ def f():
def f():
- # Fixable.
+ # Unfixable.
+ for foo, bar, baz in (["1", "2", "3"],):
+ if foo or baz:
+ break
+ else:
+ bar = 1
+
+ print(bar)
+
+
+def f():
+ # Unfixable (false negative) due to usage of `bar` outside of loop.
for foo, bar, baz in (["1", "2", "3"],):
if foo or baz:
break
@@ -85,4 +96,4 @@ def f():
# Unfixable due to trailing underscore (`_line_` wouldn't be considered an ignorable
# variable name).
for line_ in range(self.header_lines):
- fp.readline()
+ fp.readline()
diff --git a/crates/ruff/resources/test/fixtures/flake8_bugbear/B027.py b/crates/ruff/resources/test/fixtures/flake8_bugbear/B027.py
index d7d563c36550e..803a660cd8a7d 100644
--- a/crates/ruff/resources/test/fixtures/flake8_bugbear/B027.py
+++ b/crates/ruff/resources/test/fixtures/flake8_bugbear/B027.py
@@ -4,7 +4,12 @@
"""
import abc
from abc import ABC
-from abc import abstractmethod, abstractproperty
+from abc import (
+ abstractmethod,
+ abstractproperty,
+ abstractclassmethod,
+ abstractstaticmethod,
+)
from abc import abstractmethod as notabstract
from abc import abstractproperty as notabstract_property
@@ -55,6 +60,22 @@ def abstract_5(self):
def abstract_6(self):
...
+ @abstractclassmethod
+ def abstract_7(self):
+ pass
+
+ @abc.abstractclassmethod
+ def abstract_8(self):
+ ...
+
+ @abstractstaticmethod
+ def abstract_9(self):
+ pass
+
+ @abc.abstractstaticmethod
+ def abstract_10(self):
+ ...
+
def body_1(self):
print("foo")
...
@@ -99,3 +120,11 @@ def empty_1(self, foo: float):
@abstractmethod
def empty_1(self, foo: Union[str, int, list, float]):
...
+
+
+from dataclasses import dataclass
+
+
+@dataclass
+class Foo(ABC): # noqa: B024
+ ...
diff --git a/crates/ruff/resources/test/fixtures/flake8_bugbear/B027_extended.py b/crates/ruff/resources/test/fixtures/flake8_bugbear/B027_extended.py
deleted file mode 100644
index 42ce3a66369a4..0000000000000
--- a/crates/ruff/resources/test/fixtures/flake8_bugbear/B027_extended.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""
-Should emit:
-B027 - on lines 13, 16, 19, 23
-"""
-from abc import ABC
-
-
-class AbstractClass(ABC):
- def empty_1(self): # error
- ...
-
- def empty_2(self): # error
- pass
-
- def body_1(self):
- print("foo")
- ...
-
- def body_2(self):
- self.body_1()
-
-
-def foo():
- class InnerAbstractClass(ABC):
- def empty_1(self): # error
- ...
-
- def empty_2(self): # error
- pass
-
- def body_1(self):
- print("foo")
- ...
-
- def body_2(self):
- self.body_1()
-
- return InnerAbstractClass
-
diff --git a/crates/ruff/resources/test/fixtures/flake8_bugbear/B033.py b/crates/ruff/resources/test/fixtures/flake8_bugbear/B033.py
new file mode 100644
index 0000000000000..61f2dd355ba5c
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_bugbear/B033.py
@@ -0,0 +1,11 @@
+###
+# Errors.
+###
+incorrect_set = {"value1", 23, 5, "value1"}
+incorrect_set = {1, 1}
+
+###
+# Non-errors.
+###
+correct_set = {"value1", 23, 5}
+correct_set = {5, "5"}
diff --git a/crates/ruff/resources/test/fixtures/flake8_bugbear/B905.py b/crates/ruff/resources/test/fixtures/flake8_bugbear/B905.py
index 1a01b5ebac327..4d94cabbceea6 100644
--- a/crates/ruff/resources/test/fixtures/flake8_bugbear/B905.py
+++ b/crates/ruff/resources/test/fixtures/flake8_bugbear/B905.py
@@ -1,3 +1,6 @@
+from itertools import count, cycle, repeat
+
+# Errors
zip()
zip(range(3))
zip("a", "b")
@@ -5,6 +8,18 @@
zip(zip("a"), strict=False)
zip(zip("a", strict=True))
+# OK
zip(range(3), strict=True)
zip("a", "b", strict=False)
zip("a", "b", "c", strict=True)
+
+# OK (infinite iterators).
+zip([1, 2, 3], cycle("ABCDEF"))
+zip([1, 2, 3], count())
+zip([1, 2, 3], repeat(1))
+zip([1, 2, 3], repeat(1, None))
+zip([1, 2, 3], repeat(1, times=None))
+
+# Errors (limited iterators).
+zip([1, 2, 3], repeat(1, 1))
+zip([1, 2, 3], repeat(1, times=4))
diff --git a/crates/ruff/resources/test/fixtures/flake8_commas/COM81.py b/crates/ruff/resources/test/fixtures/flake8_commas/COM81.py
index dc6db3a3c2889..f76059e98d095 100644
--- a/crates/ruff/resources/test/fixtures/flake8_commas/COM81.py
+++ b/crates/ruff/resources/test/fixtures/flake8_commas/COM81.py
@@ -631,3 +631,11 @@ def foo(
the_first_one = next(
(i for i in range(10) if i // 2 == 0) # COM812 fix should include the final bracket
)
+
+foo = namedtuple(
+ name="foo",
+ status="bar",
+ message="sfdsdfsdgs fsdfsdf output!dsfdfsdjkg ghfskdjghkdssd sd fsdf s\n"[
+ :20
+ ],
+)
diff --git a/crates/ruff/resources/test/fixtures/flake8_comprehensions/C419.py b/crates/ruff/resources/test/fixtures/flake8_comprehensions/C419.py
index 74aea42a533a4..4a9671b1ee020 100644
--- a/crates/ruff/resources/test/fixtures/flake8_comprehensions/C419.py
+++ b/crates/ruff/resources/test/fixtures/flake8_comprehensions/C419.py
@@ -17,3 +17,23 @@
async def f() -> bool:
return all([await use_greeting(greeting) for greeting in await greetings()])
+
+
+# Special comment handling
+any(
+ [ # lbracket comment
+ # second line comment
+ i.bit_count()
+ # random middle comment
+ for i in range(5) # rbracket comment
+ ] # rpar comment
+ # trailing comment
+)
+
+# Weird case where the function call, opening bracket, and comment are all
+# on the same line.
+any([ # lbracket comment
+ # second line comment
+ i.bit_count() for i in range(5) # rbracket comment
+ ] # rpar comment
+)
diff --git a/crates/ruff/resources/test/fixtures/flake8_errmsg/EM.py b/crates/ruff/resources/test/fixtures/flake8_errmsg/EM.py
index 1311bbc675f74..9a94763c36528 100644
--- a/crates/ruff/resources/test/fixtures/flake8_errmsg/EM.py
+++ b/crates/ruff/resources/test/fixtures/flake8_errmsg/EM.py
@@ -9,6 +9,10 @@ def f_a_short():
raise RuntimeError("Error")
+def f_a_empty():
+ raise RuntimeError("")
+
+
def f_b():
example = "example"
raise RuntimeError(f"This is an {example} exception")
diff --git a/crates/ruff/resources/test/fixtures/flake8_fixme/T00.py b/crates/ruff/resources/test/fixtures/flake8_fixme/T00.py
new file mode 100644
index 0000000000000..a398610c1c234
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_fixme/T00.py
@@ -0,0 +1,8 @@
+# TODO: todo
+# todo: todo
+# XXX: xxx
+# xxx: xxx
+# HACK: hack
+# hack: hack
+# FIXME: fixme
+# fixme: fixme
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/edge_case.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/edge_case.py
new file mode 100644
index 0000000000000..f98adefaf4cea
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/edge_case.py
@@ -0,0 +1,7 @@
+from typing import List
+import typing as t
+
+
+def main(_: List[int]) -> None:
+ a_list: t.List[str] = []
+ a_list.append("hello")
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/from_typing_import.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/from_typing_import.py
new file mode 100644
index 0000000000000..a8229aca12438
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/from_typing_import.py
@@ -0,0 +1,6 @@
+from typing import List
+
+
+def main() -> None:
+ a_list: List[str] = []
+ a_list.append("hello")
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/from_typing_import_many.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/from_typing_import_many.py
new file mode 100644
index 0000000000000..28ccc2e4c3c3c
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/from_typing_import_many.py
@@ -0,0 +1,8 @@
+from typing import Dict, List, Optional, Set, Union, cast
+
+
+def main() -> None:
+ a_list: List[Optional[str]] = []
+ a_list.append("hello")
+ a_dict = cast(Dict[int | None, Union[int, Set[bool]]], {})
+ a_dict[1] = {True, False}
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/import_typing.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/import_typing.py
new file mode 100644
index 0000000000000..fccfe30aa2612
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/import_typing.py
@@ -0,0 +1,6 @@
+import typing
+
+
+def main() -> None:
+ a_list: typing.List[str] = []
+ a_list.append("hello")
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/import_typing_as.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/import_typing_as.py
new file mode 100644
index 0000000000000..5f634a0334bc1
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/import_typing_as.py
@@ -0,0 +1,6 @@
+import typing as t
+
+
+def main() -> None:
+ a_list: t.List[str] = []
+ a_list.append("hello")
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/no_future_import_uses_lowercase.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/no_future_import_uses_lowercase.py
new file mode 100644
index 0000000000000..a573432cd5aad
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/no_future_import_uses_lowercase.py
@@ -0,0 +1,7 @@
+def main() -> None:
+ a_list: list[str] = []
+ a_list.append("hello")
+
+
+def hello(y: dict[str, int]) -> None:
+ del y
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/no_future_import_uses_union.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/no_future_import_uses_union.py
new file mode 100644
index 0000000000000..50206192f181b
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/no_future_import_uses_union.py
@@ -0,0 +1,7 @@
+def main() -> None:
+ a_list: list[str] | None = []
+ a_list.append("hello")
+
+
+def hello(y: dict[str, int] | None) -> None:
+ del y
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/no_future_import_uses_union_inner.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/no_future_import_uses_union_inner.py
new file mode 100644
index 0000000000000..9f9b5bd574b35
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/no_future_import_uses_union_inner.py
@@ -0,0 +1,8 @@
+def main() -> None:
+ a_list: list[str | None] = []
+ a_list.append("hello")
+
+
+def hello(y: dict[str | None, int]) -> None:
+ z: tuple[str, str | None, str] = tuple(y)
+ del z
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_no_types.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_no_types.py
new file mode 100644
index 0000000000000..54fff8090690b
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_no_types.py
@@ -0,0 +1,3 @@
+def main() -> str:
+ a_str = "hello"
+ return a_str
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_non_simplifiable_types.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_non_simplifiable_types.py
new file mode 100644
index 0000000000000..b5121a9297ee6
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_non_simplifiable_types.py
@@ -0,0 +1,10 @@
+from typing import NamedTuple
+
+
+class Stuff(NamedTuple):
+ x: int
+
+
+def main() -> None:
+ a_list = Stuff(5)
+ print(a_list)
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_uses_future.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_uses_future.py
new file mode 100644
index 0000000000000..281b96d393c33
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_uses_future.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+
+def main() -> None:
+ a_list: list[str] = []
+ a_list.append("hello")
diff --git a/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_variable_name.py b/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_variable_name.py
new file mode 100644
index 0000000000000..a1a8febe427d7
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_future_annotations/ok_variable_name.py
@@ -0,0 +1,8 @@
+import typing
+
+IRRELEVANT = typing.TypeVar
+
+
+def main() -> None:
+ List: list[str] = []
+ List.append("hello")
diff --git a/crates/ruff/resources/test/fixtures/flake8_logging_format/G001.py b/crates/ruff/resources/test/fixtures/flake8_logging_format/G001.py
index de1424f3a254a..7a3a49a6a7d10 100644
--- a/crates/ruff/resources/test/fixtures/flake8_logging_format/G001.py
+++ b/crates/ruff/resources/test/fixtures/flake8_logging_format/G001.py
@@ -7,3 +7,12 @@
logging.log(logging.INFO, msg="Hello {}".format("World!"))
logging.log(level=logging.INFO, msg="Hello {}".format("World!"))
logging.log(msg="Hello {}".format("World!"), level=logging.INFO)
+
+# Flask support
+import flask
+from flask import current_app
+from flask import current_app as app
+
+flask.current_app.logger.info("Hello {}".format("World!"))
+current_app.logger.info("Hello {}".format("World!"))
+app.logger.log(logging.INFO, "Hello {}".format("World!"))
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI013.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI013.py
new file mode 100644
index 0000000000000..9b3635962e29c
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI013.py
@@ -0,0 +1,65 @@
+class OneAttributeClass:
+ value: int
+ ...
+
+
+class OneAttributeClass2:
+ ...
+ value: int
+
+
+class TwoEllipsesClass:
+ ...
+ ...
+
+
+class DocstringClass:
+ """
+ My body only contains an ellipsis.
+ """
+
+ ...
+
+
+class NonEmptyChild(Exception):
+ value: int
+ ...
+
+
+class NonEmptyChild2(Exception):
+ ...
+ value: int
+
+
+class NonEmptyWithInit:
+ value: int
+ ...
+
+ def __init__():
+ pass
+
+
+class EmptyClass:
+ ...
+
+
+class EmptyEllipsis:
+ ...
+
+
+class Dog:
+ eyes: int = 2
+
+
+class WithInit:
+ value: int = 0
+
+ def __init__():
+ ...
+
+
+def function():
+ ...
+
+
+...
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI013.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI013.pyi
new file mode 100644
index 0000000000000..aaf2cb0f794f2
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI013.pyi
@@ -0,0 +1,56 @@
+# Violations of PYI013
+
+class OneAttributeClass:
+ value: int
+ ... # Error
+
+class OneAttributeClass2:
+ ... # Error
+ value: int
+
+class MyClass:
+ ...
+ value: int
+
+class TwoEllipsesClass:
+ ...
+ ... # Error
+
+class DocstringClass:
+ """
+ My body only contains an ellipsis.
+ """
+
+ ... # Error
+
+class NonEmptyChild(Exception):
+ value: int
+ ... # Error
+
+class NonEmptyChild2(Exception):
+ ... # Error
+ value: int
+
+class NonEmptyWithInit:
+ value: int
+ ... # Error
+
+ def __init__():
+ pass
+
+# Not violations
+
+class EmptyClass: ...
+class EmptyEllipsis: ...
+
+class Dog:
+ eyes: int = 2
+
+class WithInit:
+ value: int = 0
+
+ def __init__(): ...
+
+def function(): ...
+
+...
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI024.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI024.py
new file mode 100644
index 0000000000000..3090ae76c3192
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI024.py
@@ -0,0 +1,9 @@
+import collections
+
+person: collections.namedtuple # OK
+
+from collections import namedtuple
+
+person: namedtuple # OK
+
+person = namedtuple("Person", ["name", "age"]) # OK
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI024.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI024.pyi
new file mode 100644
index 0000000000000..b3d3b67b9ed03
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI024.pyi
@@ -0,0 +1,11 @@
+import collections
+
+person: collections.namedtuple # Y024 Use "typing.NamedTuple" instead of "collections.namedtuple"
+
+from collections import namedtuple
+
+person: namedtuple # Y024 Use "typing.NamedTuple" instead of "collections.namedtuple"
+
+person = namedtuple(
+ "Person", ["name", "age"]
+) # Y024 Use "typing.NamedTuple" instead of "collections.namedtuple"
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI025.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI025.py
new file mode 100644
index 0000000000000..5c8b8fa26baf8
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI025.py
@@ -0,0 +1,19 @@
+from collections.abc import Set as AbstractSet # Ok
+
+
+from collections.abc import Set # Ok
+
+
+from collections.abc import (
+ Container,
+ Sized,
+ Set, # Ok
+ ValuesView
+)
+
+from collections.abc import (
+ Container,
+ Sized,
+ Set as AbstractSet, # Ok
+ ValuesView
+)
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI025.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI025.pyi
new file mode 100644
index 0000000000000..c12ccdffb5a53
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI025.pyi
@@ -0,0 +1,19 @@
+from collections.abc import Set as AbstractSet # Ok
+
+
+from collections.abc import Set # PYI025
+
+
+from collections.abc import (
+ Container,
+ Sized,
+ Set, # PYI025
+ ValuesView
+)
+
+from collections.abc import (
+ Container,
+ Sized,
+ Set as AbstractSet,
+ ValuesView # Ok
+)
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI029.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI029.py
new file mode 100644
index 0000000000000..20cc7d6ae589a
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI029.py
@@ -0,0 +1,57 @@
+import builtins
+from abc import abstractmethod
+
+
+def __repr__(self) -> str:
+ ...
+
+
+def __str__(self) -> builtins.str:
+ ...
+
+
+def __repr__(self, /, foo) -> str:
+ ...
+
+
+def __repr__(self, *, foo) -> str:
+ ...
+
+
+class ShouldRemoveSingle:
+ def __str__(self) -> builtins.str:
+ ...
+
+
+class ShouldRemove:
+ def __repr__(self) -> str:
+ ...
+
+ def __str__(self) -> builtins.str:
+ ...
+
+
+class NoReturnSpecified:
+ def __str__(self):
+ ...
+
+ def __repr__(self):
+ ...
+
+
+class NonMatchingArgs:
+ def __str__(self, *, extra) -> builtins.str:
+ ...
+
+ def __repr__(self, /, extra) -> str:
+ ...
+
+
+class MatchingArgsButAbstract:
+ @abstractmethod
+ def __str__(self) -> builtins.str:
+ ...
+
+ @abstractmethod
+ def __repr__(self) -> str:
+ ...
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI029.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI029.pyi
new file mode 100644
index 0000000000000..a082a733a2267
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI029.pyi
@@ -0,0 +1,28 @@
+import builtins
+from abc import abstractmethod
+
+def __repr__(self) -> str: ...
+def __str__(self) -> builtins.str: ...
+def __repr__(self, /, foo) -> str: ...
+def __repr__(self, *, foo) -> str: ...
+
+class ShouldRemoveSingle:
+ def __str__(self) -> builtins.str: ... # Error: PYI029
+
+class ShouldRemove:
+ def __repr__(self) -> str: ... # Error: PYI029
+ def __str__(self) -> builtins.str: ... # Error: PYI029
+
+class NoReturnSpecified:
+ def __str__(self): ...
+ def __repr__(self): ...
+
+class NonMatchingArgs:
+ def __str__(self, *, extra) -> builtins.str: ...
+ def __repr__(self, /, extra) -> str: ...
+
+class MatchingArgsButAbstract:
+ @abstractmethod
+ def __str__(self) -> builtins.str: ...
+ @abstractmethod
+ def __repr__(self) -> str: ...
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI032.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI032.py
new file mode 100644
index 0000000000000..2d226ebe975f8
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI032.py
@@ -0,0 +1,24 @@
+from typing import Any
+import typing
+
+
+class Bad:
+ def __eq__(self, other: Any) -> bool: ... # Fine because not a stub file
+ def __ne__(self, other: typing.Any) -> typing.Any: ... # Fine because not a stub file
+
+
+class Good:
+ def __eq__(self, other: object) -> bool: ...
+
+ def __ne__(self, obj: object) -> int: ...
+
+
+class WeirdButFine:
+ def __eq__(self, other: Any, strange_extra_arg: list[str]) -> Any: ...
+ def __ne__(self, *, kw_only_other: Any) -> bool: ...
+
+
+class Unannotated:
+ def __eq__(self) -> Any: ...
+ def __ne__(self) -> bool: ...
+
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI032.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI032.pyi
new file mode 100644
index 0000000000000..82cb899e3b323
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI032.pyi
@@ -0,0 +1,24 @@
+from typing import Any
+import typing
+
+
+class Bad:
+ def __eq__(self, other: Any) -> bool: ... # Y032
+ def __ne__(self, other: typing.Any) -> typing.Any: ... # Y032
+
+
+class Good:
+ def __eq__(self, other: object) -> bool: ...
+
+ def __ne__(self, obj: object) -> int: ...
+
+
+class WeirdButFine:
+ def __eq__(self, other: Any, strange_extra_arg: list[str]) -> Any: ...
+ def __ne__(self, *, kw_only_other: Any) -> bool: ...
+
+
+class Unannotated:
+ def __eq__(self) -> Any: ...
+ def __ne__(self) -> bool: ...
+
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI034.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI034.py
new file mode 100644
index 0000000000000..850bd057ddd39
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI034.py
@@ -0,0 +1,280 @@
+# flags: --extend-ignore=Y023
+
+import abc
+import builtins
+import collections.abc
+import typing
+from abc import abstractmethod
+from collections.abc import AsyncIterable, AsyncIterator, Iterable, Iterator
+from typing import Any, overload
+
+import typing_extensions
+from _typeshed import Self
+from typing_extensions import final
+
+
+class Bad(
+ object
+): # Y040 Do not inherit from "object" explicitly, as it is redundant in Python 3
+ def __new__(cls, *args: Any, **kwargs: Any) -> Bad:
+ ... # Y034 "__new__" methods usually return "self" at runtime. Consider using "typing_extensions.Self" in "Bad.__new__", e.g. "def __new__(cls, *args: Any, **kwargs: Any) -> Self: ..."
+
+ def __repr__(self) -> str:
+ ... # Y029 Defining __repr__ or __str__ in a stub is almost always redundant
+
+ def __str__(self) -> builtins.str:
+ ... # Y029 Defining __repr__ or __str__ in a stub is almost always redundant
+
+ def __eq__(self, other: Any) -> bool:
+ ... # Y032 Prefer "object" to "Any" for the second parameter in "__eq__" methods
+
+ def __ne__(self, other: typing.Any) -> typing.Any:
+ ... # Y032 Prefer "object" to "Any" for the second parameter in "__ne__" methods
+
+ def __enter__(self) -> Bad:
+ ... # Y034 "__enter__" methods in classes like "Bad" usually return "self" at runtime. Consider using "typing_extensions.Self" in "Bad.__enter__", e.g. "def __enter__(self) -> Self: ..."
+
+ async def __aenter__(self) -> Bad:
+ ... # Y034 "__aenter__" methods in classes like "Bad" usually return "self" at runtime. Consider using "typing_extensions.Self" in "Bad.__aenter__", e.g. "async def __aenter__(self) -> Self: ..."
+
+ def __iadd__(self, other: Bad) -> Bad:
+ ... # Y034 "__iadd__" methods in classes like "Bad" usually return "self" at runtime. Consider using "typing_extensions.Self" in "Bad.__iadd__", e.g. "def __iadd__(self, other: Bad) -> Self: ..."
+
+
+class AlsoBad(int, builtins.object):
+ ... # Y040 Do not inherit from "object" explicitly, as it is redundant in Python 3
+
+
+class Good:
+ def __new__(cls: type[Self], *args: Any, **kwargs: Any) -> Self:
+ ...
+
+ @abstractmethod
+ def __str__(self) -> str:
+ ...
+
+ @abc.abstractmethod
+ def __repr__(self) -> str:
+ ...
+
+ def __eq__(self, other: object) -> bool:
+ ...
+
+ def __ne__(self, obj: object) -> int:
+ ...
+
+ def __enter__(self: Self) -> Self:
+ ...
+
+ async def __aenter__(self: Self) -> Self:
+ ...
+
+ def __ior__(self: Self, other: Self) -> Self:
+ ...
+
+
+class Fine:
+ @overload
+ def __new__(cls, foo: int) -> FineSubclass:
+ ...
+
+ @overload
+ def __new__(cls, *args: Any, **kwargs: Any) -> Fine:
+ ...
+
+ @abc.abstractmethod
+ def __str__(self) -> str:
+ ...
+
+ @abc.abstractmethod
+ def __repr__(self) -> str:
+ ...
+
+ def __eq__(self, other: Any, strange_extra_arg: list[str]) -> Any:
+ ...
+
+ def __ne__(self, *, kw_only_other: Any) -> bool:
+ ...
+
+ def __enter__(self) -> None:
+ ...
+
+ async def __aenter__(self) -> bool:
+ ...
+
+
+class FineSubclass(Fine):
+ ...
+
+
+class StrangeButAcceptable(str):
+ @typing_extensions.overload
+ def __new__(cls, foo: int) -> StrangeButAcceptableSubclass:
+ ...
+
+ @typing_extensions.overload
+ def __new__(cls, *args: Any, **kwargs: Any) -> StrangeButAcceptable:
+ ...
+
+ def __str__(self) -> StrangeButAcceptable:
+ ...
+
+ def __repr__(self) -> StrangeButAcceptable:
+ ...
+
+
+class StrangeButAcceptableSubclass(StrangeButAcceptable):
+ ...
+
+
+class FineAndDandy:
+ def __str__(self, weird_extra_arg) -> str:
+ ...
+
+ def __repr__(self, weird_extra_arg_with_default=...) -> str:
+ ...
+
+
+@final
+class WillNotBeSubclassed:
+ def __new__(cls, *args: Any, **kwargs: Any) -> WillNotBeSubclassed:
+ ...
+
+ def __enter__(self) -> WillNotBeSubclassed:
+ ...
+
+ async def __aenter__(self) -> WillNotBeSubclassed:
+ ...
+
+
+# we don't emit an error for these; out of scope for a linter
+class InvalidButPluginDoesNotCrash:
+ def __new__() -> InvalidButPluginDoesNotCrash:
+ ...
+
+ def __enter__() -> InvalidButPluginDoesNotCrash:
+ ...
+
+ async def __aenter__() -> InvalidButPluginDoesNotCrash:
+ ...
+
+
+class BadIterator1(Iterator[int]):
+ def __iter__(self) -> Iterator[int]:
+ ... # Y034 "__iter__" methods in classes like "BadIterator1" usually return "self" at runtime. Consider using "typing_extensions.Self" in "BadIterator1.__iter__", e.g. "def __iter__(self) -> Self: ..."
+
+
+class BadIterator2(
+ typing.Iterator[int]
+): # Y022 Use "collections.abc.Iterator[T]" instead of "typing.Iterator[T]" (PEP 585 syntax)
+ def __iter__(self) -> Iterator[int]:
+ ... # Y034 "__iter__" methods in classes like "BadIterator2" usually return "self" at runtime. Consider using "typing_extensions.Self" in "BadIterator2.__iter__", e.g. "def __iter__(self) -> Self: ..."
+
+
+class BadIterator3(
+ typing.Iterator[int]
+): # Y022 Use "collections.abc.Iterator[T]" instead of "typing.Iterator[T]" (PEP 585 syntax)
+ def __iter__(self) -> collections.abc.Iterator[int]:
+ ... # Y034 "__iter__" methods in classes like "BadIterator3" usually return "self" at runtime. Consider using "typing_extensions.Self" in "BadIterator3.__iter__", e.g. "def __iter__(self) -> Self: ..."
+
+
+class BadIterator4(Iterator[int]):
+ # Note: *Iterable*, not *Iterator*, returned!
+ def __iter__(self) -> Iterable[int]:
+ ... # Y034 "__iter__" methods in classes like "BadIterator4" usually return "self" at runtime. Consider using "typing_extensions.Self" in "BadIterator4.__iter__", e.g. "def __iter__(self) -> Self: ..."
+
+
+class IteratorReturningIterable:
+ def __iter__(self) -> Iterable[str]:
+ ... # Y045 "__iter__" methods should return an Iterator, not an Iterable
+
+
+class BadAsyncIterator(collections.abc.AsyncIterator[str]):
+ def __aiter__(self) -> typing.AsyncIterator[str]:
+ ... # Y034 "__aiter__" methods in classes like "BadAsyncIterator" usually return "self" at runtime. Consider using "typing_extensions.Self" in "BadAsyncIterator.__aiter__", e.g. "def __aiter__(self) -> Self: ..." # Y022 Use "collections.abc.AsyncIterator[T]" instead of "typing.AsyncIterator[T]" (PEP 585 syntax)
+
+
+class AsyncIteratorReturningAsyncIterable:
+ def __aiter__(self) -> AsyncIterable[str]:
+ ... # Y045 "__aiter__" methods should return an AsyncIterator, not an AsyncIterable
+
+
+class Abstract(Iterator[str]):
+ @abstractmethod
+ def __iter__(self) -> Iterator[str]:
+ ...
+
+ @abstractmethod
+ def __enter__(self) -> Abstract:
+ ...
+
+ @abstractmethod
+ async def __aenter__(self) -> Abstract:
+ ...
+
+
+class GoodIterator(Iterator[str]):
+ def __iter__(self: Self) -> Self:
+ ...
+
+
+class GoodAsyncIterator(AsyncIterator[int]):
+ def __aiter__(self: Self) -> Self:
+ ...
+
+
+class DoesNotInheritFromIterator:
+ def __iter__(self) -> DoesNotInheritFromIterator:
+ ...
+
+
+class Unannotated:
+ def __new__(cls, *args, **kwargs):
+ ...
+
+ def __iter__(self):
+ ...
+
+ def __aiter__(self):
+ ...
+
+ async def __aenter__(self):
+ ...
+
+ def __repr__(self):
+ ...
+
+ def __str__(self):
+ ...
+
+ def __eq__(self):
+ ...
+
+ def __ne__(self):
+ ...
+
+ def __iadd__(self):
+ ...
+
+ def __ior__(self):
+ ...
+
+
+def __repr__(self) -> str:
+ ...
+
+
+def __str__(self) -> str:
+ ...
+
+
+def __eq__(self, other: Any) -> bool:
+ ...
+
+
+def __ne__(self, other: Any) -> bool:
+ ...
+
+
+def __imul__(self, other: Any) -> list[str]:
+ ...
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI034.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI034.pyi
new file mode 100644
index 0000000000000..800cf14512d9e
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI034.pyi
@@ -0,0 +1,188 @@
+# flags: --extend-ignore=Y023
+
+import abc
+import builtins
+import collections.abc
+import typing
+from abc import abstractmethod
+from collections.abc import AsyncIterable, AsyncIterator, Iterable, Iterator
+from typing import Any, overload
+
+import typing_extensions
+from _typeshed import Self
+from typing_extensions import final
+
+class Bad(
+ object
+): # Y040 Do not inherit from "object" explicitly, as it is redundant in Python 3
+ def __new__(
+ cls, *args: Any, **kwargs: Any
+ ) -> Bad: ... # Y034 "__new__" methods usually return "self" at runtime. Consider using "typing_extensions.Self" in "Bad.__new__", e.g. "def __new__(cls, *args: Any, **kwargs: Any) -> Self: ..."
+ def __repr__(
+ self,
+ ) -> str: ... # Y029 Defining __repr__ or __str__ in a stub is almost always redundant
+ def __str__(
+ self,
+ ) -> builtins.str: ... # Y029 Defining __repr__ or __str__ in a stub is almost always redundant
+ def __eq__(
+ self, other: Any
+ ) -> bool: ... # Y032 Prefer "object" to "Any" for the second parameter in "__eq__" methods
+ def __ne__(
+ self, other: typing.Any
+ ) -> typing.Any: ... # Y032 Prefer "object" to "Any" for the second parameter in "__ne__" methods
+ def __enter__(
+ self,
+ ) -> Bad: ... # Y034 "__enter__" methods in classes like "Bad" usually return "self" at runtime. Consider using "typing_extensions.Self" in "Bad.__enter__", e.g. "def __enter__(self) -> Self: ..."
+ async def __aenter__(
+ self,
+ ) -> Bad: ... # Y034 "__aenter__" methods in classes like "Bad" usually return "self" at runtime. Consider using "typing_extensions.Self" in "Bad.__aenter__", e.g. "async def __aenter__(self) -> Self: ..."
+ def __iadd__(
+ self, other: Bad
+ ) -> Bad: ... # Y034 "__iadd__" methods in classes like "Bad" usually return "self" at runtime. Consider using "typing_extensions.Self" in "Bad.__iadd__", e.g. "def __iadd__(self, other: Bad) -> Self: ..."
+
+class AlsoBad(
+ int, builtins.object
+): ... # Y040 Do not inherit from "object" explicitly, as it is redundant in Python 3
+
+class Good:
+ def __new__(cls: type[Self], *args: Any, **kwargs: Any) -> Self: ...
+ @abstractmethod
+ def __str__(self) -> str: ...
+ @abc.abstractmethod
+ def __repr__(self) -> str: ...
+ def __eq__(self, other: object) -> bool: ...
+ def __ne__(self, obj: object) -> int: ...
+ def __enter__(self: Self) -> Self: ...
+ async def __aenter__(self: Self) -> Self: ...
+ def __ior__(self: Self, other: Self) -> Self: ...
+
+class Fine:
+ @overload
+ def __new__(cls, foo: int) -> FineSubclass: ...
+ @overload
+ def __new__(cls, *args: Any, **kwargs: Any) -> Fine: ...
+ @abc.abstractmethod
+ def __str__(self) -> str: ...
+ @abc.abstractmethod
+ def __repr__(self) -> str: ...
+ def __eq__(self, other: Any, strange_extra_arg: list[str]) -> Any: ...
+ def __ne__(self, *, kw_only_other: Any) -> bool: ...
+ def __enter__(self) -> None: ...
+ async def __aenter__(self) -> bool: ...
+
+class FineSubclass(Fine): ...
+
+class StrangeButAcceptable(str):
+ @typing_extensions.overload
+ def __new__(cls, foo: int) -> StrangeButAcceptableSubclass: ...
+ @typing_extensions.overload
+ def __new__(cls, *args: Any, **kwargs: Any) -> StrangeButAcceptable: ...
+ def __str__(self) -> StrangeButAcceptable: ...
+ def __repr__(self) -> StrangeButAcceptable: ...
+
+class StrangeButAcceptableSubclass(StrangeButAcceptable): ...
+
+class FineAndDandy:
+ def __str__(self, weird_extra_arg) -> str: ...
+ def __repr__(self, weird_extra_arg_with_default=...) -> str: ...
+
+@final
+class WillNotBeSubclassed:
+ def __new__(cls, *args: Any, **kwargs: Any) -> WillNotBeSubclassed: ...
+ def __enter__(self) -> WillNotBeSubclassed: ...
+ async def __aenter__(self) -> WillNotBeSubclassed: ...
+
+# we don't emit an error for these; out of scope for a linter
+class InvalidButPluginDoesNotCrash:
+ def __new__() -> InvalidButPluginDoesNotCrash: ...
+ def __enter__() -> InvalidButPluginDoesNotCrash: ...
+ async def __aenter__() -> InvalidButPluginDoesNotCrash: ...
+
+class BadIterator1(Iterator[int]):
+ def __iter__(
+ self,
+ ) -> Iterator[
+ int
+ ]: ... # Y034 "__iter__" methods in classes like "BadIterator1" usually return "self" at runtime. Consider using "typing_extensions.Self" in "BadIterator1.__iter__", e.g. "def __iter__(self) -> Self: ..."
+
+class BadIterator2(
+ typing.Iterator[int]
+): # Y022 Use "collections.abc.Iterator[T]" instead of "typing.Iterator[T]" (PEP 585 syntax)
+ def __iter__(
+ self,
+ ) -> Iterator[
+ int
+ ]: ... # Y034 "__iter__" methods in classes like "BadIterator2" usually return "self" at runtime. Consider using "typing_extensions.Self" in "BadIterator2.__iter__", e.g. "def __iter__(self) -> Self: ..."
+
+class BadIterator3(
+ typing.Iterator[int]
+): # Y022 Use "collections.abc.Iterator[T]" instead of "typing.Iterator[T]" (PEP 585 syntax)
+ def __iter__(
+ self,
+ ) -> collections.abc.Iterator[
+ int
+ ]: ... # Y034 "__iter__" methods in classes like "BadIterator3" usually return "self" at runtime. Consider using "typing_extensions.Self" in "BadIterator3.__iter__", e.g. "def __iter__(self) -> Self: ..."
+
+class BadIterator4(Iterator[int]):
+ # Note: *Iterable*, not *Iterator*, returned!
+ def __iter__(
+ self,
+ ) -> Iterable[
+ int
+ ]: ... # Y034 "__iter__" methods in classes like "BadIterator4" usually return "self" at runtime. Consider using "typing_extensions.Self" in "BadIterator4.__iter__", e.g. "def __iter__(self) -> Self: ..."
+
+class IteratorReturningIterable:
+ def __iter__(
+ self,
+ ) -> Iterable[
+ str
+ ]: ... # Y045 "__iter__" methods should return an Iterator, not an Iterable
+
+class BadAsyncIterator(collections.abc.AsyncIterator[str]):
+ def __aiter__(
+ self,
+ ) -> typing.AsyncIterator[
+ str
+ ]: ... # Y034 "__aiter__" methods in classes like "BadAsyncIterator" usually return "self" at runtime. Consider using "typing_extensions.Self" in "BadAsyncIterator.__aiter__", e.g. "def __aiter__(self) -> Self: ..." # Y022 Use "collections.abc.AsyncIterator[T]" instead of "typing.AsyncIterator[T]" (PEP 585 syntax)
+
+class AsyncIteratorReturningAsyncIterable:
+ def __aiter__(
+ self,
+ ) -> AsyncIterable[
+ str
+ ]: ... # Y045 "__aiter__" methods should return an AsyncIterator, not an AsyncIterable
+
+class Abstract(Iterator[str]):
+ @abstractmethod
+ def __iter__(self) -> Iterator[str]: ...
+ @abstractmethod
+ def __enter__(self) -> Abstract: ...
+ @abstractmethod
+ async def __aenter__(self) -> Abstract: ...
+
+class GoodIterator(Iterator[str]):
+ def __iter__(self: Self) -> Self: ...
+
+class GoodAsyncIterator(AsyncIterator[int]):
+ def __aiter__(self: Self) -> Self: ...
+
+class DoesNotInheritFromIterator:
+ def __iter__(self) -> DoesNotInheritFromIterator: ...
+
+class Unannotated:
+ def __new__(cls, *args, **kwargs): ...
+ def __iter__(self): ...
+ def __aiter__(self): ...
+ async def __aenter__(self): ...
+ def __repr__(self): ...
+ def __str__(self): ...
+ def __eq__(self): ...
+ def __ne__(self): ...
+ def __iadd__(self): ...
+ def __ior__(self): ...
+
+def __repr__(self) -> str: ...
+def __str__(self) -> str: ...
+def __eq__(self, other: Any) -> bool: ...
+def __ne__(self, other: Any) -> bool: ...
+def __imul__(self, other: Any) -> list[str]: ...
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI035.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI035.py
new file mode 100644
index 0000000000000..4343b93af6a84
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI035.py
@@ -0,0 +1,15 @@
+__all__: list[str]
+
+__all__: list[str] = ["foo"]
+
+
+class Foo:
+ __all__: list[str]
+ __match_args__: tuple[str, ...]
+ __slots__: tuple[str, ...]
+
+
+class Bar:
+ __all__: list[str] = ["foo"]
+ __match_args__: tuple[str, ...] = (1,)
+ __slots__: tuple[str, ...] = "foo"
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI035.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI035.pyi
new file mode 100644
index 0000000000000..81dfe26692acf
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI035.pyi
@@ -0,0 +1,13 @@
+__all__: list[str] # Error: PYI035
+
+__all__: list[str] = ["foo"]
+
+class Foo:
+ __all__: list[str]
+ __match_args__: tuple[str, ...] # Error: PYI035
+ __slots__: tuple[str, ...] # Error: PYI035
+
+class Bar:
+ __all__: list[str] = ["foo"]
+ __match_args__: tuple[str, ...] = (1,)
+ __slots__: tuple[str, ...] = "foo"
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI045.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI045.py
new file mode 100644
index 0000000000000..3bc27876b22df
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI045.py
@@ -0,0 +1,85 @@
+import collections.abc
+import typing
+from collections.abc import Iterator, Iterable
+
+
+class NoReturn:
+ def __iter__(self):
+ ...
+
+
+class TypingIterableTReturn:
+ def __iter__(self) -> typing.Iterable[int]:
+ ...
+
+ def not_iter(self) -> typing.Iterable[int]:
+ ...
+
+
+class TypingIterableReturn:
+ def __iter__(self) -> typing.Iterable:
+ ...
+
+ def not_iter(self) -> typing.Iterable:
+ ...
+
+
+class CollectionsIterableTReturn:
+ def __iter__(self) -> collections.abc.Iterable[int]:
+ ...
+
+ def not_iter(self) -> collections.abc.Iterable[int]:
+ ...
+
+
+class CollectionsIterableReturn:
+ def __iter__(self) -> collections.abc.Iterable:
+ ...
+
+ def not_iter(self) -> collections.abc.Iterable:
+ ...
+
+
+class IterableReturn:
+ def __iter__(self) -> Iterable:
+ ...
+
+
+class IteratorReturn:
+ def __iter__(self) -> Iterator:
+ ...
+
+
+class IteratorTReturn:
+ def __iter__(self) -> Iterator[int]:
+ ...
+
+
+class TypingIteratorReturn:
+ def __iter__(self) -> typing.Iterator:
+ ...
+
+
+class TypingIteratorTReturn:
+ def __iter__(self) -> typing.Iterator[int]:
+ ...
+
+
+class CollectionsIteratorReturn:
+ def __iter__(self) -> collections.abc.Iterator:
+ ...
+
+
+class CollectionsIteratorTReturn:
+ def __iter__(self) -> collections.abc.Iterator[int]:
+ ...
+
+
+class TypingAsyncIterableTReturn:
+ def __aiter__(self) -> typing.AsyncIterable[int]:
+ ...
+
+
+class TypingAsyncIterableReturn:
+ def __aiter__(self) -> typing.AsyncIterable:
+ ...
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI045.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI045.pyi
new file mode 100644
index 0000000000000..a7141baa18518
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI045.pyi
@@ -0,0 +1,49 @@
+import collections.abc
+import typing
+from collections.abc import Iterator, Iterable
+
+class NoReturn:
+ def __iter__(self): ...
+
+class TypingIterableTReturn:
+ def __iter__(self) -> typing.Iterable[int]: ... # Error: PYI045
+ def not_iter(self) -> typing.Iterable[int]: ...
+
+class TypingIterableReturn:
+ def __iter__(self) -> typing.Iterable: ... # Error: PYI045
+ def not_iter(self) -> typing.Iterable: ...
+
+class CollectionsIterableTReturn:
+ def __iter__(self) -> collections.abc.Iterable[int]: ... # Error: PYI045
+ def not_iter(self) -> collections.abc.Iterable[int]: ...
+
+class CollectionsIterableReturn:
+ def __iter__(self) -> collections.abc.Iterable: ... # Error: PYI045
+ def not_iter(self) -> collections.abc.Iterable: ...
+
+class IterableReturn:
+ def __iter__(self) -> Iterable: ... # Error: PYI045
+
+class IteratorReturn:
+ def __iter__(self) -> Iterator: ...
+
+class IteratorTReturn:
+ def __iter__(self) -> Iterator[int]: ...
+
+class TypingIteratorReturn:
+ def __iter__(self) -> typing.Iterator: ...
+
+class TypingIteratorTReturn:
+ def __iter__(self) -> typing.Iterator[int]: ...
+
+class CollectionsIteratorReturn:
+ def __iter__(self) -> collections.abc.Iterator: ...
+
+class CollectionsIteratorTReturn:
+ def __iter__(self) -> collections.abc.Iterator[int]: ...
+
+class TypingAsyncIterableTReturn:
+ def __aiter__(self) -> typing.AsyncIterable[int]: ... # Error: PYI045
+
+class TypingAsyncIterableReturn:
+ def __aiter__(self) -> typing.AsyncIterable: ... # Error: PYI045
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI048.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI048.py
new file mode 100644
index 0000000000000..8ec21f2d31c1d
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI048.py
@@ -0,0 +1,19 @@
+def bar(): # OK
+ ...
+
+
+def oof(): # OK, docstrings are handled by another rule
+ """oof"""
+ print("foo")
+
+
+def foo(): # OK, not in a stub file
+ """foo"""
+ print("foo")
+ print("foo")
+
+
+def buzz(): # OK, not in a stub file
+ print("fizz")
+ print("buzz")
+ print("test")
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI048.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI048.pyi
new file mode 100644
index 0000000000000..29a2120f94359
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI048.pyi
@@ -0,0 +1,20 @@
+def bar():
+ ... # OK
+
+
+def oof(): # OK, docstrings are handled by another rule
+ """oof"""
+ print("foo")
+
+
+
+def foo(): # ERROR PYI048
+ """foo"""
+ print("foo")
+ print("foo")
+
+
+def buzz(): # ERROR PYI048
+ print("fizz")
+ print("buzz")
+ print("test")
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI050.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI050.py
new file mode 100644
index 0000000000000..042fe887ec36d
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI050.py
@@ -0,0 +1,32 @@
+from typing import NoReturn, Never
+import typing_extensions
+
+
+def foo(arg):
+ ...
+
+
+def foo_int(arg: int):
+ ...
+
+
+def foo_no_return(arg: NoReturn):
+ ...
+
+
+def foo_no_return_typing_extensions(
+ arg: typing_extensions.NoReturn,
+):
+ ...
+
+
+def foo_no_return_kwarg(arg: int, *, arg2: NoReturn):
+ ...
+
+
+def foo_no_return_pos_only(arg: int, /, arg2: NoReturn):
+ ...
+
+
+def foo_never(arg: Never):
+ ...
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI050.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI050.pyi
new file mode 100644
index 0000000000000..4720ee7756b6a
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI050.pyi
@@ -0,0 +1,12 @@
+from typing import NoReturn, Never
+import typing_extensions
+
+def foo(arg): ...
+def foo_int(arg: int): ...
+def foo_no_return(arg: NoReturn): ... # Error: PYI050
+def foo_no_return_typing_extensions(
+ arg: typing_extensions.NoReturn,
+): ... # Error: PYI050
+def foo_no_return_kwarg(arg: int, *, arg2: NoReturn): ... # Error: PYI050
+def foo_no_return_pos_only(arg: int, /, arg2: NoReturn): ... # Error: PYI050
+def foo_never(arg: Never): ...
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI052.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI052.py
new file mode 100644
index 0000000000000..37a4f4d8671c5
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI052.py
@@ -0,0 +1,93 @@
+import builtins
+import typing
+from typing import TypeAlias, Final
+
+field1: int
+field2: int = ...
+field3 = ... # type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
+field4: int = 0
+field41: int = 0xFFFFFFFF
+field42: int = 1234567890
+field43: int = -0xFFFFFFFF
+field44: int = -1234567890
+field5 = 0 # type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int") # Y052 Need type annotation for "field5"
+field6 = 0 # Y052 Need type annotation for "field6"
+field7 = b"" # Y052 Need type annotation for "field7"
+field71 = "foo" # Y052 Need type annotation for "field71"
+field72: str = "foo"
+field8 = False # Y052 Need type annotation for "field8"
+field81 = -1 # Y052 Need type annotation for "field81"
+field82: float = -98.43
+field83 = -42j # Y052 Need type annotation for "field83"
+field84 = 5 + 42j # Y052 Need type annotation for "field84"
+field85 = -5 - 42j # Y052 Need type annotation for "field85"
+field9 = None # Y026 Use typing_extensions.TypeAlias for type aliases, e.g. "field9: TypeAlias = None"
+Field95: TypeAlias = None
+Field96: TypeAlias = int | None
+Field97: TypeAlias = None | typing.SupportsInt | builtins.str | float | bool
+field19 = [1, 2, 3] # Y052 Need type annotation for "field19"
+field191: list[int] = [1, 2, 3]
+field20 = (1, 2, 3) # Y052 Need type annotation for "field20"
+field201: tuple[int, ...] = (1, 2, 3)
+field21 = {1, 2, 3} # Y052 Need type annotation for "field21"
+field211: set[int] = {1, 2, 3}
+field212 = {"foo": "bar"} # Y052 Need type annotation for "field212"
+field213: dict[str, str] = {"foo": "bar"}
+field22: Final = {"foo": 5}
+field221: list[int] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] # Y015 Only simple default values are allowed for assignments
+field223: list[int] = [*range(10)] # Y015 Only simple default values are allowed for assignments
+field224: list[int] = list(range(10)) # Y015 Only simple default values are allowed for assignments
+field225: list[object] = [{}, 1, 2] # Y015 Only simple default values are allowed for assignments
+field226: tuple[str | tuple[str, ...], ...] = ("foo", ("foo", "bar")) # Y015 Only simple default values are allowed for assignments
+field227: dict[str, object] = {"foo": {"foo": "bar"}} # Y015 Only simple default values are allowed for assignments
+field228: dict[str, list[object]] = {"foo": []} # Y015 Only simple default values are allowed for assignments
+# When parsed, this case results in `None` being placed in the `.keys` list for the `ast.Dict` node
+field229: dict[int, int] = {1: 2, **{3: 4}} # Y015 Only simple default values are allowed for assignments
+field23 = "foo" + "bar" # Y015 Only simple default values are allowed for assignments
+field24 = b"foo" + b"bar" # Y015 Only simple default values are allowed for assignments
+field25 = 5 * 5 # Y015 Only simple default values are allowed for assignments
+
+# We shouldn't emit Y015 within functions
+def f():
+ field26: list[int] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+
+
+# We shouldn't emit Y015 for __slots__ or __match_args__
+class Class1:
+ __slots__ = (
+ '_one',
+ '_two',
+ '_three',
+ '_four',
+ '_five',
+ '_six',
+ '_seven',
+ '_eight',
+ '_nine',
+ '_ten',
+ '_eleven',
+ )
+
+ __match_args__ = (
+ 'one',
+ 'two',
+ 'three',
+ 'four',
+ 'five',
+ 'six',
+ 'seven',
+ 'eight',
+ 'nine',
+ 'ten',
+ 'eleven',
+ )
+
+# We shouldn't emit Y015 for __all__
+__all__ = ["Class1"]
+
+# Ignore the following for PYI015
+field26 = typing.Sequence[int]
+field27 = list[str]
+field28 = builtins.str
+field29 = str
+field30 = str | bytes | None
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI052.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI052.pyi
new file mode 100644
index 0000000000000..860ee255fb9b8
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI052.pyi
@@ -0,0 +1,100 @@
+import builtins
+import typing
+from typing import TypeAlias, Final, NewType, TypeVar, TypeVarTuple, ParamSpec
+
+# We shouldn't emit Y015 for simple default values
+field1: int
+field2: int = ...
+field3 = ... # type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
+field4: int = 0
+field41: int = 0xFFFFFFFF
+field42: int = 1234567890
+field43: int = -0xFFFFFFFF
+field44: int = -1234567890
+field5 = 0 # type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int") # Y052 Need type annotation for "field5"
+field6 = 0 # Y052 Need type annotation for "field6"
+field7 = b"" # Y052 Need type annotation for "field7"
+field71 = "foo" # Y052 Need type annotation for "field71"
+field72: str = "foo"
+field8 = False # Y052 Need type annotation for "field8"
+field81 = -1 # Y052 Need type annotation for "field81"
+field82: float = -98.43
+field83 = -42j # Y052 Need type annotation for "field83"
+field84 = 5 + 42j # Y052 Need type annotation for "field84"
+field85 = -5 - 42j # Y052 Need type annotation for "field85"
+field9 = None # Y026 Use typing_extensions.TypeAlias for type aliases, e.g. "field9: TypeAlias = None"
+Field95: TypeAlias = None
+Field96: TypeAlias = int | None
+Field97: TypeAlias = None | typing.SupportsInt | builtins.str | float | bool
+Field98 = NewType('MyInt', int)
+Field99 = TypeVar('Field99')
+Field100 = TypeVarTuple('Field100')
+Field101 = ParamSpec('Field101')
+field19 = [1, 2, 3] # Y052 Need type annotation for "field19"
+field191: list[int] = [1, 2, 3]
+field20 = (1, 2, 3) # Y052 Need type annotation for "field20"
+field201: tuple[int, ...] = (1, 2, 3)
+field21 = {1, 2, 3} # Y052 Need type annotation for "field21"
+field211: set[int] = {1, 2, 3}
+field212 = {"foo": "bar"} # Y052 Need type annotation for "field212"
+field213: dict[str, str] = {"foo": "bar"}
+field22: Final = {"foo": 5}
+
+# We *should* emit Y015 for more complex default values
+field221: list[int] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] # Y015 Only simple default values are allowed for assignments
+field223: list[int] = [*range(10)] # Y015 Only simple default values are allowed for assignments
+field224: list[int] = list(range(10)) # Y015 Only simple default values are allowed for assignments
+field225: list[object] = [{}, 1, 2] # Y015 Only simple default values are allowed for assignments
+field226: tuple[str | tuple[str, ...], ...] = ("foo", ("foo", "bar")) # Y015 Only simple default values are allowed for assignments
+field227: dict[str, object] = {"foo": {"foo": "bar"}} # Y015 Only simple default values are allowed for assignments
+field228: dict[str, list[object]] = {"foo": []} # Y015 Only simple default values are allowed for assignments
+# When parsed, this case results in `None` being placed in the `.keys` list for the `ast.Dict` node
+field229: dict[int, int] = {1: 2, **{3: 4}} # Y015 Only simple default values are allowed for assignments
+field23 = "foo" + "bar" # Y015 Only simple default values are allowed for assignments
+field24 = b"foo" + b"bar" # Y015 Only simple default values are allowed for assignments
+field25 = 5 * 5 # Y015 Only simple default values are allowed for assignments
+
+# We shouldn't emit Y015 within functions
+def f():
+ field26: list[int] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+
+
+# We shouldn't emit Y015 for __slots__ or __match_args__
+class Class1:
+ __slots__ = (
+ '_one',
+ '_two',
+ '_three',
+ '_four',
+ '_five',
+ '_six',
+ '_seven',
+ '_eight',
+ '_nine',
+ '_ten',
+ '_eleven',
+ )
+
+ __match_args__ = (
+ 'one',
+ 'two',
+ 'three',
+ 'four',
+ 'five',
+ 'six',
+ 'seven',
+ 'eight',
+ 'nine',
+ 'ten',
+ 'eleven',
+ )
+
+# We shouldn't emit Y015 for __all__
+__all__ = ["Class1"]
+
+# Ignore the following for PYI015
+field26 = typing.Sequence[int]
+field27 = list[str]
+field28 = builtins.str
+field29 = str
+field30 = str | bytes | None
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI053.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI053.py
new file mode 100644
index 0000000000000..15631d15f2acd
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI053.py
@@ -0,0 +1,38 @@
+def f1(x: str = "50 character stringggggggggggggggggggggggggggggggg") -> None:
+ ...
+
+
+def f2(x: str = "51 character stringgggggggggggggggggggggggggggggggg") -> None:
+ ...
+
+
+def f3(x: str = "50 character stringggggggggggggggggggggggggggggg\U0001f600") -> None:
+ ...
+
+
+def f4(x: str = "51 character stringgggggggggggggggggggggggggggggg\U0001f600") -> None:
+ ...
+
+
+def f5(x: bytes = b"50 character byte stringgggggggggggggggggggggggggg") -> None:
+ ...
+
+
+def f6(x: bytes = b"51 character byte stringgggggggggggggggggggggggggg") -> None:
+ ...
+
+
+def f7(x: bytes = b"50 character byte stringggggggggggggggggggggggggg\xff") -> None:
+ ...
+
+
+def f8(x: bytes = b"50 character byte stringgggggggggggggggggggggggggg\xff") -> None:
+ ...
+
+
+foo: str = "50 character stringggggggggggggggggggggggggggggggg"
+bar: str = "51 character stringgggggggggggggggggggggggggggggggg"
+
+baz: bytes = b"50 character byte stringgggggggggggggggggggggggggg"
+
+qux: bytes = b"51 character byte stringggggggggggggggggggggggggggg\xff"
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI053.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI053.pyi
new file mode 100644
index 0000000000000..d2f55531a2660
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI053.pyi
@@ -0,0 +1,30 @@
+def f1(x: str = "50 character stringggggggggggggggggggggggggggggggg") -> None: ... # OK
+def f2(
+ x: str = "51 character stringgggggggggggggggggggggggggggggggg", # Error: PYI053
+) -> None: ...
+def f3(
+ x: str = "50 character stringgggggggggggggggggggggggggggggg\U0001f600", # OK
+) -> None: ...
+def f4(
+ x: str = "51 character stringggggggggggggggggggggggggggggggg\U0001f600", # Error: PYI053
+) -> None: ...
+def f5(
+ x: bytes = b"50 character byte stringgggggggggggggggggggggggggg", # OK
+) -> None: ...
+def f6(
+ x: bytes = b"51 character byte stringgggggggggggggggggggggggggg", # Error: PYI053
+) -> None: ...
+def f7(
+ x: bytes = b"50 character byte stringggggggggggggggggggggggggg\xff", # OK
+) -> None: ...
+def f8(
+ x: bytes = b"51 character byte stringgggggggggggggggggggggggggg\xff", # Error: PYI053
+) -> None: ...
+
+foo: str = "50 character stringggggggggggggggggggggggggggggggg" # OK
+
+bar: str = "51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053
+
+baz: bytes = b"50 character byte stringgggggggggggggggggggggggggg" # OK
+
+qux: bytes = b"51 character byte stringggggggggggggggggggggggggggg\xff" # Error: PYI053
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI054.py b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI054.py
new file mode 100644
index 0000000000000..9ea9eec654165
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI054.py
@@ -0,0 +1,20 @@
+field01: int = 0xFFFFFFFF
+field02: int = 0xFFFFFFFFF
+field03: int = -0xFFFFFFFF
+field04: int = -0xFFFFFFFFF
+
+field05: int = 1234567890
+field06: int = 12_456_890
+field07: int = 12345678901
+field08: int = -1234567801
+field09: int = -234_567_890
+
+field10: float = 123.456789
+field11: float = 123.4567890
+field12: float = -123.456789
+field13: float = -123.567_890
+
+field14: complex = 1e1234567j
+field15: complex = 1e12345678j
+field16: complex = -1e1234567j
+field17: complex = 1e123456789j
diff --git a/crates/ruff/resources/test/fixtures/flake8_pyi/PYI054.pyi b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI054.pyi
new file mode 100644
index 0000000000000..73c9a37347761
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_pyi/PYI054.pyi
@@ -0,0 +1,20 @@
+field01: int = 0xFFFFFFFF
+field02: int = 0xFFFFFFFFF # Error: PYI054
+field03: int = -0xFFFFFFFF
+field04: int = -0xFFFFFFFFF # Error: PYI054
+
+field05: int = 1234567890
+field06: int = 12_456_890
+field07: int = 12345678901 # Error: PYI054
+field08: int = -1234567801
+field09: int = -234_567_890 # Error: PYI054
+
+field10: float = 123.456789
+field11: float = 123.4567890 # Error: PYI054
+field12: float = -123.456789
+field13: float = -123.567_890 # Error: PYI054
+
+field14: complex = 1e1234567j
+field15: complex = 1e12345678j # Error: PYI054
+field16: complex = -1e1234567j
+field17: complex = 1e123456789j # Error: PYI054
diff --git a/crates/ruff/resources/test/fixtures/flake8_pytest_style/PT018.py b/crates/ruff/resources/test/fixtures/flake8_pytest_style/PT018.py
index f9261b8620960..9bc5fbe877561 100644
--- a/crates/ruff/resources/test/fixtures/flake8_pytest_style/PT018.py
+++ b/crates/ruff/resources/test/fixtures/flake8_pytest_style/PT018.py
@@ -39,3 +39,8 @@ def test_error():
message
"""
)
+
+
+assert something # OK
+assert something and something_else # Error
+assert something and something_else and something_third # Error
diff --git a/crates/ruff/resources/test/fixtures/flake8_return/RET504.py b/crates/ruff/resources/test/fixtures/flake8_return/RET504.py
index 96bc755687e32..80b92b3193cf3 100644
--- a/crates/ruff/resources/test/fixtures/flake8_return/RET504.py
+++ b/crates/ruff/resources/test/fixtures/flake8_return/RET504.py
@@ -272,3 +272,34 @@ def str_to_bool(val):
if isinstance(val, bool):
return some_obj
return val
+
+
+# Mixed assignments
+def function_assignment(x):
+ def f(): ...
+
+ return f
+
+
+def class_assignment(x):
+ class Foo: ...
+
+ return Foo
+
+
+def mixed_function_assignment(x):
+ if x:
+ def f(): ...
+ else:
+ f = 42
+
+ return f
+
+
+def mixed_class_assignment(x):
+ if x:
+ class Foo: ...
+ else:
+ Foo = 42
+
+ return Foo
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM102.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM102.py
index c1a9c03cf1053..3b47a3e33505f 100644
--- a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM102.py
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM102.py
@@ -86,9 +86,16 @@
):
print("Bad module!")
-# SIM102
-if node.module:
- if node.module == "multiprocessing" or node.module.startswith(
+# SIM102 (auto-fixable)
+if node.module012345678:
+ if node.module == "multiprocß9💣2ℝ" or node.module.startswith(
+ "multiprocessing."
+ ):
+ print("Bad module!")
+
+# SIM102 (not auto-fixable)
+if node.module0123456789:
+ if node.module == "multiprocß9💣2ℝ" or node.module.startswith(
"multiprocessing."
):
print("Bad module!")
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_0.py
similarity index 66%
rename from crates/ruff/resources/test/fixtures/flake8_simplify/SIM105.py
rename to crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_0.py
index d54cf3f9c72ab..8b19fef6f8866 100644
--- a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105.py
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_0.py
@@ -1,73 +1,96 @@
def foo():
pass
+
+# SIM105
try:
foo()
-except ValueError: # SIM105
+except ValueError:
pass
+# SIM105
try:
foo()
-except (ValueError, OSError): # SIM105
+except (ValueError, OSError):
pass
+# SIM105
try:
foo()
-except: # SIM105
+except:
pass
+# SIM105
try:
foo()
-except (a.Error, b.Error): # SIM105
+except (a.Error, b.Error):
pass
+# OK
try:
foo()
except ValueError:
- print('foo')
+ print("foo")
except OSError:
pass
+# OK
try:
foo()
except ValueError:
pass
else:
- print('bar')
+ print("bar")
+# OK
try:
foo()
except ValueError:
pass
finally:
- print('bar')
+ print("bar")
+# OK
try:
foo()
foo()
except ValueError:
pass
+# OK
try:
for i in range(3):
foo()
except ValueError:
pass
+
def bar():
+ # OK
try:
return foo()
except ValueError:
pass
+
def with_ellipsis():
+ # OK
try:
foo()
except ValueError:
...
+
def with_ellipsis_and_return():
+ # OK
try:
return foo()
except ValueError:
...
+
+
+def with_comment():
+ try:
+ foo()
+ except (ValueError, OSError):
+ pass # Trailing comment.
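SIM105 replaces a try/except/pass block with `contextlib.suppress`, adding the import when it is missing (the SIM105_1/2/3 fixtures below exercise where that import lands). A minimal sketch of the rewrite for the first case:

    import contextlib

    with contextlib.suppress(ValueError):
        foo()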
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_1.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_1.py
index 4cd40b5af6e73..adb719760188d 100644
--- a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_1.py
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_1.py
@@ -1,7 +1,8 @@
"""Case: There's a random import, so it should add `contextlib` after it."""
import math
+# SIM105
try:
math.sqrt(-1)
-except ValueError: # SIM105
+except ValueError:
pass
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_2.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_2.py
index 66a9ba35c05da..625ca2161b0c0 100644
--- a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_2.py
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_2.py
@@ -6,6 +6,7 @@ def foo():
pass
+# SIM105
try:
foo()
except ValueError:
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_3.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_3.py
new file mode 100644
index 0000000000000..d630bcdd7a6c7
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM105_3.py
@@ -0,0 +1,16 @@
+"""Case: `contextlib` is imported after the call site."""
+
+
+def foo():
+ pass
+
+
+def bar():
+ # SIM105
+ try:
+ foo()
+ except ValueError:
+ pass
+
+
+import contextlib
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM108.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM108.py
index ab43e3f2b1d48..94b14f911a52a 100644
--- a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM108.py
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM108.py
@@ -80,17 +80,25 @@
# SIM108
if a:
- b = cccccccccccccccccccccccccccccccccccc
+ b = "cccccccccccccccccccccccccccccccccß"
else:
- b = ddddddddddddddddddddddddddddddddddddd
+ b = "ddddddddddddddddddddddddddddddddd💣"
# OK (too long)
if True:
if a:
- b = cccccccccccccccccccccccccccccccccccc
+ b = ccccccccccccccccccccccccccccccccccc
else:
- b = ddddddddddddddddddddddddddddddddddddd
+ b = ddddddddddddddddddddddddddddddddddd
+
+
+# OK (too long with tabs)
+if True:
+ if a:
+ b = ccccccccccccccccccccccccccccccccccc
+ else:
+ b = ddddddddddddddddddddddddddddddddddd
# SIM108 (without fix due to trailing comment)
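SIM108 rewrites a simple if/else assignment as a ternary expression, skipping the fix when the result would exceed the line length (the "too long" cases above) or when a trailing comment would be lost. A sketch of the general shape of the fix, with placeholder values:

    b = value_if_true if a else value_if_false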
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM110.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM110.py
index 30ce25bb0e550..b02ac7c28cb99 100644
--- a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM110.py
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM110.py
@@ -155,3 +155,19 @@ def f():
if check(x):
return False
return True
+
+
+def f():
+ # SIM110
+ for x in "012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ":
+ if x.isdigit():
+ return True
+ return False
+
+
+def f():
+ # OK (too long)
+ for x in "012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9":
+ if x.isdigit():
+ return True
+ return False
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM111.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM111.py
index f0afb793d4ca6..d6908461fcd8d 100644
--- a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM111.py
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM111.py
@@ -171,3 +171,19 @@ def f():
if x > y:
return False
return True
+
+
+def f():
+ # SIM111
+ for x in "012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9":
+ if x.isdigit():
+ return False
+ return True
+
+
+def f():
+ # OK (too long)
+ for x in "012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß90":
+ if x.isdigit():
+ return False
+ return True
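SIM110 and SIM111 rewrite these loops as `any()` and `all()` calls respectively, again only when the one-line replacement stays within the length limit. A sketch of both rewrites, with the long strings elided:

    def f():
        # SIM110 fix
        return any(x.isdigit() for x in "...")


    def f():
        # SIM111 fix
        return all(not x.isdigit() for x in "...")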
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM117.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM117.py
index 34dd47e361ae2..3c99535e43088 100644
--- a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM117.py
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM117.py
@@ -90,3 +90,13 @@
D() as d,
):
print("hello")
+
+# SIM117 (auto-fixable)
+with A("01ß9💣2ℝ8901ß9💣2ℝ8901ß9💣2ℝ89") as a:
+ with B("01ß9💣2ℝ8901ß9💣2ℝ8901ß9💣2ℝ89") as b:
+ print("hello")
+
+# SIM117 (not auto-fixable too long)
+with A("01ß9💣2ℝ8901ß9💣2ℝ8901ß9💣2ℝ890") as a:
+ with B("01ß9💣2ℝ8901ß9💣2ℝ8901ß9💣2ℝ89") as b:
+ print("hello")
\ No newline at end of file
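SIM117 merges nested `with` statements into a single statement with multiple context managers; as above, the fix is withheld when the merged header would be too long. Sketch for the fixable case, strings elided:

    with A("...") as a, B("...") as b:
        print("hello")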
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM208.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM208.py
index 1a23f70773a01..5ad2dd6f820ba 100644
--- a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM208.py
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM208.py
@@ -12,3 +12,10 @@
if not a != b: # OK
pass
+
+a = not not b # SIM208
+
+f(not not a) # SIM208
+
+if 1 + (not (not a)): # SIM208
+ pass
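SIM208 flags double negation; the suggested fix simply drops both `not`s (which also drops the implicit `bool()` coercion of the operand). Sketch for the new cases:

    a = b
    f(a)
    if 1 + a:
        pass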
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM210.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM210.py
index dafe5e24dab9b..9def63a549c95 100644
--- a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM210.py
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM210.py
@@ -6,6 +6,7 @@
a = False if b else True # OK
+
def f():
# OK
def bool():
diff --git a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM401.py b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM401.py
index 0ef88effb7aa7..487f15a5f71e9 100644
--- a/crates/ruff/resources/test/fixtures/flake8_simplify/SIM401.py
+++ b/crates/ruff/resources/test/fixtures/flake8_simplify/SIM401.py
@@ -14,7 +14,7 @@
else:
var = a_dict[key]
-# SIM401 (default with a complex expression)
+# OK (default contains effect)
if key in a_dict:
var = a_dict[key]
else:
@@ -36,12 +36,18 @@
if key in a_dict:
vars[idx] = a_dict[key]
else:
- vars[idx] = "default"
+ vars[idx] = "defaultß9💣2ℝ6789ß9💣2ℝ6789ß9💣2ℝ6789ß9💣2ℝ6789ß9💣2ℝ6789"
###
# Negative cases
###
+# OK (too long)
+if key in a_dict:
+ vars[idx] = a_dict[key]
+else:
+ vars[idx] = "defaultß9💣2ℝ6789ß9💣2ℝ6789ß9💣2ℝ6789ß9💣2ℝ6789ß9💣2ℝ6789ß"
+
# OK (false negative)
if not key in a_dict:
var = "default"
diff --git a/crates/ruff/resources/test/fixtures/flake8_todos/TD001.py b/crates/ruff/resources/test/fixtures/flake8_todos/TD001.py
new file mode 100644
index 0000000000000..77ff4ae70d081
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_todos/TD001.py
@@ -0,0 +1,9 @@
+# T001 - accepted
+# TODO (evanrittenhouse): this is a valid TODO
+# SOME_OTHER_TAG: this is impossible to determine
+# this is not a TODO
+
+# T001 - errors
+# XXX (evanrittenhouse): this is not fine
+# FIXME (evanrittenhouse): this is not fine
+# foo # XXX: this isn't fine either
diff --git a/crates/ruff/resources/test/fixtures/flake8_todos/TD002.py b/crates/ruff/resources/test/fixtures/flake8_todos/TD002.py
new file mode 100644
index 0000000000000..3c4867516f783
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_todos/TD002.py
@@ -0,0 +1,8 @@
+# T002 - accepted
+# TODO (evanrittenhouse): this has an author
+# TODO(evanrittenhouse): this also has an author
+# T002 - errors
+# TODO: this has no author
+# FIXME: neither does this
+# TODO : and neither does this
+# foo # TODO: this doesn't either
diff --git a/crates/ruff/resources/test/fixtures/flake8_todos/TD003.py b/crates/ruff/resources/test/fixtures/flake8_todos/TD003.py
new file mode 100644
index 0000000000000..f203bfe4f6605
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_todos/TD003.py
@@ -0,0 +1,31 @@
+# TDO003 - accepted
+# TODO: this comment has a link
+# https://github.com/charliermarsh/ruff/issues/3870
+
+# TODO: this comment has an issue
+# TDO-3870
+
+# TDO003 - errors
+# TODO: this comment has no
+# link after it
+
+# TODO: here's a TODO with no link after it
+def foo(x):
+ return x
+
+# TODO: here's a TODO on the last line with no link
+# Here's more content.
+# TDO-3870
+
+# TODO: here's a TODO on the last line with no link
+# Here's more content, with a space.
+
+# TDO-3870
+
+# TODO: here's a TODO without an issue link
+# TODO: followed by a new TODO with an issue link
+# TDO-3870
+
+# foo # TODO: no link!
+
+# TODO: here's a TODO on the last line with no link
diff --git a/crates/ruff/resources/test/fixtures/flake8_todos/TD004.py b/crates/ruff/resources/test/fixtures/flake8_todos/TD004.py
new file mode 100644
index 0000000000000..9fa5d9c50a105
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_todos/TD004.py
@@ -0,0 +1,8 @@
+# T004 - accepted
+# TODO(evanrittenhouse): this has a colon
+# T004 - errors
+# TODO this has no colon
+# TODO(evanrittenhouse 😀) this has no colon
+# FIXME add a colon
+# foo # TODO add a colon
+# TODO this has a colon but it doesn't terminate the tag, so this should throw. https://www.google.com
diff --git a/crates/ruff/resources/test/fixtures/flake8_todos/TD005.py b/crates/ruff/resources/test/fixtures/flake8_todos/TD005.py
new file mode 100644
index 0000000000000..64e8fe5364ab7
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_todos/TD005.py
@@ -0,0 +1,7 @@
+# T005 - accepted
+# TODO(evanrittenhouse): this has text, while the errors do not
+# T005 - errors
+# TODO(evanrittenhouse):
+# TODO(evanrittenhouse)
+# FIXME
+# foo # TODO
diff --git a/crates/ruff/resources/test/fixtures/flake8_todos/TD006.py b/crates/ruff/resources/test/fixtures/flake8_todos/TD006.py
new file mode 100644
index 0000000000000..90fbbe387b078
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_todos/TD006.py
@@ -0,0 +1,6 @@
+# TDO006 - accepted
+# TODO (evanrittenhouse): this is a valid TODO
+# TDO006 - error
+# ToDo (evanrittenhouse): invalid capitalization
+# todo (evanrittenhouse): another invalid capitalization
+# foo # todo: invalid capitalization
diff --git a/crates/ruff/resources/test/fixtures/flake8_todos/TD007.py b/crates/ruff/resources/test/fixtures/flake8_todos/TD007.py
new file mode 100644
index 0000000000000..44915d6eda5c1
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flake8_todos/TD007.py
@@ -0,0 +1,10 @@
+# T007 - accepted
+# TODO(evanrittenhouse): this has a space after a colon
+# TODO: so does this
+# T007 - errors
+# TODO(evanrittenhouse):this has no space after a colon
+# TODO (evanrittenhouse):this doesn't either
+# TODO:neither does this
+# FIXME:and lastly neither does this
+# foo # TODO:this is really the last one
+# TODO this colon doesn't terminate the tag, so don't check it. https://www.google.com
diff --git a/crates/ruff/resources/test/fixtures/flake8_type_checking/TCH002.py b/crates/ruff/resources/test/fixtures/flake8_type_checking/TCH002.py
index 2bda6ff9838a6..82d6d2f10ba4a 100644
--- a/crates/ruff/resources/test/fixtures/flake8_type_checking/TCH002.py
+++ b/crates/ruff/resources/test/fixtures/flake8_type_checking/TCH002.py
@@ -146,3 +146,21 @@ def f():
import pandas as pd
x = dict[pd.DataFrame, pd.DataFrame]
+
+
+def f():
+ import pandas as pd
+
+
+def f():
+ from pandas import DataFrame # noqa: TCH002
+
+ x: DataFrame = 2
+
+
+def f():
+ from pandas import ( # noqa: TCH002
+ DataFrame,
+ )
+
+ x: DataFrame = 2
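TCH002 wants third-party imports that are only used in annotations moved into a `TYPE_CHECKING` block; the new cases add an unused in-function import and two `# noqa: TCH002` suppressions, including one on a multi-line `from` import. For reference, a sketch of the structure the rule normally pushes toward:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from pandas import DataFrame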
diff --git a/crates/ruff/resources/test/fixtures/flake8_type_checking/strict.py b/crates/ruff/resources/test/fixtures/flake8_type_checking/strict.py
index d7f9bda9840ad..4882fefe9a638 100644
--- a/crates/ruff/resources/test/fixtures/flake8_type_checking/strict.py
+++ b/crates/ruff/resources/test/fixtures/flake8_type_checking/strict.py
@@ -2,7 +2,7 @@
def f():
- # Even in strict mode, this shouldn't rase an error, since `pkg` is used at runtime,
+ # Even in strict mode, this shouldn't raise an error, since `pkg` is used at runtime,
# and implicitly imports `pkg.bar`.
import pkg
import pkg.bar
@@ -12,7 +12,7 @@ def test(value: pkg.bar.A):
def f():
- # Even in strict mode, this shouldn't rase an error, since `pkg.bar` is used at
+ # Even in strict mode, this shouldn't raise an error, since `pkg.bar` is used at
# runtime, and implicitly imports `pkg`.
import pkg
import pkg.bar
@@ -22,7 +22,7 @@ def test(value: pkg.A):
def f():
- # In un-strict mode, this shouldn't rase an error, since `pkg` is used at runtime.
+ # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
import pkg
from pkg import A
@@ -31,7 +31,7 @@ def test(value: A):
def f():
- # In un-strict mode, this shouldn't rase an error, since `pkg` is used at runtime.
+ # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
from pkg import A, B
def test(value: A):
@@ -39,7 +39,7 @@ def test(value: A):
def f():
- # Even in strict mode, this shouldn't rase an error, since `pkg.baz` is used at
+ # Even in strict mode, this shouldn't raise an error, since `pkg.baz` is used at
# runtime, and implicitly imports `pkg.bar`.
import pkg.bar
import pkg.baz
@@ -49,9 +49,56 @@ def test(value: pkg.bar.A):
def f():
- # In un-strict mode, this _should_ rase an error, since `pkg` is used at runtime.
+ # In un-strict mode, this _should_ raise an error, since `pkg.bar` isn't used at runtime
import pkg
from pkg.bar import A
def test(value: A):
return pkg.B()
+
+
+def f():
+ # In un-strict mode, this shouldn't raise an error, since `pkg.bar` is used at runtime.
+ import pkg
+ import pkg.bar as B
+
+ def test(value: pkg.A):
+ return B()
+
+
+def f():
+ # In un-strict mode, this shouldn't raise an error, since `pkg.foo.bar` is used at runtime.
+ import pkg.foo as F
+ import pkg.foo.bar as B
+
+ def test(value: F.Foo):
+ return B()
+
+
+def f():
+ # In un-strict mode, this shouldn't raise an error, since `pkg.foo.bar` is used at runtime.
+ import pkg
+ import pkg.foo.bar as B
+
+ def test(value: pkg.A):
+ return B()
+
+
+def f():
+ # In un-strict mode, this _should_ raise an error, since `pkg` isn't used at runtime.
+ # Note that `pkg` is a prefix of `pkgfoo` which are both different modules. This is
+ # testing the implementation.
+ import pkg
+ import pkgfoo.bar as B
+
+ def test(value: pkg.A):
+ return B()
+
+
+def f():
+ # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
+ import pkg.bar as B
+ import pkg.foo as F
+
+ def test(value: F.Foo):
+ return B.Bar()
diff --git a/crates/ruff/resources/test/fixtures/flynt/FLY002.py b/crates/ruff/resources/test/fixtures/flynt/FLY002.py
new file mode 100644
index 0000000000000..2067b12d5da92
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/flynt/FLY002.py
@@ -0,0 +1,18 @@
+import secrets
+from random import random, choice
+
+a = "Hello"
+ok1 = " ".join([a, " World"]) # OK
+ok2 = "".join(["Finally, ", a, " World"]) # OK
+ok3 = "x".join(("1", "2", "3")) # OK
+ok4 = "y".join([1, 2, 3]) # Technically OK, though would've been an error originally
+ok5 = "a".join([random(), random()]) # OK (simple calls)
+ok6 = "a".join([secrets.token_urlsafe(), secrets.token_hex()]) # OK (attr calls)
+
+nok1 = "x".join({"4", "5", "yee"}) # Not OK (set)
+nok2 = a.join(["1", "2", "3"]) # Not OK (not a static joiner)
+nok3 = "a".join(a) # Not OK (not a static joinee)
+nok4 = "a".join([a, a, *a]) # Not OK (not a static length)
+nok5 = "a".join([choice("flarp")]) # Not OK (not a simple call)
+nok6 = "a".join(x for x in "feefoofum") # Not OK (generator)
+nok7 = "a".join([f"foo{8}", "bar"]) # Not OK (contains an f-string)
diff --git a/crates/ruff/resources/test/fixtures/isort/fit_line_length_comment.py b/crates/ruff/resources/test/fixtures/isort/fit_line_length_comment.py
index 11f1ee1f6e6ca..94648e045ecb7 100644
--- a/crates/ruff/resources/test/fixtures/isort/fit_line_length_comment.py
+++ b/crates/ruff/resources/test/fixtures/isort/fit_line_length_comment.py
@@ -2,3 +2,7 @@
# Don't take this comment into account when determining whether the next import can fit on one line.
from b import c
from d import e # Do take this comment into account when determining whether the next import can fit on one line.
+# The next import fits on one line.
+from f import g # 012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ
+# The next import doesn't fit on one line.
+from h import i # 012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9012ß9💣2ℝ9
diff --git a/crates/ruff/resources/test/fixtures/isort/required_imports/off.py b/crates/ruff/resources/test/fixtures/isort/required_imports/off.py
new file mode 100644
index 0000000000000..62590951c4f91
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/isort/required_imports/off.py
@@ -0,0 +1,4 @@
+# isort: off
+
+x = 1
+# isort: on
diff --git a/crates/ruff/resources/test/fixtures/isort/split.py b/crates/ruff/resources/test/fixtures/isort/split.py
index acdc032fe5203..e4beaec56334a 100644
--- a/crates/ruff/resources/test/fixtures/isort/split.py
+++ b/crates/ruff/resources/test/fixtures/isort/split.py
@@ -6,7 +6,16 @@
import c
import d
-# isort: split
+# isort: split
import a
import b
+
+if True:
+ import C
+ import A
+
+ # isort: split
+
+ import D
+ import B
diff --git a/crates/ruff/resources/test/fixtures/pandas_vet/PD002.py b/crates/ruff/resources/test/fixtures/pandas_vet/PD002.py
index 094fd0e5b08a6..99dc33a327395 100644
--- a/crates/ruff/resources/test/fixtures/pandas_vet/PD002.py
+++ b/crates/ruff/resources/test/fixtures/pandas_vet/PD002.py
@@ -22,3 +22,7 @@
x.drop(["a"], axis=1, **kwargs, inplace=True)
x.drop(["a"], axis=1, inplace=True, **kwargs)
f(x.drop(["a"], axis=1, inplace=True))
+
+x.apply(lambda x: x.sort_values('a', inplace=True))
+import torch
+torch.m.ReLU(inplace=True) # safe because this isn't a pandas call
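PD002 discourages `inplace=True` in pandas calls in favor of reassigning the result; the torch call above checks that non-pandas APIs are not flagged. A minimal sketch of the preferred pattern, assuming `x` is a DataFrame:

    x = x.drop(["a"], axis=1)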
diff --git a/crates/ruff/resources/test/fixtures/pycodestyle/E11.py b/crates/ruff/resources/test/fixtures/pycodestyle/E11.py
index f8ace8030b9dc..9f94aea1011c9 100644
--- a/crates/ruff/resources/test/fixtures/pycodestyle/E11.py
+++ b/crates/ruff/resources/test/fixtures/pycodestyle/E11.py
@@ -40,3 +40,27 @@ def start():
#: E117 W191
def start():
print()
+#: E112
+if False: #
+print()
+#:
+if False:
+ print()
+#:
+if False: #
+ print()
+#:
+if False:
+ print()
+
+ print()
+#:
+if False:
+ print()
+ if False:
+
+ print()
+#:
+if False:
+
+ print()
diff --git a/crates/ruff/resources/test/fixtures/pycodestyle/E20.py b/crates/ruff/resources/test/fixtures/pycodestyle/E20.py
index 20c6dfd805d74..2e8f5f7d90ce0 100644
--- a/crates/ruff/resources/test/fixtures/pycodestyle/E20.py
+++ b/crates/ruff/resources/test/fixtures/pycodestyle/E20.py
@@ -76,3 +76,11 @@
a[b1, :] == a[b1, ...]
b = a[:, b1]
#:
+
+#: E201:1:6
+spam[ ~ham]
+
+#: Okay
+x = [ #
+ 'some value',
+]
diff --git a/crates/ruff/resources/test/fixtures/pycodestyle/E22.py b/crates/ruff/resources/test/fixtures/pycodestyle/E22.py
index 7ea27927e50fe..556984df20b45 100644
--- a/crates/ruff/resources/test/fixtures/pycodestyle/E22.py
+++ b/crates/ruff/resources/test/fixtures/pycodestyle/E22.py
@@ -160,6 +160,7 @@ def halves(n):
*a, b = (1, 2, 3)
+@decorator
def squares(n):
return (i**2 for i in range(n))
@@ -168,4 +169,14 @@ def squares(n):
-6: "\u03bc", # Greek letter mu
-3: "m",
}
+
+i = (
+ i + #
+ 1
+)
+
+x[~y]
+
+if i == -1:
+ pass
#:
diff --git a/crates/ruff/resources/test/fixtures/pycodestyle/E23.py b/crates/ruff/resources/test/fixtures/pycodestyle/E23.py
index f47b124ed66c0..e2c6b9fe661a1 100644
--- a/crates/ruff/resources/test/fixtures/pycodestyle/E23.py
+++ b/crates/ruff/resources/test/fixtures/pycodestyle/E23.py
@@ -18,3 +18,13 @@ def foo() -> None:
#: E231
if (1,2):
pass
+
+#: Okay
+a = (1,\
+2)
+
+#: E231:2:20
+mdtypes_template = {
+ 'tag_full': [('mdtype', 'u4'), ('byte_count', 'u4')],
+ 'tag_smalldata':[('byte_count_mdtype', 'u4'), ('data', 'S4')],
+}
diff --git a/crates/ruff/resources/test/fixtures/pycodestyle/E26.py b/crates/ruff/resources/test/fixtures/pycodestyle/E26.py
index 2cdd4cf5425b0..9d35553dc521a 100644
--- a/crates/ruff/resources/test/fixtures/pycodestyle/E26.py
+++ b/crates/ruff/resources/test/fixtures/pycodestyle/E26.py
@@ -64,3 +64,11 @@ def oof():
#: E262:2:9
# (Two spaces) Ok for block comment
a = 42 # (Two spaces)
+
+#: E265:5:1
+### Means test is not done yet
+# E Means test is giving error (E)
+# F Means test is failing (F)
+# EF Means test is giving error and Failing
+#! Means test is segfaulting
+# 8 Means test runs forever
diff --git a/crates/ruff/resources/test/fixtures/pycodestyle/E27.py b/crates/ruff/resources/test/fixtures/pycodestyle/E27.py
index ca06930698bfc..576e43ae01300 100644
--- a/crates/ruff/resources/test/fixtures/pycodestyle/E27.py
+++ b/crates/ruff/resources/test/fixtures/pycodestyle/E27.py
@@ -56,3 +56,7 @@
def f():
print((yield))
x = (yield)
+#: Okay
+if (a and
+ b):
+ pass
diff --git a/crates/ruff/resources/test/fixtures/pycodestyle/E402.py b/crates/ruff/resources/test/fixtures/pycodestyle/E402.py
index b95b2ae1e832a..81e6306c7af11 100644
--- a/crates/ruff/resources/test/fixtures/pycodestyle/E402.py
+++ b/crates/ruff/resources/test/fixtures/pycodestyle/E402.py
@@ -19,7 +19,7 @@
else:
import e
-y = x + 1
+__some__magic = 1
import f
diff --git a/crates/ruff/resources/test/fixtures/pycodestyle/E501_2.py b/crates/ruff/resources/test/fixtures/pycodestyle/E501_2.py
new file mode 100644
index 0000000000000..18ea839392ba1
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pycodestyle/E501_2.py
@@ -0,0 +1,11 @@
+a = """ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A6"""
+a = """ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A6"""
+
+b = """ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A6"""
+b = """ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A6"""
+
+c = """2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A6"""
+c = """2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A6"""
+
+d = """💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A6"""
+d = """💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A67ß9💣2ℝ4A6"""
diff --git a/crates/ruff/resources/test/fixtures/pycodestyle/W505.py b/crates/ruff/resources/test/fixtures/pycodestyle/W505.py
index d8085da9892a8..c297b8c1e9e89 100644
--- a/crates/ruff/resources/test/fixtures/pycodestyle/W505.py
+++ b/crates/ruff/resources/test/fixtures/pycodestyle/W505.py
@@ -2,7 +2,7 @@
"""Here's a top-level docstring that's over the limit."""
-def f():
+def f1():
"""Here's a docstring that's also over the limit."""
x = 1 # Here's a comment that's over the limit, but it's not standalone.
@@ -16,3 +16,16 @@ def f():
"This is also considered a docstring, and is over the limit."
+
+
+def f2():
+ """Here's a multi-line docstring.
+
+ It's over the limit on this line, which isn't the first line in the docstring.
+ """
+
+
+def f3():
+ """Here's a multi-line docstring.
+
+ It's over the limit on this line, which isn't the first line in the docstring."""
diff --git a/crates/ruff/resources/test/fixtures/pycodestyle/W505_utf_8.py b/crates/ruff/resources/test/fixtures/pycodestyle/W505_utf_8.py
new file mode 100644
index 0000000000000..6e177dad8f0ea
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pycodestyle/W505_utf_8.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+"""Here's a top-level ß9💣2ℝing that's over theß9💣2ℝ."""
+
+
+def f1():
+ """Here's a ß9💣2ℝing that's also over theß9💣2ℝ."""
+
+ x = 1 # Here's a comment that's over theß9💣2ℝ, but it's not standalone.
+
+ # Here's a standalone comment that's over theß9💣2ℝ.
+
+ x = 2
+ # Another standalone that is preceded by a newline and indent toke and is over theß9💣2ℝ.
+
+ print("Here's a string that's over theß9💣2ℝ, but it's not a ß9💣2ℝing.")
+
+
+"This is also considered a ß9💣2ℝing, and is over theß9💣2ℝ."
+
+
+def f2():
+ """Here's a multi-line ß9💣2ℝing.
+
+ It's over theß9💣2ℝ on this line, which isn't the first line in the ß9💣2ℝing.
+ """
+
+
+def f3():
+ """Here's a multi-line ß9💣2ℝing.
+
+ It's over theß9💣2ℝ on this line, which isn't the first line in the ß9💣2ℝing."""
diff --git a/crates/ruff/resources/test/fixtures/pycodestyle/shebang.py b/crates/ruff/resources/test/fixtures/pycodestyle/shebang.py
new file mode 100644
index 0000000000000..2d5d0e2441c52
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pycodestyle/shebang.py
@@ -0,0 +1,4 @@
+#!/usr/bin/python
+#
+#!
+#:
diff --git a/crates/ruff/resources/test/fixtures/pydocstyle/D403.py b/crates/ruff/resources/test/fixtures/pydocstyle/D403.py
index 129de7d11a518..d90acb358efab 100644
--- a/crates/ruff/resources/test/fixtures/pydocstyle/D403.py
+++ b/crates/ruff/resources/test/fixtures/pydocstyle/D403.py
@@ -13,3 +13,15 @@ def another_function():
def utf8_function():
"""éste docstring is capitalized."""
+
+def uppercase_char_not_possible():
+ """'args' is not capitalized."""
+
+def non_alphabetic():
+ """th!is is not capitalized."""
+
+def non_ascii():
+ """th•s is not capitalized."""
+
+def all_caps():
+ """th•s is not capitalized."""
diff --git a/crates/ruff/resources/test/fixtures/pydocstyle/all.py b/crates/ruff/resources/test/fixtures/pydocstyle/all.py
new file mode 100644
index 0000000000000..ffe230d817503
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pydocstyle/all.py
@@ -0,0 +1,18 @@
+def public_func():
+ pass
+
+
+def private_func():
+ pass
+
+
+class PublicClass:
+ class PublicNestedClass:
+ pass
+
+
+class PrivateClass:
+ pass
+
+
+__all__ = ("public_func", "PublicClass")
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F401_12.py b/crates/ruff/resources/test/fixtures/pyflakes/F401_12.py
new file mode 100644
index 0000000000000..f3ba7c360c777
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F401_12.py
@@ -0,0 +1,10 @@
+"""Test: module bindings are preferred over local bindings, for deferred annotations."""
+
+from __future__ import annotations
+
+import datetime
+from typing import Optional
+
+
+class Class:
+ datetime: Optional[datetime.datetime]
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F401_13.py b/crates/ruff/resources/test/fixtures/pyflakes/F401_13.py
new file mode 100644
index 0000000000000..49ba589ae7d75
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F401_13.py
@@ -0,0 +1,12 @@
+"""Test: module bindings are preferred over local bindings, for deferred annotations."""
+
+from __future__ import annotations
+
+from typing import TypeAlias, List
+
+
+class Class:
+ List: TypeAlias = List
+
+ def bar(self) -> List:
+ pass
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F401_14.py b/crates/ruff/resources/test/fixtures/pyflakes/F401_14.py
new file mode 100644
index 0000000000000..6e7bb3695cd84
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F401_14.py
@@ -0,0 +1,8 @@
+"""Test: module bindings are preferred over local bindings, for deferred annotations."""
+
+import datetime
+from typing import Optional
+
+
+class Class:
+ datetime: "Optional[datetime.datetime]"
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F401_15.py b/crates/ruff/resources/test/fixtures/pyflakes/F401_15.py
new file mode 100644
index 0000000000000..ac4b90b2c4cb4
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F401_15.py
@@ -0,0 +1,9 @@
+from typing import TYPE_CHECKING
+from django.db.models import ForeignKey
+
+if TYPE_CHECKING:
+ from pathlib import Path
+
+
+class Foo:
+ var = ForeignKey["Path"]()
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F401_16.py b/crates/ruff/resources/test/fixtures/pyflakes/F401_16.py
new file mode 100644
index 0000000000000..dd815bb9007ae
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F401_16.py
@@ -0,0 +1,15 @@
+"""Test that `__all__` exports are respected even with multiple declarations."""
+
+import random
+
+
+def some_dependency_check():
+ return random.uniform(0.0, 1.0) > 0.49999
+
+
+if some_dependency_check():
+ import math
+
+ __all__ = ["math"]
+else:
+ __all__ = []
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F401_17.py b/crates/ruff/resources/test/fixtures/pyflakes/F401_17.py
new file mode 100644
index 0000000000000..0d243eac55195
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F401_17.py
@@ -0,0 +1,32 @@
+"""Test that runtime typing references are properly attributed to scoped imports."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, cast
+
+if TYPE_CHECKING:
+ from threading import Thread
+
+
+def fn(thread: Thread):
+ from threading import Thread
+
+ # The `Thread` on the left-hand side should resolve to the `Thread` imported at the
+ # top level.
+ x: Thread
+
+
+def fn(thread: Thread):
+ from threading import Thread
+
+ # The `Thread` on the left-hand side should resolve to the `Thread` imported at the
+ # top level.
+ cast("Thread", thread)
+
+
+def fn(thread: Thread):
+ from threading import Thread
+
+    # The `Thread` on the right-hand side should resolve to the `Thread` imported within
+ # `fn`.
+ cast(Thread, thread)
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F522.py b/crates/ruff/resources/test/fixtures/pyflakes/F522.py
index f84a28d53ec20..18e3d072f26a1 100644
--- a/crates/ruff/resources/test/fixtures/pyflakes/F522.py
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F522.py
@@ -2,3 +2,6 @@
"{bar}{}".format(1, bar=2, spam=3) # F522
"{bar:{spam}}".format(bar=2, spam=3) # No issues
"{bar:{spam}}".format(bar=2, spam=3, eggs=4, ham=5) # F522
+# Not fixable
+(''
+ .format(x=2))
\ No newline at end of file
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F523.py b/crates/ruff/resources/test/fixtures/pyflakes/F523.py
index 2055dd3a3144f..d3dd1b68db7d1 100644
--- a/crates/ruff/resources/test/fixtures/pyflakes/F523.py
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F523.py
@@ -17,3 +17,17 @@
"{0}{1}".format(1, *args) # No issues
"{0}{1}".format(1, 2, *args) # No issues
"{0}{1}".format(1, 2, 3, *args) # F523
+
+# With nested quotes
+"''1{0}".format(1, 2, 3) # F523
+"\"\"{1}{0}".format(1, 2, 3) # F523
+'""{1}{0}'.format(1, 2, 3) # F523
+
+# With modified indexes
+"{1}{2}".format(1, 2, 3) # F523, # F524
+"{1}{3}".format(1, 2, 3, 4) # F523, # F524
+"{1} {8}".format(0, 1) # F523, # F524
+
+# Not fixable
+(''
+.format(2))
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F524.py b/crates/ruff/resources/test/fixtures/pyflakes/F524.py
index 30cc80ea0ed72..feffa38af3c9a 100644
--- a/crates/ruff/resources/test/fixtures/pyflakes/F524.py
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F524.py
@@ -4,3 +4,4 @@
"{0} {bar}".format(1) # F524
"{0} {bar}".format() # F524
"{bar} {0}".format() # F524
+"{1} {8}".format(0, 1)
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F811_23.py b/crates/ruff/resources/test/fixtures/pyflakes/F811_23.py
new file mode 100644
index 0000000000000..0332e48bc8401
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F811_23.py
@@ -0,0 +1,4 @@
+"""Test that shadowing an explicit re-export produces a warning."""
+
+import foo as foo
+import bar as foo
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F811_24.py b/crates/ruff/resources/test/fixtures/pyflakes/F811_24.py
new file mode 100644
index 0000000000000..0207b9329dab0
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F811_24.py
@@ -0,0 +1,5 @@
+"""Test that shadowing a `__future__` import does not produce a warning."""
+
+from __future__ import annotations
+
+import annotations
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F841_0.py b/crates/ruff/resources/test/fixtures/pyflakes/F841_0.py
index c0e33502d6930..94bc0b6ca507d 100644
--- a/crates/ruff/resources/test/fixtures/pyflakes/F841_0.py
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F841_0.py
@@ -126,3 +126,22 @@ def f(x: int):
def f():
if any((key := (value := x)) for x in ["ok"]):
print(key)
+
+
+def f() -> None:
+ is_connected = False
+
+ class Foo:
+ @property
+ def is_connected(self):
+ nonlocal is_connected
+ return is_connected
+
+ def do_thing(self):
+ # This should resolve to the `is_connected` in the function scope.
+ nonlocal is_connected
+ print(is_connected)
+
+ obj = Foo()
+ obj.do_thing()
+
diff --git a/crates/ruff/resources/test/fixtures/pyflakes/F841_3.py b/crates/ruff/resources/test/fixtures/pyflakes/F841_3.py
index 89bae2b6ef63b..28d5af1f3be58 100644
--- a/crates/ruff/resources/test/fixtures/pyflakes/F841_3.py
+++ b/crates/ruff/resources/test/fixtures/pyflakes/F841_3.py
@@ -83,6 +83,11 @@ def f():
pass
+def f():
+ with (Nested(m)) as (cm):
+ pass
+
+
def f():
toplevel = tt = lexer.get_token()
if not tt:
diff --git a/crates/ruff/resources/test/fixtures/pygrep-hooks/PGH005_0.py b/crates/ruff/resources/test/fixtures/pygrep-hooks/PGH005_0.py
new file mode 100644
index 0000000000000..cb80cc8c28048
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pygrep-hooks/PGH005_0.py
@@ -0,0 +1,19 @@
+# Errors
+assert my_mock.not_called()
+assert my_mock.called_once_with()
+assert my_mock.not_called
+assert my_mock.called_once_with
+my_mock.assert_not_called
+my_mock.assert_called
+my_mock.assert_called_once_with
+my_mock.assert_called_once_with
+MyMock.assert_called_once_with
+
+# OK
+assert my_mock.call_count == 1
+assert my_mock.called
+my_mock.assert_not_called()
+my_mock.assert_called()
+my_mock.assert_called_once_with()
+"""like :meth:`Mock.assert_called_once_with`"""
+"""like :meth:`MagicMock.assert_called_once_with`"""
diff --git a/crates/ruff/resources/test/fixtures/pylint/duplicate_bases.py b/crates/ruff/resources/test/fixtures/pylint/duplicate_bases.py
new file mode 100644
index 0000000000000..491421ccf5eb2
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pylint/duplicate_bases.py
@@ -0,0 +1,24 @@
+###
+# Errors.
+###
+class A:
+ ...
+
+
+class B(A, A):
+ ...
+
+
+###
+# Non-errors.
+###
+class C:
+ ...
+
+
+class D(C):
+ ...
+
+
+class E(A, C):
+ ...
diff --git a/crates/ruff/resources/test/fixtures/pylint/invalid_return_type_str.py b/crates/ruff/resources/test/fixtures/pylint/invalid_return_type_str.py
new file mode 100644
index 0000000000000..a47ed1b306ab5
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pylint/invalid_return_type_str.py
@@ -0,0 +1,28 @@
+class Str:
+ def __str__(self):
+ return 1
+
+class Float:
+ def __str__(self):
+ return 3.05
+
+class Int:
+ def __str__(self):
+ return 0
+
+class Bool:
+ def __str__(self):
+ return False
+
+class Str2:
+ def __str__(self):
+ x = "ruff"
+ return x
+
+# TODO fixme once Ruff has better type checking
+def return_int():
+ return 3
+
+class ComplexReturn:
+ def __str__(self):
+ return return_int()
\ No newline at end of file
diff --git a/crates/ruff/resources/test/fixtures/pylint/iteration_over_set.py b/crates/ruff/resources/test/fixtures/pylint/iteration_over_set.py
new file mode 100644
index 0000000000000..1b22612e52f9c
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pylint/iteration_over_set.py
@@ -0,0 +1,38 @@
+# Errors
+
+for item in {"apples", "lemons", "water"}: # flags in-line set literals
+ print(f"I like {item}.")
+
+numbers_list = [i for i in {1, 2, 3}] # flags sets in list comprehensions
+
+numbers_set = {i for i in {1, 2, 3}} # flags sets in set comprehensions
+
+numbers_dict = {str(i): i for i in {1, 2, 3}} # flags sets in dict comprehensions
+
+numbers_gen = (i for i in {1, 2, 3}) # flags sets in generator expressions
+
+# Non-errors
+
+items = {"apples", "lemons", "water"}
+for item in items: # only complains about in-line sets (as per Pylint)
+ print(f"I like {item}.")
+
+for item in ["apples", "lemons", "water"]: # lists are fine
+ print(f"I like {item}.")
+
+for item in ("apples", "lemons", "water"): # tuples are fine
+ print(f"I like {item}.")
+
+numbers_list = [i for i in [1, 2, 3]] # lists in comprehensions are fine
+
+numbers_set = {i for i in (1, 2, 3)} # tuples in comprehensions are fine
+
+numbers_dict = {str(i): i for i in [1, 2, 3]} # lists in dict comprehensions are fine
+
+numbers_gen = (i for i in (1, 2, 3)) # tuples in generator expressions are fine
+
+for item in set(("apples", "lemons", "water")): # set constructor is fine
+ print(f"I like {item}.")
+
+for number in {i for i in range(10)}: # set comprehensions are fine
+ print(number)
diff --git a/crates/ruff/resources/test/fixtures/pylint/named_expr_without_context.py b/crates/ruff/resources/test/fixtures/pylint/named_expr_without_context.py
new file mode 100644
index 0000000000000..2dcc56cffd206
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pylint/named_expr_without_context.py
@@ -0,0 +1,19 @@
+# Errors
+(a := 42)
+if True:
+ (b := 1)
+
+
+class Foo:
+ (c := 1)
+
+
+# OK
+if a := 42:
+ print("Success")
+
+a = 0
+while (a := a + 1) < 10:
+ print("Correct")
+
+a = (b := 1)
diff --git a/crates/ruff/resources/test/fixtures/pylint/nested_min_max.py b/crates/ruff/resources/test/fixtures/pylint/nested_min_max.py
new file mode 100644
index 0000000000000..2c66580305c6e
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pylint/nested_min_max.py
@@ -0,0 +1,38 @@
+min(1, 2, 3)
+min(1, min(2, 3))
+min(1, min(2, min(3, 4)))
+min(1, foo("a", "b"), min(3, 4))
+min(1, max(2, 3))
+max(1, 2, 3)
+max(1, max(2, 3))
+max(1, max(2, max(3, 4)))
+max(1, foo("a", "b"), max(3, 4))
+
+# These should not trigger; we do not flag cases with keyword args.
+min(1, min(2, 3), key=test)
+min(1, min(2, 3, key=test))
+# This will still trigger, to merge the calls without keyword args.
+min(1, min(2, 3, key=test), min(4, 5))
+
+# Don't provide a fix if there are comments within the call.
+min(
+ 1, # This is a comment.
+ min(2, 3),
+)
+
+# Handle iterable expressions.
+min(1, min(a))
+min(1, min(i for i in range(10)))
+max(1, max(a))
+max(1, max(i for i in range(10)))
+
+tuples_list = [
+ (1, 2),
+ (2, 3),
+ (3, 4),
+ (4, 5),
+ (5, 6),
+]
+
+min(min(tuples_list))
+max(max(tuples_list))
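The nested-min-max rule flattens nested calls to the same builtin when the arguments are plain positional values, since the nested call is then equivalent to a single call over all of them; keyword arguments, interleaved comments, and iterable arguments complicate or block the fix, as annotated above. Sketch:

    min(1, 2, 3)       # from min(1, min(2, 3))
    max(1, 2, 3, 4)    # from max(1, max(2, max(3, 4)))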
diff --git a/crates/ruff/resources/test/fixtures/pylint/nonlocal_without_binding.py b/crates/ruff/resources/test/fixtures/pylint/nonlocal_without_binding.py
index baa823565b599..154ee23bec74a 100644
--- a/crates/ruff/resources/test/fixtures/pylint/nonlocal_without_binding.py
+++ b/crates/ruff/resources/test/fixtures/pylint/nonlocal_without_binding.py
@@ -17,3 +17,30 @@ def f():
def f():
nonlocal y
+
+
+def f():
+ x = 1
+
+ def g():
+ nonlocal x
+
+ del x
+
+
+def f():
+ def g():
+ nonlocal x
+
+ del x
+
+
+def f():
+ try:
+ pass
+ except Exception as x:
+ pass
+
+ def g():
+ nonlocal x
+ x = 2
diff --git a/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_10.py b/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_10.py
new file mode 100644
index 0000000000000..4344aec3d5219
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_10.py
@@ -0,0 +1,8 @@
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from sys import exit as bar
+
+
+def main():
+ exit(0)
diff --git a/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_7.py b/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_7.py
new file mode 100644
index 0000000000000..2771bfa6039cf
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_7.py
@@ -0,0 +1,5 @@
+def main():
+ exit(0)
+
+
+import functools
diff --git a/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_8.py b/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_8.py
new file mode 100644
index 0000000000000..822bec52c77be
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_8.py
@@ -0,0 +1,5 @@
+from sys import argv
+
+
+def main():
+ exit(0)
diff --git a/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_9.py b/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_9.py
new file mode 100644
index 0000000000000..326901d18378d
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pylint/sys_exit_alias_9.py
@@ -0,0 +1,5 @@
+def main():
+ exit(0)
+
+
+from sys import argv
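The sys_exit_alias fixtures exercise where the `sys` import ends up when the `exit()` builtin is replaced; the rewrite itself is always the explicit form. Sketch:

    import sys


    def main():
        sys.exit(0)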
diff --git a/crates/ruff/resources/test/fixtures/pylint/too_many_arguments.py b/crates/ruff/resources/test/fixtures/pylint/too_many_arguments.py
index 7b272f50894f6..5271910cc53a4 100644
--- a/crates/ruff/resources/test/fixtures/pylint/too_many_arguments.py
+++ b/crates/ruff/resources/test/fixtures/pylint/too_many_arguments.py
@@ -22,13 +22,13 @@ def f(x=1, y=1, z=1): # OK
pass
-def f(x, y, z, /, u, v, w): # OK
+def f(x, y, z, /, u, v, w): # Too many arguments (6/5)
pass
-def f(x, y, z, *, u, v, w): # OK
+def f(x, y, z, *, u, v, w): # Too many arguments (6/5)
pass
-def f(x, y, z, a, b, c, *, u, v, w): # Too many arguments (6/5)
+def f(x, y, z, a, b, c, *, u, v, w): # Too many arguments (9/5)
pass
diff --git a/crates/ruff/resources/test/fixtures/pylint/unexpected_special_method_signature.py b/crates/ruff/resources/test/fixtures/pylint/unexpected_special_method_signature.py
index f5270e04594d6..0748872abce6a 100644
--- a/crates/ruff/resources/test/fixtures/pylint/unexpected_special_method_signature.py
+++ b/crates/ruff/resources/test/fixtures/pylint/unexpected_special_method_signature.py
@@ -1,19 +1,16 @@
class TestClass:
def __bool__(self):
...
-
+
def __bool__(self, x): # too many mandatory args
...
-
+
def __bool__(self, x=1): # additional optional args OK
...
-
- def __bool__(self, *args): # varargs OK
- ...
-
+
def __bool__(): # ignored; should be caughty by E0211/N805
...
-
+
@staticmethod
def __bool__():
...
@@ -21,31 +18,58 @@ def __bool__():
@staticmethod
def __bool__(x): # too many mandatory args
...
-
+
@staticmethod
def __bool__(x=1): # additional optional args OK
...
-
+
def __eq__(self, other): # multiple args
...
-
+
def __eq__(self, other=1): # expected arg is optional
...
-
+
def __eq__(self): # too few mandatory args
...
-
+
def __eq__(self, other, other_other): # too many mandatory args
...
-
- def __round__(self): # allow zero additional args.
+
+ def __round__(self): # allow zero additional args
...
-
- def __round__(self, x): # allow one additional args.
+
+    def __round__(self, x): # allow one additional arg
...
-
+
def __round__(self, x, y): # disallow 2 args
...
-
+
def __round__(self, x, y, z=2): # disallow 3 args even when one is optional
- ...
\ No newline at end of file
+ ...
+
+ def __eq__(self, *args): # ignore *args
+ ...
+
+ def __eq__(self, x, *args): # extra *args is ok
+ ...
+
+ def __eq__(self, x, y, *args): # too many args with *args
+ ...
+
+ def __round__(self, *args): # allow zero additional args
+ ...
+
+    def __round__(self, x, *args): # allow one additional arg
+ ...
+
+ def __round__(self, x, y, *args): # disallow 2 args
+ ...
+
+ def __eq__(self, **kwargs): # ignore **kwargs
+ ...
+
+ def __eq__(self, /, other=42): # ignore positional-only args
+ ...
+
+    def __eq__(self, *, other=42): # ignore keyword-only args
+ ...
diff --git a/crates/ruff/resources/test/fixtures/pylint/yield_from_in_async_function.py b/crates/ruff/resources/test/fixtures/pylint/yield_from_in_async_function.py
new file mode 100644
index 0000000000000..8130c177277cf
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pylint/yield_from_in_async_function.py
@@ -0,0 +1,7 @@
+async def success():
+ yield 42
+
+
+async def fail():
+ l = (1, 2, 3)
+ yield from l
diff --git a/crates/ruff/resources/test/fixtures/pyupgrade/UP006.py b/crates/ruff/resources/test/fixtures/pyupgrade/UP006_0.py
similarity index 85%
rename from crates/ruff/resources/test/fixtures/pyupgrade/UP006.py
rename to crates/ruff/resources/test/fixtures/pyupgrade/UP006_0.py
index 75a369bfb9254..106366ec2cc2b 100644
--- a/crates/ruff/resources/test/fixtures/pyupgrade/UP006.py
+++ b/crates/ruff/resources/test/fixtures/pyupgrade/UP006_0.py
@@ -56,3 +56,11 @@ def f(x: "List['Li' 'st[str]']") -> None:
def f(x: "Li" "st['List[str]']") -> None:
...
+
+
+def f(x: typing.Deque[str]) -> None:
+ ...
+
+
+def f(x: typing.DefaultDict[str, str]) -> None:
+ ...
diff --git a/crates/ruff/resources/test/fixtures/pyupgrade/UP006_1.py b/crates/ruff/resources/test/fixtures/pyupgrade/UP006_1.py
new file mode 100644
index 0000000000000..1f56c64011a15
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyupgrade/UP006_1.py
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+import typing
+
+if typing.TYPE_CHECKING:
+ from collections import defaultdict
+
+
+def f(x: typing.DefaultDict[str, str]) -> None:
+ ...
diff --git a/crates/ruff/resources/test/fixtures/pyupgrade/UP006_2.py b/crates/ruff/resources/test/fixtures/pyupgrade/UP006_2.py
new file mode 100644
index 0000000000000..eb4f5e8864f33
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyupgrade/UP006_2.py
@@ -0,0 +1,8 @@
+import typing
+
+if typing.TYPE_CHECKING:
+ from collections import defaultdict
+
+
+def f(x: typing.DefaultDict[str, str]) -> None:
+ ...
diff --git a/crates/ruff/resources/test/fixtures/pyupgrade/UP006_3.py b/crates/ruff/resources/test/fixtures/pyupgrade/UP006_3.py
new file mode 100644
index 0000000000000..463365102feb1
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyupgrade/UP006_3.py
@@ -0,0 +1,8 @@
+import typing
+
+if typing.TYPE_CHECKING:
+ from collections import defaultdict
+
+
+def f(x: "typing.DefaultDict[str, str]") -> None:
+ ...
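The new UP006 fixtures probe how a `defaultdict` import that only exists under `TYPE_CHECKING` interacts with the rewrite of `typing.DefaultDict` to the PEP 585 generic, across deferred, quoted, and plain annotations. When the fix does apply, the target spelling is roughly:

    def f(x: defaultdict[str, str]) -> None:
        ...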
diff --git a/crates/ruff/resources/test/fixtures/pyupgrade/UP012.py b/crates/ruff/resources/test/fixtures/pyupgrade/UP012.py
index af589f15f51a6..266e8431ccf0e 100644
--- a/crates/ruff/resources/test/fixtures/pyupgrade/UP012.py
+++ b/crates/ruff/resources/test/fixtures/pyupgrade/UP012.py
@@ -59,3 +59,14 @@
R"foo\o".encode("utf-8") # br"foo\o"
U"foo".encode("utf-8") # b"foo"
print("foo".encode()) # print(b"foo")
+
+# `encode` on parenthesized strings.
+(
+ "abc"
+ "def"
+).encode()
+
+((
+ "abc"
+ "def"
+)).encode()
diff --git a/crates/ruff/resources/test/fixtures/pyupgrade/UP018.py b/crates/ruff/resources/test/fixtures/pyupgrade/UP018.py
index 4f97f3a0bb6d6..adcaa1c4802b5 100644
--- a/crates/ruff/resources/test/fixtures/pyupgrade/UP018.py
+++ b/crates/ruff/resources/test/fixtures/pyupgrade/UP018.py
@@ -15,6 +15,7 @@
bytes(b"foo"
b"bar")
bytes("foo")
+f"{f'{str()}'}"
# These become string or byte literals
str()
@@ -25,3 +26,4 @@
bytes(b"foo")
bytes(b"""
foo""")
+f"{str()}"
diff --git a/crates/ruff/resources/test/fixtures/pyupgrade/UP032.py b/crates/ruff/resources/test/fixtures/pyupgrade/UP032_0.py
similarity index 100%
rename from crates/ruff/resources/test/fixtures/pyupgrade/UP032.py
rename to crates/ruff/resources/test/fixtures/pyupgrade/UP032_0.py
diff --git a/crates/ruff/resources/test/fixtures/pyupgrade/UP032_1.py b/crates/ruff/resources/test/fixtures/pyupgrade/UP032_1.py
new file mode 100644
index 0000000000000..884f2d9648b11
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyupgrade/UP032_1.py
@@ -0,0 +1 @@
+"{} {}".format(a, b) # Intentionally at start-of-file, to ensure graceful handling.
diff --git a/crates/ruff/resources/test/fixtures/pyupgrade/UP032_2.py b/crates/ruff/resources/test/fixtures/pyupgrade/UP032_2.py
new file mode 100644
index 0000000000000..2987164454638
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyupgrade/UP032_2.py
@@ -0,0 +1,28 @@
+# Errors
+"{.real}".format(1)
+"{0.real}".format(1)
+"{a.real}".format(a=1)
+
+"{.real}".format(1.0)
+"{0.real}".format(1.0)
+"{a.real}".format(a=1.0)
+
+"{.real}".format(1j)
+"{0.real}".format(1j)
+"{a.real}".format(a=1j)
+
+"{.real}".format(0b01)
+"{0.real}".format(0b01)
+"{a.real}".format(a=0b01)
+
+"{}".format(1 + 2)
+"{}".format([1, 2])
+"{}".format({1, 2})
+"{}".format({1: 2, 3: 4})
+"{}".format((i for i in range(2)))
+
+"{.real}".format(1 + 2)
+"{.real}".format([1, 2])
+"{.real}".format({1, 2})
+"{.real}".format({1: 2, 3: 4})
+"{}".format((i for i in range(2)))
diff --git a/crates/ruff/resources/test/fixtures/pyupgrade/UP036_5.py b/crates/ruff/resources/test/fixtures/pyupgrade/UP036_5.py
new file mode 100644
index 0000000000000..c7e1a8778da50
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/pyupgrade/UP036_5.py
@@ -0,0 +1,30 @@
+import sys
+
+if sys.version_info < (3, 8):
+
+ def a():
+ if b:
+ print(1)
+ elif c:
+ print(2)
+ return None
+
+else:
+ pass
+
+
+import sys
+
+if sys.version_info < (3, 8):
+ pass
+
+else:
+
+ def a():
+ if b:
+ print(1)
+ elif c:
+ print(2)
+ else:
+ print(3)
+ return None
diff --git a/crates/ruff/resources/test/fixtures/ruff/RUF005.py b/crates/ruff/resources/test/fixtures/ruff/RUF005.py
index d0a611c26ddc9..2007e47a2e5e8 100644
--- a/crates/ruff/resources/test/fixtures/ruff/RUF005.py
+++ b/crates/ruff/resources/test/fixtures/ruff/RUF005.py
@@ -1,9 +1,38 @@
+###
+# Non-fixable Errors.
+###
+foo + [ # This will be preserved.
+]
+[*foo] + [ # This will be preserved.
+]
+first = [
+ # The order
+ 1, # here
+ 2, # is
+ # extremely
+ 3, # critical
+ # to preserve
+]
+second = first + [
+ # please
+ 4,
+ # don't
+ 5,
+ # touch
+ 6,
+]
+
+
+###
+# Fixable errors.
+###
class Fun:
words = ("how", "fun!")
def yay(self):
return self.words
+
yay = Fun().yay
foo = [4, 5, 6]
@@ -13,33 +42,27 @@ def yay(self):
spam = quux + (10, 11, 12)
spom = list(spam)
eggs = spom + [13, 14, 15]
-elatement = ("we all say", ) + yay()
-excitement = ("we all think", ) + Fun().yay()
-astonishment = ("we all feel", ) + Fun.words
+elatement = ("we all say",) + yay()
+excitement = ("we all think",) + Fun().yay()
+astonishment = ("we all feel",) + Fun.words
-chain = ['a', 'b', 'c'] + eggs + list(('yes', 'no', 'pants') + zoob)
+chain = ["a", "b", "c"] + eggs + list(("yes", "no", "pants") + zoob)
baz = () + zoob
-first = [
- # The order
- 1, # here
- 2, # is
- # extremely
- 3, # critical
- # to preserve
-]
-second = first + [
- # please
- 4,
- # don't
- 5,
- # touch
- 6,
-]
-
[] + foo + [
]
-[] + foo + [ # This will be preserved, but doesn't prevent the fix
-]
+pylint_call = [sys.executable, "-m", "pylint"] + args + [path]
+pylint_call_tuple = (sys.executable, "-m", "pylint") + args + (path, path2)
+b = a + [2, 3] + [4]
+
+# Uses the non-preferred quote style, which should be retained.
+f"{a() + ['b']}"
+
+###
+# Non-errors.
+###
+a = (1,) + [2]
+a = [1, 2] + (3, 4)
+a = ([1, 2, 3] + b) + (4, 5, 6)
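RUF005 suggests unpacking into a new literal instead of concatenating collections; the fix is withheld when comments inside the operands would be lost (the "Non-fixable" block above). Sketch of the preferred spelling for two of the fixable cases:

    b = [*a, 2, 3, 4]  # from a + [2, 3] + [4]
    pylint_call = [sys.executable, "-m", "pylint", *args, path]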
diff --git a/crates/ruff/resources/test/fixtures/ruff/RUF009.py b/crates/ruff/resources/test/fixtures/ruff/RUF009.py
index 53c6a0598e1bc..3ba1aad6bed96 100644
--- a/crates/ruff/resources/test/fixtures/ruff/RUF009.py
+++ b/crates/ruff/resources/test/fixtures/ruff/RUF009.py
@@ -2,10 +2,10 @@
import re
import typing
from dataclasses import dataclass, field
+from fractions import Fraction
from pathlib import Path
from typing import ClassVar, NamedTuple
-
def default_function() -> list[int]:
return []
@@ -25,7 +25,12 @@ class A:
fine_timedelta: datetime.timedelta = datetime.timedelta(hours=7)
fine_tuple: tuple[int] = tuple([1])
fine_regex: re.Pattern = re.compile(r".*")
-
+ fine_float: float = float('-inf')
+ fine_int: int = int(12)
+ fine_complex: complex = complex(1, 2)
+ fine_str: str = str("foo")
+ fine_bool: bool = bool("foo")
+ fine_fraction: Fraction = Fraction(1,2)
DEFAULT_IMMUTABLETYPE_FOR_ALL_DATACLASSES = ImmutableType(40)
DEFAULT_A_FOR_ALL_DATACLASSES = A([1, 2, 3])
diff --git a/crates/ruff/resources/test/fixtures/ruff/RUF010.py b/crates/ruff/resources/test/fixtures/ruff/RUF010.py
new file mode 100644
index 0000000000000..77e459c21496a
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/ruff/RUF010.py
@@ -0,0 +1,36 @@
+bla = b"bla"
+d = {"a": b"bla", "b": b"bla", "c": b"bla"}
+
+
+def foo(one_arg):
+ pass
+
+
+f"{str(bla)}, {repr(bla)}, {ascii(bla)}" # RUF010
+
+f"{str(d['a'])}, {repr(d['b'])}, {ascii(d['c'])}" # RUF010
+
+f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
+
+f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
+
+f"{foo(bla)}" # OK
+
+f"{str(bla, 'ascii')}, {str(bla, encoding='cp1255')}" # OK
+
+f"{bla!s} {[]!r} {'bar'!a}" # OK
+
+"Not an f-string {str(bla)}, {repr(bla)}, {ascii(bla)}" # OK
+
+
+def ascii(arg):
+ pass
+
+
+f"{ascii(bla)}" # OK
+
+(
+ f"Member of tuple mismatches type at index {i}. Expected {of_shape_i}. Got "
+ " intermediary content "
+ f" that flows {repr(obj)} of type {type(obj)}.{additional_message}" # RUF010
+)
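RUF010 replaces explicit `str()`, `repr()`, and `ascii()` calls inside f-string replacement fields with the equivalent conversion flags; the shadowed `ascii` near the end shows why the rule must first resolve the names to the builtins. Sketch:

    f"{bla!s}, {bla!r}, {bla!a}"  # from f"{str(bla)}, {repr(bla)}, {ascii(bla)}"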
diff --git a/crates/ruff/resources/test/fixtures/ruff/RUF100_0.py b/crates/ruff/resources/test/fixtures/ruff/RUF100_0.py
index f89729938bfb6..5e51807ef90c9 100644
--- a/crates/ruff/resources/test/fixtures/ruff/RUF100_0.py
+++ b/crates/ruff/resources/test/fixtures/ruff/RUF100_0.py
@@ -88,3 +88,12 @@ def f() -> None:
print(sys.path)
"shape: (6,)\nSeries: '' [duration[μs]]\n[\n\t0µs\n\t1µs\n\t2µs\n\t3µs\n\t4µs\n\t5µs\n]" # noqa: F401
+
+
+def f():
+ # Ensure that the `noqa` applies to both the overlong line _and_ the unused
+ # variable.
+ a = """Lorem ipsum dolor sit amet.
+
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+ """ # noqa
diff --git a/crates/ruff/resources/test/fixtures/ruff/confusables.py b/crates/ruff/resources/test/fixtures/ruff/confusables.py
index b642cebd6f396..3ae350887fa3f 100644
--- a/crates/ruff/resources/test/fixtures/ruff/confusables.py
+++ b/crates/ruff/resources/test/fixtures/ruff/confusables.py
@@ -8,7 +8,24 @@ def f():
...
-def g():
+def f():
"""Here's a docstring with a greek rho: ρ"""
# And here's a comment with a greek alpha: ∗
...
+
+
+x = "𝐁ad string"
+x = "−"
+
+# This should be ignored, since it contains an unambiguous unicode character, and no
+# ASCII.
+x = "Русский"
+
+# The first word should be ignored, while the second should be included, since it
+# contains ASCII.
+x = "βα Bαd"
+
+# The two characters should be flagged here. The first character is a "word"
+# consisting of a single ambiguous character, while the second character is a "word
+# boundary" (whitespace) that it itself ambiguous.
+x = "Р усский"
diff --git a/crates/ruff/resources/test/fixtures/ruff/noqa.py b/crates/ruff/resources/test/fixtures/ruff/noqa.py
new file mode 100644
index 0000000000000..30e59400c6f2f
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/ruff/noqa.py
@@ -0,0 +1,23 @@
+def f():
+ # These should both be ignored by the `noqa`.
+ I = 1 # noqa: E741, F841
+
+
+def f():
+ # These should both be ignored by the `noqa`.
+ I = 1 # noqa: E741,F841
+
+
+def f():
+ # These should both be ignored by the `noqa`.
+ I = 1 # noqa: E741 F841
+
+
+def f():
+ # These should both be ignored by the `noqa`.
+ I = 1 # noqa: E741 , F841
+
+
+def f():
+ # Only `E741` should be ignored by the `noqa`.
+ I = 1 # noqa: E741.F841
diff --git a/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/bleach/pyproject.toml b/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/bleach/pyproject.toml
new file mode 100644
index 0000000000000..6345a9db16eed
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/bleach/pyproject.toml
@@ -0,0 +1,7 @@
+[project]
+name = "hello-world"
+version = "0.1.0"
+# There's a comma missing here
+dependencies = [
+ "tinycss2>=1.1.0<1.2",
+]
diff --git a/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/invalid_author/pyproject.toml b/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/invalid_author/pyproject.toml
new file mode 100644
index 0000000000000..94243f3807890
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/invalid_author/pyproject.toml
@@ -0,0 +1,7 @@
+[project]
+name = "hello-world"
+version = "0.1.0"
+# Ensure that the spans from toml handle utf-8 correctly
+authors = [
+ { name = "Z͑ͫ̓ͪ̂ͫ̽͏̴̙̤̞͉͚̯̞̠͍A̴̵̜̰͔ͫ͗͢L̠ͨͧͩ͘G̴̻͈͍͔̹̑͗̎̅͛́Ǫ̵̹̻̝̳͂̌̌͘", email = 1 }
+]
diff --git a/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/maturin/pyproject.toml b/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/maturin/pyproject.toml
new file mode 100644
index 0000000000000..6cbcadc76841e
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/maturin/pyproject.toml
@@ -0,0 +1,57 @@
+# This is a valid pyproject.toml
+# https://github.com/PyO3/maturin/blob/87ac3d9f74dd79ef2df9a20880b9f1fa23f9a437/pyproject.toml
+[build-system]
+requires = ["setuptools", "wheel>=0.36.2", "tomli>=1.1.0 ; python_version<'3.11'", "setuptools-rust>=1.4.0"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "maturin"
+requires-python = ">=3.7"
+classifiers = [
+ "Topic :: Software Development :: Build Tools",
+ "Programming Language :: Rust",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+]
+dependencies = ["tomli>=1.1.0 ; python_version<'3.11'"]
+dynamic = [
+ "authors",
+ "description",
+ "license",
+ "readme",
+ "version"
+]
+
+[project.optional-dependencies]
+zig = [
+ "ziglang~=0.10.0",
+]
+patchelf = [
+ "patchelf",
+]
+
+[project.urls]
+"Source Code" = "https://github.com/PyO3/maturin"
+Issues = "https://github.com/PyO3/maturin/issues"
+Documentation = "https://maturin.rs"
+Changelog = "https://maturin.rs/changelog.html"
+
+[tool.maturin]
+bindings = "bin"
+
+[tool.black]
+target_version = ['py37']
+extend-exclude = '''
+# Ignore cargo-generate templates
+^/src/templates
+'''
+
+[tool.ruff]
+line-length = 120
+target-version = "py37"
+
+[tool.mypy]
+disallow_untyped_defs = true
+disallow_incomplete_defs = true
+warn_no_return = true
+ignore_missing_imports = true
diff --git a/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/maturin_gh_1615/pyproject.toml b/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/maturin_gh_1615/pyproject.toml
new file mode 100644
index 0000000000000..14e8b567ea1b1
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/ruff/pyproject_toml/maturin_gh_1615/pyproject.toml
@@ -0,0 +1,39 @@
+# license-files is wrong here
+# https://github.com/PyO3/maturin/issues/1615
+[build-system]
+requires = [ "maturin>=0.14", "numpy", "wheel", "patchelf",]
+build-backend = "maturin"
+
+[project]
+name = "..."
+license-files = [ "license.txt",]
+requires-python = ">=3.8"
+requires-dist = [ "maturin>=0.14", "...",]
+dependencies = [ "packaging", "...",]
+zip-safe = false
+version = "..."
+readme = "..."
+description = "..."
+classifiers = [ "...",]
+[[project.authors]]
+name = "..."
+email = "..."
+
+[project.urls]
+homepage = "..."
+documentation = "..."
+repository = "..."
+
+[project.optional-dependencies]
+test = [ "coverage", "...",]
+docs = [ "sphinx", "sphinx-rtd-theme",]
+devel = []
+
+[tool.maturin]
+include = [ "...",]
+bindings = "pyo3"
+compatibility = "manylinux2014"
+
+[tool.pytest.ini_options]
+testpaths = [ "...",]
+addopts = "--color=yes --tb=native --cov-report term --cov-report html:docs/dist_coverage --cov=aisdb --doctest-modules --envfile .env"
diff --git a/crates/ruff/resources/test/fixtures/tryceratops/TRY301.py b/crates/ruff/resources/test/fixtures/tryceratops/TRY301.py
index f9b98b8a0e147..da7c82e487112 100644
--- a/crates/ruff/resources/test/fixtures/tryceratops/TRY301.py
+++ b/crates/ruff/resources/test/fixtures/tryceratops/TRY301.py
@@ -1,3 +1,8 @@
+"""
+Violation:
+
+Checks for `raise` statements within `try` blocks.
+"""
class MyException(Exception):
pass
@@ -45,3 +50,10 @@ def good():
logger.exception("a failed")
except Exception:
logger.exception("something failed")
+
+
+def fine():
+ try:
+ a = process() # This throws the exception now
+ finally:
+ print("finally")
diff --git a/crates/ruff/resources/test/fixtures/tryceratops/TRY302.py b/crates/ruff/resources/test/fixtures/tryceratops/TRY302.py
new file mode 100644
index 0000000000000..3691e5472afc5
--- /dev/null
+++ b/crates/ruff/resources/test/fixtures/tryceratops/TRY302.py
@@ -0,0 +1,123 @@
+"""
+Violation:
+
+Checks for uses of `raise` immediately inside an `except` block.
+"""
+class MyException(Exception):
+ pass
+
+def bad():
+ try:
+ process()
+ except Exception:
+ raise
+
+def bad():
+ try:
+ process()
+ except Exception:
+ raise
+ print("this code is pointless!")
+
+def bad():
+ try:
+ process()
+ except:
+ # I am a comment, not a statement!
+ raise
+
+def bad():
+ try:
+ process()
+ except Exception:
+ raise
+
+def bad():
+ try:
+ process()
+ except Exception as e:
+ raise
+
+def bad():
+ try:
+ process()
+ except Exception as e:
+ raise e
+
+def bad():
+ try:
+ process()
+ except MyException:
+ raise
+ except Exception:
+ raise
+
+def bad():
+ try:
+ process()
+ except MyException as e:
+ raise e
+ except Exception as e:
+ raise e
+
+def bad():
+ try:
+ process()
+ except MyException as ex:
+ raise ex
+ except Exception as e:
+ raise e
+
+def fine():
+ try:
+ process()
+ except Exception as e:
+ raise e from None
+
+def fine():
+ try:
+ process()
+ except Exception as e:
+ raise e from Exception
+
+def fine():
+ try:
+ process()
+ except Exception as e:
+ raise ex
+
+def fine():
+ try:
+ process()
+ except MyException:
+ raise
+ except Exception:
+ print("bar")
+
+def fine():
+ try:
+ process()
+ except Exception:
+ print("initiating rapid unscheduled disassembly of program")
+
+def fine():
+ try:
+ process()
+ except MyException:
+ print("oh no!")
+ raise
+
+def fine():
+ try:
+ process()
+ except Exception:
+ if True:
+ raise
+
+def fine():
+ try:
+ process()
+ finally:
+ # I am a comment, not a statement!
+ print("but i am a statement")
+ raise
diff --git a/crates/ruff/resources/test/fixtures/tryceratops/TRY400.py b/crates/ruff/resources/test/fixtures/tryceratops/TRY400.py
index f8deb6ad9b51c..1c31197c26754 100644
--- a/crates/ruff/resources/test/fixtures/tryceratops/TRY400.py
+++ b/crates/ruff/resources/test/fixtures/tryceratops/TRY400.py
@@ -4,6 +4,7 @@
"""
import logging
+import sys
logger = logging.getLogger(__name__)
@@ -60,3 +61,17 @@ def good():
a = 1
except Exception:
foo.exception("Context message here")
+
+
+def fine():
+ try:
+ a = 1
+ except Exception:
+ logger.error("Context message here", exc_info=True)
+
+
+def fine():
+ try:
+ a = 1
+ except Exception:
+ logger.error("Context message here", exc_info=sys.exc_info())
diff --git a/crates/ruff/src/autofix/actions.rs b/crates/ruff/src/autofix/actions.rs
deleted file mode 100644
index 283c9298a9835..0000000000000
--- a/crates/ruff/src/autofix/actions.rs
+++ /dev/null
@@ -1,570 +0,0 @@
-//! Interface for generating autofix edits from higher-level actions (e.g., "remove an argument").
-use anyhow::{bail, Result};
-use itertools::Itertools;
-use libcst_native::{
- Codegen, CodegenState, ImportNames, ParenthesizableWhitespace, SmallStatement, Statement,
-};
-use ruff_text_size::{TextLen, TextRange, TextSize};
-use rustpython_parser::ast::{ExcepthandlerKind, Expr, Keyword, Stmt, StmtKind};
-use rustpython_parser::{lexer, Mode, Tok};
-
-use ruff_diagnostics::Edit;
-use ruff_python_ast::helpers;
-use ruff_python_ast::imports::{AnyImport, Import};
-use ruff_python_ast::newlines::NewlineWithTrailingNewline;
-use ruff_python_ast::source_code::{Indexer, Locator, Stylist};
-use ruff_python_semantic::context::Context;
-
-use crate::cst::helpers::compose_module_path;
-use crate::cst::matchers::match_module;
-use crate::importer::Importer;
-
-/// Determine if a body contains only a single statement, taking into account
-/// deleted.
-fn has_single_child(body: &[Stmt], deleted: &[&Stmt]) -> bool {
- body.iter().filter(|child| !deleted.contains(child)).count() == 1
-}
-
-/// Determine if a child is the only statement in its body.
-fn is_lone_child(child: &Stmt, parent: &Stmt, deleted: &[&Stmt]) -> Result<bool> {
- match &parent.node {
- StmtKind::FunctionDef { body, .. }
- | StmtKind::AsyncFunctionDef { body, .. }
- | StmtKind::ClassDef { body, .. }
- | StmtKind::With { body, .. }
- | StmtKind::AsyncWith { body, .. } => {
- if body.iter().contains(child) {
- Ok(has_single_child(body, deleted))
- } else {
- bail!("Unable to find child in parent body")
- }
- }
- StmtKind::For { body, orelse, .. }
- | StmtKind::AsyncFor { body, orelse, .. }
- | StmtKind::While { body, orelse, .. }
- | StmtKind::If { body, orelse, .. } => {
- if body.iter().contains(child) {
- Ok(has_single_child(body, deleted))
- } else if orelse.iter().contains(child) {
- Ok(has_single_child(orelse, deleted))
- } else {
- bail!("Unable to find child in parent body")
- }
- }
- StmtKind::Try {
- body,
- handlers,
- orelse,
- finalbody,
- }
- | StmtKind::TryStar {
- body,
- handlers,
- orelse,
- finalbody,
- } => {
- if body.iter().contains(child) {
- Ok(has_single_child(body, deleted))
- } else if orelse.iter().contains(child) {
- Ok(has_single_child(orelse, deleted))
- } else if finalbody.iter().contains(child) {
- Ok(has_single_child(finalbody, deleted))
- } else if let Some(body) = handlers.iter().find_map(|handler| match &handler.node {
- ExcepthandlerKind::ExceptHandler { body, .. } => {
- if body.iter().contains(child) {
- Some(body)
- } else {
- None
- }
- }
- }) {
- Ok(has_single_child(body, deleted))
- } else {
- bail!("Unable to find child in parent body")
- }
- }
- StmtKind::Match { cases, .. } => {
- if let Some(body) = cases.iter().find_map(|case| {
- if case.body.iter().contains(child) {
- Some(&case.body)
- } else {
- None
- }
- }) {
- Ok(has_single_child(body, deleted))
- } else {
- bail!("Unable to find child in parent body")
- }
- }
- _ => bail!("Unable to find child in parent body"),
- }
-}
-
-/// Return the location of a trailing semicolon following a `Stmt`, if it's part
-/// of a multi-statement line.
-fn trailing_semicolon(stmt: &Stmt, locator: &Locator) -> Option<TextSize> {
- let contents = locator.after(stmt.end());
-
- for line in NewlineWithTrailingNewline::from(contents) {
- let trimmed = line.trim_start();
-
- if trimmed.starts_with(';') {
- let colon_offset = line.text_len() - trimmed.text_len();
- return Some(stmt.end() + line.start() + colon_offset);
- }
-
- if !trimmed.starts_with('\\') {
- break;
- }
- }
- None
-}
-
-/// Find the next valid break for a `Stmt` after a semicolon.
-fn next_stmt_break(semicolon: TextSize, locator: &Locator) -> TextSize {
- let start_location = semicolon + TextSize::from(1);
-
- let contents = &locator.contents()[usize::from(start_location)..];
- for line in NewlineWithTrailingNewline::from(contents) {
- let trimmed = line.trim();
- // Skip past any continuations.
- if trimmed.starts_with('\\') {
- continue;
- }
-
- return start_location
- + if trimmed.is_empty() {
- // If the line is empty, then despite the previous statement ending in a
- // semicolon, we know that it's not a multi-statement line.
- line.start()
- } else {
- // Otherwise, find the start of the next statement. (Or, anything that isn't
- // whitespace.)
- let relative_offset = line.find(|c: char| !c.is_whitespace()).unwrap();
- line.start() + TextSize::try_from(relative_offset).unwrap()
- };
- }
-
- locator.line_end(start_location)
-}
-
-/// Return `true` if a `Stmt` occurs at the end of a file.
-fn is_end_of_file(stmt: &Stmt, locator: &Locator) -> bool {
- stmt.end() == locator.contents().text_len()
-}
-
-/// Return the `Fix` to use when deleting a `Stmt`.
-///
-/// In some cases, this is as simple as deleting the `Range` of the `Stmt`
-/// itself. However, there are a few exceptions:
-/// - If the `Stmt` is _not_ the terminal statement in a multi-statement line,
-/// we need to delete up to the start of the next statement (and avoid
-/// deleting any content that precedes the statement).
-/// - If the `Stmt` is the terminal statement in a multi-statement line, we need
-/// to avoid deleting any content that precedes the statement.
-/// - If the `Stmt` has no trailing and leading content, then it's convenient to
-/// remove the entire start and end lines.
-/// - If the `Stmt` is the last statement in its parent body, replace it with a
-/// `pass` instead.
-pub fn delete_stmt(
- stmt: &Stmt,
- parent: Option<&Stmt>,
- deleted: &[&Stmt],
- locator: &Locator,
- indexer: &Indexer,
- stylist: &Stylist,
-) -> Result<Edit> {
- if parent
- .map(|parent| is_lone_child(stmt, parent, deleted))
- .map_or(Ok(None), |v| v.map(Some))?
- .unwrap_or_default()
- {
- // If removing this node would lead to an invalid syntax tree, replace
- // it with a `pass`.
- Ok(Edit::range_replacement("pass".to_string(), stmt.range()))
- } else {
- Ok(if let Some(semicolon) = trailing_semicolon(stmt, locator) {
- let next = next_stmt_break(semicolon, locator);
- Edit::deletion(stmt.start(), next)
- } else if helpers::has_leading_content(stmt, locator) {
- Edit::range_deletion(stmt.range())
- } else if helpers::preceded_by_continuation(stmt, indexer, locator) {
- if is_end_of_file(stmt, locator) && locator.is_at_start_of_line(stmt.start()) {
- // Special-case: a file can't end in a continuation.
- Edit::range_replacement(stylist.line_ending().to_string(), stmt.range())
- } else {
- Edit::range_deletion(stmt.range())
- }
- } else {
- let range = locator.full_lines_range(stmt.range());
- Edit::range_deletion(range)
- })
- }
-}
-
-/// Generate a `Fix` to remove any unused imports from an `import` statement.
-pub fn remove_unused_imports<'a>(
- unused_imports: impl Iterator<Item = &'a str>,
- stmt: &Stmt,
- parent: Option<&Stmt>,
- deleted: &[&Stmt],
- locator: &Locator,
- indexer: &Indexer,
- stylist: &Stylist,
-) -> Result<Edit> {
- let module_text = locator.slice(stmt.range());
- let mut tree = match_module(module_text)?;
-
- let Some(Statement::Simple(body)) = tree.body.first_mut() else {
- bail!("Expected Statement::Simple");
- };
-
- let (aliases, import_module) = match body.body.first_mut() {
- Some(SmallStatement::Import(import_body)) => (&mut import_body.names, None),
- Some(SmallStatement::ImportFrom(import_body)) => {
- if let ImportNames::Aliases(names) = &mut import_body.names {
- (
- names,
- Some((&import_body.relative, import_body.module.as_ref())),
- )
- } else if let ImportNames::Star(..) = &import_body.names {
- // Special-case: if the import is a `from ... import *`, then we delete the
- // entire statement.
- let mut found_star = false;
- for unused_import in unused_imports {
- let full_name = match import_body.module.as_ref() {
- Some(module_name) => format!("{}.*", compose_module_path(module_name)),
- None => "*".to_string(),
- };
- if unused_import == full_name {
- found_star = true;
- } else {
- bail!(
- "Expected \"*\" for unused import (got: \"{}\")",
- unused_import
- );
- }
- }
- if !found_star {
- bail!("Expected \'*\' for unused import");
- }
- return delete_stmt(stmt, parent, deleted, locator, indexer, stylist);
- } else {
- bail!("Expected: ImportNames::Aliases | ImportNames::Star");
- }
- }
- _ => bail!("Expected: SmallStatement::ImportFrom | SmallStatement::Import"),
- };
-
- // Preserve the trailing comma (or not) from the last entry.
- let trailing_comma = aliases.last().and_then(|alias| alias.comma.clone());
-
- for unused_import in unused_imports {
- let alias_index = aliases.iter().position(|alias| {
- let full_name = match import_module {
- Some((relative, module)) => {
- let module = module.map(compose_module_path);
- let member = compose_module_path(&alias.name);
- let mut full_name = String::with_capacity(
- relative.len()
- + module.as_ref().map_or(0, std::string::String::len)
- + member.len()
- + 1,
- );
- for _ in 0..relative.len() {
- full_name.push('.');
- }
- if let Some(module) = module {
- full_name.push_str(&module);
- full_name.push('.');
- }
- full_name.push_str(&member);
- full_name
- }
- None => compose_module_path(&alias.name),
- };
- full_name == unused_import
- });
-
- if let Some(index) = alias_index {
- aliases.remove(index);
- }
- }
-
- // But avoid destroying any trailing comments.
- if let Some(alias) = aliases.last_mut() {
- let has_comment = if let Some(comma) = &alias.comma {
- match &comma.whitespace_after {
- ParenthesizableWhitespace::SimpleWhitespace(_) => false,
- ParenthesizableWhitespace::ParenthesizedWhitespace(whitespace) => {
- whitespace.first_line.comment.is_some()
- }
- }
- } else {
- false
- };
- if !has_comment {
- alias.comma = trailing_comma;
- }
- }
-
- if aliases.is_empty() {
- delete_stmt(stmt, parent, deleted, locator, indexer, stylist)
- } else {
- let mut state = CodegenState {
- default_newline: &stylist.line_ending(),
- default_indent: stylist.indentation(),
- ..CodegenState::default()
- };
- tree.codegen(&mut state);
-
- Ok(Edit::range_replacement(state.to_string(), stmt.range()))
- }
-}
-
-/// Generic function to remove arguments or keyword arguments in function
-/// calls and class definitions. (For classes `args` should be considered
-/// `bases`)
-///
-/// Supports the removal of parentheses when this is the only (kw)arg left.
-/// For this behavior, set `remove_parentheses` to `true`.
-pub fn remove_argument(
- locator: &Locator,
- call_at: TextSize,
- expr_range: TextRange,
- args: &[Expr],
- keywords: &[Keyword],
- remove_parentheses: bool,
-) -> Result<Edit> {
- // TODO(sbrugman): Preserve trailing comments.
- let contents = locator.after(call_at);
-
- let mut fix_start = None;
- let mut fix_end = None;
-
- let n_arguments = keywords.len() + args.len();
- if n_arguments == 0 {
- bail!("No arguments or keywords to remove");
- }
-
- if n_arguments == 1 {
- // Case 1: there is only one argument.
- let mut count: usize = 0;
- for (tok, range) in lexer::lex_located(contents, Mode::Module, call_at).flatten() {
- if matches!(tok, Tok::Lpar) {
- if count == 0 {
- fix_start = Some(if remove_parentheses {
- range.start()
- } else {
- range.start() + TextSize::from(1)
- });
- }
- count += 1;
- }
-
- if matches!(tok, Tok::Rpar) {
- count -= 1;
- if count == 0 {
- fix_end = Some(if remove_parentheses {
- range.end()
- } else {
- range.end() - TextSize::from(1)
- });
- break;
- }
- }
- }
- } else if args
- .iter()
- .map(Expr::start)
- .chain(keywords.iter().map(Keyword::start))
- .any(|location| location > expr_range.start())
- {
- // Case 2: argument or keyword is _not_ the last node.
- let mut seen_comma = false;
- for (tok, range) in lexer::lex_located(contents, Mode::Module, call_at).flatten() {
- if seen_comma {
- if matches!(tok, Tok::NonLogicalNewline) {
- // Also delete any non-logical newlines after the comma.
- continue;
- }
- fix_end = Some(if matches!(tok, Tok::Newline) {
- range.end()
- } else {
- range.start()
- });
- break;
- }
- if range.start() == expr_range.start() {
- fix_start = Some(range.start());
- }
- if fix_start.is_some() && matches!(tok, Tok::Comma) {
- seen_comma = true;
- }
- }
- } else {
- // Case 3: argument or keyword is the last node, so we have to find the last
- // comma in the stmt.
- for (tok, range) in lexer::lex_located(contents, Mode::Module, call_at).flatten() {
- if range.start() == expr_range.start() {
- fix_end = Some(expr_range.end());
- break;
- }
- if matches!(tok, Tok::Comma) {
- fix_start = Some(range.start());
- }
- }
- }
-
- match (fix_start, fix_end) {
- (Some(start), Some(end)) => Ok(Edit::deletion(start, end)),
- _ => {
- bail!("No fix could be constructed")
- }
- }
-}
-
-/// Generate an [`Edit`] to reference the given symbol. Returns the [`Edit`] necessary to make the
-/// symbol available in the current scope along with the bound name of the symbol.
-///
-/// For example, assuming `module` is `"functools"` and `member` is `"lru_cache"`, this function
-/// could return an [`Edit`] to add `import functools` to the top of the file, alongside with the
-/// name on which the `lru_cache` symbol would be made available (`"functools.lru_cache"`).
-///
-/// Attempts to reuse existing imports when possible.
-pub fn get_or_import_symbol(
- module: &str,
- member: &str,
- context: &Context,
- importer: &Importer,
- locator: &Locator,
-) -> Result<(Edit, String)> {
- if let Some((source, binding)) = context.resolve_qualified_import_name(module, member) {
- // If the symbol is already available in the current scope, use it.
- //
- // We also add a no-nop edit to force conflicts with any other fixes that might try to
- // remove the import. Consider:
- //
- // ```py
- // import sys
- //
- // quit()
- // ```
- //
- // Assume you omit this no-op edit. If you run Ruff with `unused-imports` and
- // `sys-exit-alias` over this snippet, it will generate two fixes: (1) remove the unused
- // `sys` import; and (2) replace `quit()` with `sys.exit()`, under the assumption that `sys`
- // is already imported and available.
- //
- // By adding this no-op edit, we force the `unused-imports` fix to conflict with the
- // `sys-exit-alias` fix, and thus will avoid applying both fixes in the same pass.
- let import_edit =
- Edit::range_replacement(locator.slice(source.range()).to_string(), source.range());
- Ok((import_edit, binding))
- } else {
- if let Some(stmt) = importer.get_import_from(module) {
- // Case 1: `from functools import lru_cache` is in scope, and we're trying to reference
- // `functools.cache`; thus, we add `cache` to the import, and return `"cache"` as the
- // bound name.
- if context
- .find_binding(member)
- .map_or(true, |binding| binding.kind.is_builtin())
- {
- let import_edit = importer.add_member(stmt, member)?;
- Ok((import_edit, member.to_string()))
- } else {
- bail!(
- "Unable to insert `{}` into scope due to name conflict",
- member
- )
- }
- } else {
- // Case 2: No `functools` import is in scope; thus, we add `import functools`, and
- // return `"functools.cache"` as the bound name.
- if context
- .find_binding(module)
- .map_or(true, |binding| binding.kind.is_builtin())
- {
- let import_edit = importer.add_import(&AnyImport::Import(Import::module(module)));
- Ok((import_edit, format!("{module}.{member}")))
- } else {
- bail!(
- "Unable to insert `{}` into scope due to name conflict",
- module
- )
- }
- }
- }
-}
-
-#[cfg(test)]
-mod tests {
- use anyhow::Result;
- use ruff_text_size::TextSize;
- use rustpython_parser as parser;
-
- use ruff_python_ast::source_code::Locator;
-
- use crate::autofix::actions::{next_stmt_break, trailing_semicolon};
-
- #[test]
- fn find_semicolon() -> Result<()> {
- let contents = "x = 1";
- let program = parser::parse_program(contents, "")?;
- let stmt = program.first().unwrap();
- let locator = Locator::new(contents);
- assert_eq!(trailing_semicolon(stmt, &locator), None);
-
- let contents = "x = 1; y = 1";
- let program = parser::parse_program(contents, "")?;
- let stmt = program.first().unwrap();
- let locator = Locator::new(contents);
- assert_eq!(trailing_semicolon(stmt, &locator), Some(TextSize::from(5)));
-
- let contents = "x = 1 ; y = 1";
- let program = parser::parse_program(contents, "")?;
- let stmt = program.first().unwrap();
- let locator = Locator::new(contents);
- assert_eq!(trailing_semicolon(stmt, &locator), Some(TextSize::from(6)));
-
- let contents = r#"
-x = 1 \
- ; y = 1
-"#
- .trim();
- let program = parser::parse_program(contents, "")?;
- let stmt = program.first().unwrap();
- let locator = Locator::new(contents);
- assert_eq!(trailing_semicolon(stmt, &locator), Some(TextSize::from(10)));
-
- Ok(())
- }
-
- #[test]
- fn find_next_stmt_break() {
- let contents = "x = 1; y = 1";
- let locator = Locator::new(contents);
- assert_eq!(
- next_stmt_break(TextSize::from(4), &locator),
- TextSize::from(5)
- );
-
- let contents = "x = 1 ; y = 1";
- let locator = Locator::new(contents);
- assert_eq!(
- next_stmt_break(TextSize::from(5), &locator),
- TextSize::from(6)
- );
-
- let contents = r#"
-x = 1 \
- ; y = 1
-"#
- .trim();
- let locator = Locator::new(contents);
- assert_eq!(
- next_stmt_break(TextSize::from(10), &locator),
- TextSize::from(12)
- );
- }
-}
diff --git a/crates/ruff/src/autofix/codemods.rs b/crates/ruff/src/autofix/codemods.rs
new file mode 100644
index 0000000000000..b73b2d50410ec
--- /dev/null
+++ b/crates/ruff/src/autofix/codemods.rs
@@ -0,0 +1,214 @@
+//! Interface for editing code snippets. These functions take statements or expressions as input,
+//! and return the modified code snippet as output.
+use anyhow::{bail, Result};
+use libcst_native::{
+ Codegen, CodegenState, ImportNames, ParenthesizableWhitespace, SmallStatement, Statement,
+};
+use rustpython_parser::ast::{Ranged, Stmt};
+
+use ruff_python_ast::source_code::{Locator, Stylist};
+
+use crate::cst::helpers::compose_module_path;
+use crate::cst::matchers::match_statement;
+
+/// Glue code to make libcst codegen work with ruff's Stylist
+pub(crate) trait CodegenStylist<'a>: Codegen<'a> {
+ fn codegen_stylist(&self, stylist: &'a Stylist) -> String;
+}
+
+impl<'a, T: Codegen<'a>> CodegenStylist<'a> for T {
+ fn codegen_stylist(&self, stylist: &'a Stylist) -> String {
+ let mut state = CodegenState {
+ default_newline: stylist.line_ending().as_str(),
+ default_indent: stylist.indentation(),
+ ..Default::default()
+ };
+ self.codegen(&mut state);
+ state.to_string()
+ }
+}
+
+/// Given an import statement, remove any imports that are specified in the `imports` iterator.
+///
+/// Returns `Ok(None)` if the statement is empty after removing the imports.
+pub(crate) fn remove_imports<'a>(
+ imports: impl Iterator<Item = &'a str>,
+ stmt: &Stmt,
+ locator: &Locator,
+ stylist: &Stylist,
+) -> Result<Option<String>> {
+ let module_text = locator.slice(stmt.range());
+ let mut tree = match_statement(module_text)?;
+
+ let Statement::Simple(body) = &mut tree else {
+ bail!("Expected Statement::Simple");
+ };
+
+ let (aliases, import_module) = match body.body.first_mut() {
+ Some(SmallStatement::Import(import_body)) => (&mut import_body.names, None),
+ Some(SmallStatement::ImportFrom(import_body)) => {
+ if let ImportNames::Aliases(names) = &mut import_body.names {
+ (
+ names,
+ Some((&import_body.relative, import_body.module.as_ref())),
+ )
+ } else if let ImportNames::Star(..) = &import_body.names {
+ // Special-case: if the import is a `from ... import *`, then we delete the
+ // entire statement.
+ let mut found_star = false;
+ for import in imports {
+ let qualified_name = match import_body.module.as_ref() {
+ Some(module_name) => format!("{}.*", compose_module_path(module_name)),
+ None => "*".to_string(),
+ };
+ if import == qualified_name {
+ found_star = true;
+ } else {
+ bail!("Expected \"*\" for unused import (got: \"{}\")", import);
+ }
+ }
+ if !found_star {
+ bail!("Expected \'*\' for unused import");
+ }
+ return Ok(None);
+ } else {
+ bail!("Expected: ImportNames::Aliases | ImportNames::Star");
+ }
+ }
+ _ => bail!("Expected: SmallStatement::ImportFrom | SmallStatement::Import"),
+ };
+
+ // Preserve the trailing comma (or not) from the last entry.
+ let trailing_comma = aliases.last().and_then(|alias| alias.comma.clone());
+
+ for import in imports {
+ let alias_index = aliases.iter().position(|alias| {
+ let qualified_name = match import_module {
+ Some((relative, module)) => {
+ let module = module.map(compose_module_path);
+ let member = compose_module_path(&alias.name);
+ let mut qualified_name = String::with_capacity(
+ relative.len() + module.as_ref().map_or(0, String::len) + member.len() + 1,
+ );
+ for _ in 0..relative.len() {
+ qualified_name.push('.');
+ }
+ if let Some(module) = module {
+ qualified_name.push_str(&module);
+ qualified_name.push('.');
+ }
+ qualified_name.push_str(&member);
+ qualified_name
+ }
+ None => compose_module_path(&alias.name),
+ };
+ qualified_name == import
+ });
+
+ if let Some(index) = alias_index {
+ aliases.remove(index);
+ }
+ }
+
+ // But avoid destroying any trailing comments.
+ if let Some(alias) = aliases.last_mut() {
+ let has_comment = if let Some(comma) = &alias.comma {
+ match &comma.whitespace_after {
+ ParenthesizableWhitespace::SimpleWhitespace(_) => false,
+ ParenthesizableWhitespace::ParenthesizedWhitespace(whitespace) => {
+ whitespace.first_line.comment.is_some()
+ }
+ }
+ } else {
+ false
+ };
+ if !has_comment {
+ alias.comma = trailing_comma;
+ }
+ }
+
+ if aliases.is_empty() {
+ return Ok(None);
+ }
+
+ Ok(Some(tree.codegen_stylist(stylist)))
+}
+
+/// Given an import statement, remove any imports that are not specified in the `imports` slice.
+///
+/// Returns the modified import statement.
+pub(crate) fn retain_imports(
+ imports: &[&str],
+ stmt: &Stmt,
+ locator: &Locator,
+ stylist: &Stylist,
+) -> Result<String> {
+ let module_text = locator.slice(stmt.range());
+ let mut tree = match_statement(module_text)?;
+
+ let Statement::Simple(body) = &mut tree else {
+ bail!("Expected Statement::Simple");
+ };
+
+ let (aliases, import_module) = match body.body.first_mut() {
+ Some(SmallStatement::Import(import_body)) => (&mut import_body.names, None),
+ Some(SmallStatement::ImportFrom(import_body)) => {
+ if let ImportNames::Aliases(names) = &mut import_body.names {
+ (
+ names,
+ Some((&import_body.relative, import_body.module.as_ref())),
+ )
+ } else {
+ bail!("Expected: ImportNames::Aliases");
+ }
+ }
+ _ => bail!("Expected: SmallStatement::ImportFrom | SmallStatement::Import"),
+ };
+
+ // Preserve the trailing comma (or not) from the last entry.
+ let trailing_comma = aliases.last().and_then(|alias| alias.comma.clone());
+
+ aliases.retain(|alias| {
+ imports.iter().any(|import| {
+ let qualified_name = match import_module {
+ Some((relative, module)) => {
+ let module = module.map(compose_module_path);
+ let member = compose_module_path(&alias.name);
+ let mut qualified_name = String::with_capacity(
+ relative.len() + module.as_ref().map_or(0, String::len) + member.len() + 1,
+ );
+ for _ in 0..relative.len() {
+ qualified_name.push('.');
+ }
+ if let Some(module) = module {
+ qualified_name.push_str(&module);
+ qualified_name.push('.');
+ }
+ qualified_name.push_str(&member);
+ qualified_name
+ }
+ None => compose_module_path(&alias.name),
+ };
+ qualified_name == *import
+ })
+ });
+
+ // But avoid destroying any trailing comments.
+ if let Some(alias) = aliases.last_mut() {
+ let has_comment = if let Some(comma) = &alias.comma {
+ match &comma.whitespace_after {
+ ParenthesizableWhitespace::SimpleWhitespace(_) => false,
+ ParenthesizableWhitespace::ParenthesizedWhitespace(whitespace) => {
+ whitespace.first_line.comment.is_some()
+ }
+ }
+ } else {
+ false
+ };
+ if !has_comment {
+ alias.comma = trailing_comma;
+ }
+ }
+
+ Ok(tree.codegen_stylist(stylist))
+}
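As the early returns above show, `remove_imports` yields `Ok(None)` when every alias in the statement is being removed (the caller is then expected to delete the statement), and otherwise re-generates the statement from the modified CST, keeping the trailing comma unless that would clobber a trailing comment. A rough string-level illustration of that contract, without libcst or the crate's `Locator`/`Stylist` types and only for a single-line `from x import a, b, c` form:

    // Hypothetical simplification: `None` means "delete the whole statement"; the
    // real function returns Result<Option<String>> and errors on unexpected shapes.
    fn remove_names(stmt: &str, remove: &[&str]) -> Option<String> {
        let (head, names) = stmt.split_once(" import ")?;
        let kept: Vec<&str> = names
            .split(',')
            .map(str::trim)
            .filter(|name| !remove.contains(name))
            .collect();
        if kept.is_empty() {
            None
        } else {
            Some(format!("{head} import {}", kept.join(", ")))
        }
    }

    fn main() {
        assert_eq!(
            remove_names("from foo import a, b, c", &["b"]).as_deref(),
            Some("from foo import a, c")
        );
        assert_eq!(remove_names("from foo import a", &["a"]), None);
    }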
diff --git a/crates/ruff/src/autofix/edits.rs b/crates/ruff/src/autofix/edits.rs
new file mode 100644
index 0000000000000..e660c0f20b346
--- /dev/null
+++ b/crates/ruff/src/autofix/edits.rs
@@ -0,0 +1,363 @@
+//! Interface for generating autofix edits from higher-level actions (e.g., "remove an argument").
+use anyhow::{bail, Result};
+use ruff_text_size::{TextLen, TextRange, TextSize};
+use rustpython_parser::ast::{self, Excepthandler, Expr, Keyword, Ranged, Stmt};
+use rustpython_parser::{lexer, Mode, Tok};
+
+use ruff_diagnostics::Edit;
+use ruff_newlines::NewlineWithTrailingNewline;
+use ruff_python_ast::helpers;
+use ruff_python_ast::source_code::{Indexer, Locator, Stylist};
+
+use crate::autofix::codemods;
+
+/// Return the `Fix` to use when deleting a `Stmt`.
+///
+/// In some cases, this is as simple as deleting the `Range` of the `Stmt`
+/// itself. However, there are a few exceptions:
+/// - If the `Stmt` is _not_ the terminal statement in a multi-statement line,
+/// we need to delete up to the start of the next statement (and avoid
+/// deleting any content that precedes the statement).
+/// - If the `Stmt` is the terminal statement in a multi-statement line, we need
+/// to avoid deleting any content that precedes the statement.
+/// - If the `Stmt` has no trailing and leading content, then it's convenient to
+/// remove the entire start and end lines.
+/// - If the `Stmt` is the last statement in its parent body, replace it with a
+/// `pass` instead.
+pub(crate) fn delete_stmt(
+ stmt: &Stmt,
+ parent: Option<&Stmt>,
+ locator: &Locator,
+ indexer: &Indexer,
+ stylist: &Stylist,
+) -> Edit {
+ if parent
+ .map(|parent| is_lone_child(stmt, parent))
+ .unwrap_or_default()
+ {
+ // If removing this node would lead to an invalid syntax tree, replace
+ // it with a `pass`.
+ Edit::range_replacement("pass".to_string(), stmt.range())
+ } else {
+ if let Some(semicolon) = trailing_semicolon(stmt, locator) {
+ let next = next_stmt_break(semicolon, locator);
+ Edit::deletion(stmt.start(), next)
+ } else if helpers::has_leading_content(stmt, locator) {
+ Edit::range_deletion(stmt.range())
+ } else if helpers::preceded_by_continuation(stmt, indexer, locator) {
+ if is_end_of_file(stmt, locator) && locator.is_at_start_of_line(stmt.start()) {
+ // Special-case: a file can't end in a continuation.
+ Edit::range_replacement(stylist.line_ending().to_string(), stmt.range())
+ } else {
+ Edit::range_deletion(stmt.range())
+ }
+ } else {
+ let range = locator.full_lines_range(stmt.range());
+ Edit::range_deletion(range)
+ }
+ }
+}
+
+/// Generate a `Fix` to remove the specified imports from an `import` statement.
+pub(crate) fn remove_unused_imports<'a>(
+ unused_imports: impl Iterator<Item = &'a str>,
+ stmt: &Stmt,
+ parent: Option<&Stmt>,
+ locator: &Locator,
+ indexer: &Indexer,
+ stylist: &Stylist,
+) -> Result<Edit> {
+ match codemods::remove_imports(unused_imports, stmt, locator, stylist)? {
+ None => Ok(delete_stmt(stmt, parent, locator, indexer, stylist)),
+ Some(content) => Ok(Edit::range_replacement(content, stmt.range())),
+ }
+}
+
+/// Generic function to remove arguments or keyword arguments in function
+/// calls and class definitions. (For classes `args` should be considered
+/// `bases`)
+///
+/// Supports the removal of parentheses when this is the only (kw)arg left.
+/// For this behavior, set `remove_parentheses` to `true`.
+pub(crate) fn remove_argument(
+ locator: &Locator,
+ call_at: TextSize,
+ expr_range: TextRange,
+ args: &[Expr],
+ keywords: &[Keyword],
+ remove_parentheses: bool,
+) -> Result<Edit> {
+ // TODO(sbrugman): Preserve trailing comments.
+ let contents = locator.after(call_at);
+
+ let mut fix_start = None;
+ let mut fix_end = None;
+
+ let n_arguments = keywords.len() + args.len();
+ if n_arguments == 0 {
+ bail!("No arguments or keywords to remove");
+ }
+
+ if n_arguments == 1 {
+ // Case 1: there is only one argument.
+ let mut count = 0u32;
+ for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, call_at).flatten() {
+ if matches!(tok, Tok::Lpar) {
+ if count == 0 {
+ fix_start = Some(if remove_parentheses {
+ range.start()
+ } else {
+ range.start() + TextSize::from(1)
+ });
+ }
+ count = count.saturating_add(1);
+ }
+
+ if matches!(tok, Tok::Rpar) {
+ count = count.saturating_sub(1);
+ if count == 0 {
+ fix_end = Some(if remove_parentheses {
+ range.end()
+ } else {
+ range.end() - TextSize::from(1)
+ });
+ break;
+ }
+ }
+ }
+ } else if args
+ .iter()
+ .map(Expr::start)
+ .chain(keywords.iter().map(Keyword::start))
+ .any(|location| location > expr_range.start())
+ {
+ // Case 2: argument or keyword is _not_ the last node.
+ let mut seen_comma = false;
+ for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, call_at).flatten() {
+ if seen_comma {
+ if matches!(tok, Tok::NonLogicalNewline) {
+ // Also delete any non-logical newlines after the comma.
+ continue;
+ }
+ fix_end = Some(if matches!(tok, Tok::Newline) {
+ range.end()
+ } else {
+ range.start()
+ });
+ break;
+ }
+ if range.start() == expr_range.start() {
+ fix_start = Some(range.start());
+ }
+ if fix_start.is_some() && matches!(tok, Tok::Comma) {
+ seen_comma = true;
+ }
+ }
+ } else {
+ // Case 3: argument or keyword is the last node, so we have to find the last
+ // comma in the stmt.
+ for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, call_at).flatten() {
+ if range.start() == expr_range.start() {
+ fix_end = Some(expr_range.end());
+ break;
+ }
+ if matches!(tok, Tok::Comma) {
+ fix_start = Some(range.start());
+ }
+ }
+ }
+
+ match (fix_start, fix_end) {
+ (Some(start), Some(end)) => Ok(Edit::deletion(start, end)),
+ _ => {
+ bail!("No fix could be constructed")
+ }
+ }
+}
+
+/// Determine if a vector contains only one, specific element.
+fn is_only<T: PartialEq>(vec: &[T], value: &T) -> bool {
+ vec.len() == 1 && vec[0] == *value
+}
+
+/// Determine if a child is the only statement in its body.
+fn is_lone_child(child: &Stmt, parent: &Stmt) -> bool {
+ match parent {
+ Stmt::FunctionDef(ast::StmtFunctionDef { body, .. })
+ | Stmt::AsyncFunctionDef(ast::StmtAsyncFunctionDef { body, .. })
+ | Stmt::ClassDef(ast::StmtClassDef { body, .. })
+ | Stmt::With(ast::StmtWith { body, .. })
+ | Stmt::AsyncWith(ast::StmtAsyncWith { body, .. }) => {
+ if is_only(body, child) {
+ return true;
+ }
+ }
+ Stmt::For(ast::StmtFor { body, orelse, .. })
+ | Stmt::AsyncFor(ast::StmtAsyncFor { body, orelse, .. })
+ | Stmt::While(ast::StmtWhile { body, orelse, .. })
+ | Stmt::If(ast::StmtIf { body, orelse, .. }) => {
+ if is_only(body, child) || is_only(orelse, child) {
+ return true;
+ }
+ }
+ Stmt::Try(ast::StmtTry {
+ body,
+ handlers,
+ orelse,
+ finalbody,
+ range: _,
+ })
+ | Stmt::TryStar(ast::StmtTryStar {
+ body,
+ handlers,
+ orelse,
+ finalbody,
+ range: _,
+ }) => {
+ if is_only(body, child)
+ || is_only(orelse, child)
+ || is_only(finalbody, child)
+ || handlers.iter().any(|handler| match handler {
+ Excepthandler::ExceptHandler(ast::ExcepthandlerExceptHandler {
+ body, ..
+ }) => is_only(body, child),
+ })
+ {
+ return true;
+ }
+ }
+ Stmt::Match(ast::StmtMatch { cases, .. }) => {
+ if cases.iter().any(|case| is_only(&case.body, child)) {
+ return true;
+ }
+ }
+ _ => {}
+ }
+ false
+}
+
+/// Return the location of a trailing semicolon following a `Stmt`, if it's part
+/// of a multi-statement line.
+fn trailing_semicolon(stmt: &Stmt, locator: &Locator) -> Option<TextSize> {
+ let contents = locator.after(stmt.end());
+
+ for line in NewlineWithTrailingNewline::from(contents) {
+ let trimmed = line.trim_start();
+
+ if trimmed.starts_with(';') {
+ let colon_offset = line.text_len() - trimmed.text_len();
+ return Some(stmt.end() + line.start() + colon_offset);
+ }
+
+ if !trimmed.starts_with('\\') {
+ break;
+ }
+ }
+ None
+}
+
+/// Find the next valid break for a `Stmt` after a semicolon.
+fn next_stmt_break(semicolon: TextSize, locator: &Locator) -> TextSize {
+ let start_location = semicolon + TextSize::from(1);
+
+ let contents = &locator.contents()[usize::from(start_location)..];
+ for line in NewlineWithTrailingNewline::from(contents) {
+ let trimmed = line.trim();
+ // Skip past any continuations.
+ if trimmed.starts_with('\\') {
+ continue;
+ }
+
+ return start_location
+ + if trimmed.is_empty() {
+ // If the line is empty, then despite the previous statement ending in a
+ // semicolon, we know that it's not a multi-statement line.
+ line.start()
+ } else {
+ // Otherwise, find the start of the next statement. (Or, anything that isn't
+ // whitespace.)
+ let relative_offset = line.find(|c: char| !c.is_whitespace()).unwrap();
+ line.start() + TextSize::try_from(relative_offset).unwrap()
+ };
+ }
+
+ locator.line_end(start_location)
+}
+
+/// Return `true` if a `Stmt` occurs at the end of a file.
+fn is_end_of_file(stmt: &Stmt, locator: &Locator) -> bool {
+ stmt.end() == locator.contents().text_len()
+}
+
+#[cfg(test)]
+mod tests {
+ use anyhow::Result;
+ use ruff_text_size::TextSize;
+ use rustpython_parser::ast::Suite;
+ use rustpython_parser::Parse;
+
+ use ruff_python_ast::source_code::Locator;
+
+ use crate::autofix::edits::{next_stmt_break, trailing_semicolon};
+
+ #[test]
+ fn find_semicolon() -> Result<()> {
+ let contents = "x = 1";
+ let program = Suite::parse(contents, "")?;
+ let stmt = program.first().unwrap();
+ let locator = Locator::new(contents);
+ assert_eq!(trailing_semicolon(stmt, &locator), None);
+
+ let contents = "x = 1; y = 1";
+ let program = Suite::parse(contents, "")?;
+ let stmt = program.first().unwrap();
+ let locator = Locator::new(contents);
+ assert_eq!(trailing_semicolon(stmt, &locator), Some(TextSize::from(5)));
+
+ let contents = "x = 1 ; y = 1";
+ let program = Suite::parse(contents, "")?;
+ let stmt = program.first().unwrap();
+ let locator = Locator::new(contents);
+ assert_eq!(trailing_semicolon(stmt, &locator), Some(TextSize::from(6)));
+
+ let contents = r#"
+x = 1 \
+ ; y = 1
+"#
+ .trim();
+ let program = Suite::parse(contents, "")?;
+ let stmt = program.first().unwrap();
+ let locator = Locator::new(contents);
+ assert_eq!(trailing_semicolon(stmt, &locator), Some(TextSize::from(10)));
+
+ Ok(())
+ }
+
+ #[test]
+ fn find_next_stmt_break() {
+ let contents = "x = 1; y = 1";
+ let locator = Locator::new(contents);
+ assert_eq!(
+ next_stmt_break(TextSize::from(4), &locator),
+ TextSize::from(5)
+ );
+
+ let contents = "x = 1 ; y = 1";
+ let locator = Locator::new(contents);
+ assert_eq!(
+ next_stmt_break(TextSize::from(5), &locator),
+ TextSize::from(6)
+ );
+
+ let contents = r#"
+x = 1 \
+ ; y = 1
+"#
+ .trim();
+ let locator = Locator::new(contents);
+ assert_eq!(
+ next_stmt_break(TextSize::from(10), &locator),
+ TextSize::from(12)
+ );
+ }
+}
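`delete_stmt` relies on `trailing_semicolon` and `next_stmt_break` to handle multi-statement lines: when the deleted statement is followed by a `;`, the deletion must extend past the semicolon to the start of the next statement so that no stray separator or leading whitespace is left behind (the tests above pin the expected offsets). A simplified, standalone illustration of the intended end result, ignoring offsets, line continuations, and `;` inside string literals:

    // Toy version operating on a single line of source rather than byte offsets.
    fn delete_first_stmt(line: &str) -> &str {
        match line.find(';') {
            // Consume the semicolon and any whitespace before the next statement.
            Some(idx) => line[idx + 1..].trim_start(),
            // No semicolon: the whole line belonged to the deleted statement.
            None => "",
        }
    }

    fn main() {
        assert_eq!(delete_first_stmt("x = 1; y = 1"), "y = 1");
        assert_eq!(delete_first_stmt("x = 1 ; y = 1"), "y = 1");
        assert_eq!(delete_first_stmt("x = 1"), "");
    }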
diff --git a/crates/ruff/src/autofix/mod.rs b/crates/ruff/src/autofix/mod.rs
index 50283071e7af9..df4fe0e69cbe1 100644
--- a/crates/ruff/src/autofix/mod.rs
+++ b/crates/ruff/src/autofix/mod.rs
@@ -1,22 +1,27 @@
use std::collections::BTreeSet;
use itertools::Itertools;
+use nohash_hasher::IntSet;
use ruff_text_size::{TextRange, TextSize};
use rustc_hash::FxHashMap;
-use ruff_diagnostics::{Diagnostic, Edit, Fix};
+use ruff_diagnostics::{Diagnostic, Edit, Fix, IsolationLevel};
use ruff_python_ast::source_code::Locator;
use crate::linter::FixTable;
use crate::registry::{AsRule, Rule};
-pub mod actions;
+pub(crate) mod codemods;
+pub(crate) mod edits;
/// Auto-fix errors in a file, and write the fixed source code to disk.
-pub fn fix_file(diagnostics: &[Diagnostic], locator: &Locator) -> Option<(String, FixTable)> {
+pub(crate) fn fix_file(
+ diagnostics: &[Diagnostic],
+ locator: &Locator,
+) -> Option<(String, FixTable)> {
let mut with_fixes = diagnostics
.iter()
- .filter(|diag| !diag.fix.is_empty())
+ .filter(|diag| diag.fix.is_some())
.peekable();
if with_fixes.peek().is_none() {
@@ -34,15 +39,15 @@ fn apply_fixes<'a>(
let mut output = String::with_capacity(locator.len());
let mut last_pos: Option<TextSize> = None;
let mut applied: BTreeSet<&Edit> = BTreeSet::default();
+ let mut isolated: IntSet<u32> = IntSet::default();
let mut fixed = FxHashMap::default();
for (rule, fix) in diagnostics
.filter_map(|diagnostic| {
- if diagnostic.fix.is_empty() {
- None
- } else {
- Some((diagnostic.kind.rule(), &diagnostic.fix))
- }
+ diagnostic
+ .fix
+ .as_ref()
+ .map(|fix| (diagnostic.kind.rule(), fix))
})
.sorted_by(|(rule1, fix1), (rule2, fix2)| cmp_fix(*rule1, *rule2, fix1, fix2))
{
@@ -62,7 +67,19 @@ fn apply_fixes<'a>(
continue;
}
- for edit in fix.edits() {
+ // If this fix requires isolation, and we've already applied another fix in the
+ // same isolation group, skip it.
+ if let IsolationLevel::Group(id) = fix.isolation() {
+ if !isolated.insert(id) {
+ continue;
+ }
+ }
+
+ for edit in fix
+ .edits()
+ .iter()
+ .sorted_unstable_by_key(|edit| edit.start())
+ {
// Add all contents from `last_pos` to `fix.location`.
let slice = locator.slice(TextRange::new(last_pos.unwrap_or_default(), edit.start()));
output.push_str(slice);
@@ -93,6 +110,13 @@ fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Orderi
// Apply `EndsInPeriod` fixes before `NewLineAfterLastParagraph` fixes.
(Rule::EndsInPeriod, Rule::NewLineAfterLastParagraph) => std::cmp::Ordering::Less,
(Rule::NewLineAfterLastParagraph, Rule::EndsInPeriod) => std::cmp::Ordering::Greater,
+ // Apply `IfElseBlockInsteadOfDictGet` fixes before `IfElseBlockInsteadOfIfExp` fixes.
+ (Rule::IfElseBlockInsteadOfDictGet, Rule::IfElseBlockInsteadOfIfExp) => {
+ std::cmp::Ordering::Less
+ }
+ (Rule::IfElseBlockInsteadOfIfExp, Rule::IfElseBlockInsteadOfDictGet) => {
+ std::cmp::Ordering::Greater
+ }
_ => std::cmp::Ordering::Equal,
})
}
@@ -103,18 +127,20 @@ mod tests {
use ruff_diagnostics::Diagnostic;
use ruff_diagnostics::Edit;
+ use ruff_diagnostics::Fix;
use ruff_python_ast::source_code::Locator;
use crate::autofix::apply_fixes;
use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
+ #[allow(deprecated)]
fn create_diagnostics(edit: impl IntoIterator<Item = Edit>) -> Vec<Diagnostic> {
edit.into_iter()
.map(|edit| Diagnostic {
// The choice of rule here is arbitrary.
kind: MissingNewlineAtEndOfFile.into(),
range: edit.range(),
- fix: edit.into(),
+ fix: Some(Fix::unspecified(edit)),
parent: None,
})
.collect()
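The isolation check added to `apply_fixes` means that, within a single pass, at most one fix from any given isolation group is applied; fixes without a group are unaffected. A self-contained sketch of that dedup-by-group behavior, using a stand-in enum rather than the real `ruff_diagnostics` types:

    use std::collections::HashSet;

    // Stand-in for the isolation level attached to each fix.
    enum Isolation {
        Independent,
        Group(u32),
    }

    // Apply fixes in order, skipping any fix whose group has already been used.
    fn apply<'a>(fixes: &'a [(&'a str, Isolation)]) -> Vec<&'a str> {
        let mut isolated: HashSet<u32> = HashSet::new();
        let mut applied = Vec::new();
        for (name, isolation) in fixes {
            if let Isolation::Group(id) = isolation {
                if !isolated.insert(*id) {
                    continue;
                }
            }
            applied.push(*name);
        }
        applied
    }

    fn main() {
        let fixes = [
            ("remove-import-a", Isolation::Group(1)),
            ("remove-import-b", Isolation::Group(1)),
            ("add-newline", Isolation::Independent),
        ];
        // Only one of the two grouped deletions survives this pass.
        assert_eq!(apply(&fixes), ["remove-import-a", "add-newline"]);
    }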
diff --git a/crates/ruff/src/checkers/ast/deferred.rs b/crates/ruff/src/checkers/ast/deferred.rs
index 61f2135f9fcbd..ab74d512345e3 100644
--- a/crates/ruff/src/checkers/ast/deferred.rs
+++ b/crates/ruff/src/checkers/ast/deferred.rs
@@ -1,25 +1,17 @@
use ruff_text_size::TextRange;
-use rustpython_parser::ast::{Expr, Stmt};
+use rustpython_parser::ast::Expr;
-use ruff_python_ast::types::RefEquality;
-use ruff_python_semantic::analyze::visibility::{Visibility, VisibleScope};
-use ruff_python_semantic::scope::ScopeId;
-
-use crate::checkers::ast::AnnotationContext;
-use crate::docstrings::definition::Definition;
-
-type Context<'a> = (ScopeId, Vec<RefEquality<'a, Stmt>>);
+use ruff_python_semantic::model::Snapshot;
/// A collection of AST nodes that are deferred for later analysis.
/// Used to, e.g., store functions, whose bodies shouldn't be analyzed until all
/// module-level definitions have been analyzed.
-#[derive(Default)]
-pub struct Deferred<'a> {
- pub definitions: Vec<(Definition<'a>, Visibility, Context<'a>)>,
- pub string_type_definitions: Vec<(TextRange, &'a str, AnnotationContext, Context<'a>)>,
- pub type_definitions: Vec<(&'a Expr, AnnotationContext, Context<'a>)>,
- pub functions: Vec<(&'a Stmt, Context<'a>, VisibleScope)>,
- pub lambdas: Vec<(&'a Expr, Context<'a>)>,
- pub for_loops: Vec<(&'a Stmt, Context<'a>)>,
- pub assignments: Vec<Context<'a>>,
+#[derive(Debug, Default)]
+pub(crate) struct Deferred<'a> {
+ pub(crate) string_type_definitions: Vec<(TextRange, &'a str, Snapshot)>,
+ pub(crate) future_type_definitions: Vec<(&'a Expr, Snapshot)>,
+ pub(crate) functions: Vec<Snapshot>,
+ pub(crate) lambdas: Vec<(&'a Expr, Snapshot)>,
+ pub(crate) for_loops: Vec<Snapshot>,
+ pub(crate) assignments: Vec<Snapshot>,
}
diff --git a/crates/ruff/src/checkers/ast/mod.rs b/crates/ruff/src/checkers/ast/mod.rs
index 14febab297f99..8b210f721dc9d 100644
--- a/crates/ruff/src/checkers/ast/mod.rs
+++ b/crates/ruff/src/checkers/ast/mod.rs
@@ -3,91 +3,91 @@ use std::path::Path;
use itertools::Itertools;
use log::error;
use ruff_text_size::{TextRange, TextSize};
-use rustc_hash::{FxHashMap, FxHashSet};
-use rustpython_common::cformat::{CFormatError, CFormatErrorType};
+use rustpython_format::cformat::{CFormatError, CFormatErrorType};
use rustpython_parser::ast::{
- Arg, Arguments, Comprehension, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprContext,
- ExprKind, KeywordData, Located, Operator, Pattern, PatternKind, Stmt, StmtKind, Suite,
+ self, Arg, Arguments, Comprehension, Constant, Excepthandler, Expr, ExprContext, Keyword,
+ Operator, Pattern, Ranged, Stmt, Suite, Unaryop,
};
-use ruff_diagnostics::Diagnostic;
+use ruff_diagnostics::{Diagnostic, IsolationLevel};
use ruff_python_ast::all::{extract_all_names, AllNamesFlags};
use ruff_python_ast::helpers::{extract_handled_exceptions, to_module_path};
-use ruff_python_ast::source_code::{Indexer, Locator, Stylist};
-use ruff_python_ast::types::{Node, RefEquality};
-use ruff_python_ast::typing::parse_type_annotation;
+use ruff_python_ast::source_code::{Generator, Indexer, Locator, Quote, Stylist};
+use ruff_python_ast::str::trailing_quote;
+use ruff_python_ast::types::Node;
+use ruff_python_ast::typing::{parse_type_annotation, AnnotationKind};
use ruff_python_ast::visitor::{walk_excepthandler, walk_pattern, Visitor};
-use ruff_python_ast::{branch_detection, cast, helpers, str, visitor};
+use ruff_python_ast::{cast, helpers, str, visitor};
use ruff_python_semantic::analyze;
+use ruff_python_semantic::analyze::branch_detection;
use ruff_python_semantic::analyze::typing::{Callable, SubscriptKind};
+use ruff_python_semantic::analyze::visibility::ModuleSource;
use ruff_python_semantic::binding::{
- Binding, BindingId, BindingKind, Exceptions, ExecutionContext, Export, FromImportation,
+ Binding, BindingFlags, BindingId, BindingKind, Exceptions, Export, FromImportation,
Importation, StarImportation, SubmoduleImportation,
};
-use ruff_python_semantic::context::Context;
-use ruff_python_semantic::scope::{ClassDef, FunctionDef, Lambda, Scope, ScopeId, ScopeKind};
+use ruff_python_semantic::context::ExecutionContext;
+use ruff_python_semantic::definition::{ContextualizedDefinition, Module, ModuleKind};
+use ruff_python_semantic::globals::Globals;
+use ruff_python_semantic::model::{ResolvedReference, SemanticModel, SemanticModelFlags};
+use ruff_python_semantic::scope::{Scope, ScopeId, ScopeKind};
use ruff_python_stdlib::builtins::{BUILTINS, MAGIC_GLOBALS};
use ruff_python_stdlib::path::is_python_stub_file;
use crate::checkers::ast::deferred::Deferred;
-use crate::docstrings::definition::{
- transition_scope, Definition, DefinitionKind, Docstring, Documentable,
-};
+use crate::docstrings::extraction::ExtractionTarget;
+use crate::docstrings::Docstring;
use crate::fs::relativize_path;
use crate::importer::Importer;
use crate::noqa::NoqaMapping;
-use crate::registry::{AsRule, Rule};
+use crate::registry::Rule;
+use crate::rules::flake8_builtins::helpers::AnyShadowing;
use crate::rules::{
- flake8_2020, flake8_annotations, flake8_bandit, flake8_blind_except, flake8_boolean_trap,
- flake8_bugbear, flake8_builtins, flake8_comprehensions, flake8_datetimez, flake8_debugger,
- flake8_django, flake8_errmsg, flake8_gettext, flake8_implicit_str_concat,
- flake8_import_conventions, flake8_logging_format, flake8_pie, flake8_print, flake8_pyi,
- flake8_pytest_style, flake8_raise, flake8_return, flake8_self, flake8_simplify,
- flake8_tidy_imports, flake8_type_checking, flake8_unused_arguments, flake8_use_pathlib, mccabe,
- numpy, pandas_vet, pep8_naming, pycodestyle, pydocstyle, pyflakes, pygrep_hooks, pylint,
- pyupgrade, ruff, tryceratops,
+ airflow, flake8_2020, flake8_annotations, flake8_async, flake8_bandit, flake8_blind_except,
+ flake8_boolean_trap, flake8_bugbear, flake8_builtins, flake8_comprehensions, flake8_datetimez,
+ flake8_debugger, flake8_django, flake8_errmsg, flake8_future_annotations, flake8_gettext,
+ flake8_implicit_str_concat, flake8_import_conventions, flake8_logging_format, flake8_pie,
+ flake8_print, flake8_pyi, flake8_pytest_style, flake8_raise, flake8_return, flake8_self,
+ flake8_simplify, flake8_tidy_imports, flake8_type_checking, flake8_unused_arguments,
+ flake8_use_pathlib, flynt, mccabe, numpy, pandas_vet, pep8_naming, pycodestyle, pydocstyle,
+ pyflakes, pygrep_hooks, pylint, pyupgrade, ruff, tryceratops,
};
use crate::settings::types::PythonVersion;
use crate::settings::{flags, Settings};
-use crate::{autofix, docstrings, noqa, warn_user};
+use crate::{docstrings, noqa, warn_user};
mod deferred;
-type AnnotationContext = (bool, bool);
-
-pub struct Checker<'a> {
+pub(crate) struct Checker<'a> {
// Settings, static metadata, etc.
- pub path: &'a Path,
- module_path: Option<Vec<String>>,
+ path: &'a Path,
+ module_path: Option<&'a [String]>,
package: Option<&'a Path>,
is_stub: bool,
- autofix: flags::Autofix,
noqa: flags::Noqa,
- pub settings: &'a Settings,
- pub noqa_line_for: &'a NoqaMapping,
- pub locator: &'a Locator<'a>,
- pub stylist: &'a Stylist<'a>,
- pub indexer: &'a Indexer,
- pub importer: Importer<'a>,
+ noqa_line_for: &'a NoqaMapping,
+ pub(crate) settings: &'a Settings,
+ pub(crate) locator: &'a Locator<'a>,
+ pub(crate) stylist: &'a Stylist<'a>,
+ pub(crate) indexer: &'a Indexer,
+ pub(crate) importer: Importer<'a>,
// Stateful fields.
- pub ctx: Context<'a>,
- pub deferred: Deferred<'a>,
- pub diagnostics: Vec<Diagnostic>,
- pub deletions: FxHashSet<RefEquality<'a, Stmt>>,
+ semantic_model: SemanticModel<'a>,
+ deferred: Deferred<'a>,
+ pub(crate) diagnostics: Vec<Diagnostic>,
// Check-specific state.
- pub flake8_bugbear_seen: Vec<&'a Expr>,
+ pub(crate) flake8_bugbear_seen: Vec<&'a Expr>,
}
impl<'a> Checker<'a> {
#[allow(clippy::too_many_arguments)]
- pub fn new(
+ pub(crate) fn new(
settings: &'a Settings,
noqa_line_for: &'a NoqaMapping,
- autofix: flags::Autofix,
noqa: flags::Noqa,
path: &'a Path,
package: Option<&'a Path>,
- module_path: Option<Vec<String>>,
+ module: Module<'a>,
locator: &'a Locator,
stylist: &'a Stylist,
indexer: &'a Indexer,
@@ -96,34 +96,31 @@ impl<'a> Checker<'a> {
Checker {
settings,
noqa_line_for,
- autofix,
noqa,
path,
package,
- module_path: module_path.clone(),
+ module_path: module.path(),
is_stub: is_python_stub_file(path),
locator,
stylist,
indexer,
importer,
- ctx: Context::new(&settings.typing_modules, path, module_path),
+ semantic_model: SemanticModel::new(&settings.typing_modules, path, module),
deferred: Deferred::default(),
diagnostics: Vec::default(),
- deletions: FxHashSet::default(),
flake8_bugbear_seen: Vec::default(),
}
}
}
impl<'a> Checker<'a> {
- /// Return `true` if a patch should be generated under the given autofix
- /// `Mode`.
- pub fn patch(&self, code: Rule) -> bool {
- self.autofix.into() && self.settings.rules.should_fix(code)
+ /// Return `true` if a patch should be generated for a given [`Rule`].
+ pub(crate) fn patch(&self, code: Rule) -> bool {
+ self.settings.rules.should_fix(code)
}
- /// Return `true` if a `Rule` is disabled by a `noqa` directive.
- pub fn rule_is_ignored(&self, code: Rule, offset: TextSize) -> bool {
+ /// Return `true` if a [`Rule`] is disabled by a `noqa` directive.
+ pub(crate) fn rule_is_ignored(&self, code: Rule, offset: TextSize) -> bool {
// TODO(charlie): `noqa` directives are mostly enforced in `check_lines.rs`.
// However, in rare cases, we need to check them here. For example, when
// removing unused imports, we create a single fix that's applied to all
@@ -136,38 +133,76 @@ impl<'a> Checker<'a> {
}
noqa::rule_is_ignored(code, offset, self.noqa_line_for, self.locator)
}
-}
-/// Visit an [`Expr`], and treat it as a type definition.
-macro_rules! visit_type_definition {
- ($self:ident, $expr:expr) => {{
- let prev_in_type_definition = $self.ctx.in_type_definition;
- $self.ctx.in_type_definition = true;
- $self.visit_expr($expr);
- $self.ctx.in_type_definition = prev_in_type_definition;
- }};
-}
+ /// Create a [`Generator`] to generate source code based on the current AST state.
+ pub(crate) fn generator(&self) -> Generator {
+ fn quote_style(
+ model: &SemanticModel,
+ locator: &Locator,
+ indexer: &Indexer,
+ ) -> Option<Quote> {
+ if !model.in_f_string() {
+ return None;
+ }
-/// Visit an [`Expr`], and treat it as _not_ a type definition.
-macro_rules! visit_non_type_definition {
- ($self:ident, $expr:expr) => {{
- let prev_in_type_definition = $self.ctx.in_type_definition;
- $self.ctx.in_type_definition = false;
- $self.visit_expr($expr);
- $self.ctx.in_type_definition = prev_in_type_definition;
- }};
-}
+ // Find the quote character used to start the containing f-string.
+ let expr = model.expr()?;
+ let string_range = indexer.f_string_range(expr.start())?;
+ let trailing_quote = trailing_quote(locator.slice(string_range))?;
+
+ // Invert the quote character, if it's a single quote.
+ match *trailing_quote {
+ "'" => Some(Quote::Double),
+ "\"" => Some(Quote::Single),
+ _ => None,
+ }
+ }
+
+ Generator::new(
+ self.stylist.indentation(),
+ quote_style(&self.semantic_model, self.locator, self.indexer)
+ .unwrap_or(self.stylist.quote()),
+ self.stylist.line_ending(),
+ )
+ }
+
+ /// Returns the [`IsolationLevel`] for fixes in the current context.
+ ///
+ /// The primary use-case for fix isolation is to ensure that we don't delete all statements
+ /// in a given indented block, which would cause a syntax error. We therefore need to ensure
+ /// that we delete at most one statement per indented block per fixer pass. Fix isolation should
+ /// thus be applied whenever we delete a statement, but can otherwise be omitted.
+ pub(crate) fn isolation(&self, parent: Option<&Stmt>) -> IsolationLevel {
+ parent
+ .and_then(|stmt| self.semantic_model.stmts.node_id(stmt))
+ .map_or(IsolationLevel::default(), |node_id| {
+ IsolationLevel::Group(node_id.into())
+ })
+ }
+
+ pub(crate) const fn semantic_model(&self) -> &SemanticModel<'a> {
+ &self.semantic_model
+ }
+
+ pub(crate) const fn package(&self) -> Option<&'a Path> {
+ self.package
+ }
+
+ pub(crate) const fn path(&self) -> &'a Path {
+ self.path
+ }
+
+ /// Returns whether the given rule should be checked.
+ #[inline]
+ pub(crate) const fn enabled(&self, rule: Rule) -> bool {
+ self.settings.rules.enabled(rule)
+ }
-/// Visit an [`Expr`], and treat it as a boolean test. This is useful for detecting whether an
-/// expressions return value is significant, or whether the calling context only relies on
-/// its truthiness.
-macro_rules! visit_boolean_test {
- ($self:ident, $expr:expr) => {{
- let prev_in_boolean_test = $self.ctx.in_boolean_test;
- $self.ctx.in_boolean_test = true;
- $self.visit_expr($expr);
- $self.ctx.in_boolean_test = prev_in_boolean_test;
- }};
+ /// Returns whether any of the given rules should be checked.
+ #[inline]
+ pub(crate) const fn any_enabled(&self, rules: &[Rule]) -> bool {
+ self.settings.rules.any_enabled(rules)
+ }
}
impl<'a, 'b> Visitor<'b> for Checker<'a>
@@ -175,114 +210,115 @@ where
'b: 'a,
{
fn visit_stmt(&mut self, stmt: &'b Stmt) {
- self.ctx.push_parent(stmt);
+ self.semantic_model.push_stmt(stmt);
// Track whether we've seen docstrings, non-imports, etc.
- match &stmt.node {
- StmtKind::ImportFrom { module, .. } => {
+ match stmt {
+ Stmt::ImportFrom(ast::StmtImportFrom { module, names, .. }) => {
// Allow __future__ imports until we see a non-__future__ import.
- if self.ctx.futures_allowed {
- if let Some(module) = module {
- if module != "__future__" {
- self.ctx.futures_allowed = false;
- }
+ if let Some("__future__") = module.as_deref() {
+ if names
+ .iter()
+ .any(|alias| alias.name.as_str() == "annotations")
+ {
+ self.semantic_model.flags |= SemanticModelFlags::FUTURE_ANNOTATIONS;
}
+ } else {
+ self.semantic_model.flags |= SemanticModelFlags::FUTURES_BOUNDARY;
}
}
- StmtKind::Import { .. } => {
- self.ctx.futures_allowed = false;
+ Stmt::Import(_) => {
+ self.semantic_model.flags |= SemanticModelFlags::FUTURES_BOUNDARY;
}
_ => {
- self.ctx.futures_allowed = false;
- if !self.ctx.seen_import_boundary
+ self.semantic_model.flags |= SemanticModelFlags::FUTURES_BOUNDARY;
+ if !self.semantic_model.seen_import_boundary()
&& !helpers::is_assignment_to_a_dunder(stmt)
- && !helpers::in_nested_block(self.ctx.parents.iter().rev().map(Into::into))
+ && !helpers::in_nested_block(self.semantic_model.parents())
{
- self.ctx.seen_import_boundary = true;
+ self.semantic_model.flags |= SemanticModelFlags::IMPORT_BOUNDARY;
}
}
}
// Track each top-level import, to guide import insertions.
- if matches!(
- &stmt.node,
- StmtKind::Import { .. } | StmtKind::ImportFrom { .. }
- ) {
- if self.ctx.scope_id.is_global() && self.ctx.current_stmt_parent().is_none() {
+ if matches!(stmt, Stmt::Import(_) | Stmt::ImportFrom(_)) {
+ if self.semantic_model.at_top_level() {
self.importer.visit_import(stmt);
}
}
+ // Store the flags prior to any further descent, so that we can restore them after visiting
+ // the node.
+ let flags_snapshot = self.semantic_model.flags;
+
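// Illustrative sketch (standalone, not part of the diff): the snapshot-and-restore
// pattern used for the semantic model flags above, shown with a bare `u8` bit set
// rather than the crate's `SemanticModelFlags` type; the flag names are simplified
// stand-ins. The flags are copied before descending into a node and restored after
// visiting it, so context picked up inside the node does not leak to its siblings.
const FUTURE_ANNOTATIONS: u8 = 1 << 0;
const FUTURES_BOUNDARY: u8 = 1 << 1;

fn main() {
    let mut flags: u8 = FUTURE_ANNOTATIONS;
    // Snapshot before visiting a child node...
    let snapshot = flags;
    // ...accumulate context while inside it...
    flags |= FUTURES_BOUNDARY;
    assert_eq!(flags & FUTURES_BOUNDARY, FUTURES_BOUNDARY);
    // ...and restore afterwards so the extra context stays scoped to that node.
    flags = snapshot;
    assert_eq!(flags, FUTURE_ANNOTATIONS);
}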
// Pre-visit.
- match &stmt.node {
- StmtKind::Global { names } => {
+ match stmt {
+ Stmt::Global(ast::StmtGlobal { names, range: _ }) => {
let ranges: Vec<TextRange> = helpers::find_names(stmt, self.locator).collect();
- if !self.ctx.scope_id.is_global() {
- // Add the binding to the current scope.
- let context = self.ctx.execution_context();
- let exceptions = self.ctx.exceptions();
- let scope = &mut self.ctx.scopes[self.ctx.scope_id];
- let usage = Some((self.ctx.scope_id, stmt.range()));
+ if !self.semantic_model.scope_id.is_global() {
for (name, range) in names.iter().zip(ranges.iter()) {
- let id = self.ctx.bindings.push(Binding {
- kind: BindingKind::Global,
- runtime_usage: None,
- synthetic_usage: usage,
- typing_usage: None,
- range: *range,
- source: Some(RefEquality(stmt)),
- context,
- exceptions,
- });
- scope.add(name, id);
+ // Add a binding to the current scope.
+ let binding_id = self.semantic_model.push_binding(
+ *range,
+ BindingKind::Global,
+ BindingFlags::empty(),
+ );
+ let scope = self.semantic_model.scope_mut();
+ scope.add(name, binding_id);
}
}
- if self.settings.rules.enabled(Rule::AmbiguousVariableName) {
+ if self.enabled(Rule::AmbiguousVariableName) {
self.diagnostics
.extend(names.iter().zip(ranges.iter()).filter_map(|(name, range)| {
pycodestyle::rules::ambiguous_variable_name(name, *range)
}));
}
}
- StmtKind::Nonlocal { names } => {
+ Stmt::Nonlocal(ast::StmtNonlocal { names, range: _ }) => {
let ranges: Vec<TextRange> = helpers::find_names(stmt, self.locator).collect();
- if !self.ctx.scope_id.is_global() {
- let context = self.ctx.execution_context();
- let exceptions = self.ctx.exceptions();
- let scope = &mut self.ctx.scopes[self.ctx.scope_id];
- let usage = Some((self.ctx.scope_id, stmt.range()));
+ if !self.semantic_model.scope_id.is_global() {
for (name, range) in names.iter().zip(ranges.iter()) {
// Add a binding to the current scope.
- let id = self.ctx.bindings.push(Binding {
- kind: BindingKind::Nonlocal,
- runtime_usage: None,
- synthetic_usage: usage,
- typing_usage: None,
- range: *range,
- source: Some(RefEquality(stmt)),
- context,
- exceptions,
- });
- scope.add(name, id);
+ let binding_id = self.semantic_model.push_binding(
+ *range,
+ BindingKind::Nonlocal,
+ BindingFlags::empty(),
+ );
+ let scope = self.semantic_model.scope_mut();
+ scope.add(name, binding_id);
}
// Mark the binding in the defining scopes as used too. (Skip the global scope
- // and the current scope.)
+ // and the current scope, and, per standard resolution rules, any class scopes.)
for (name, range) in names.iter().zip(ranges.iter()) {
let binding_id = self
- .ctx
+ .semantic_model
.scopes
- .ancestors(self.ctx.scope_id)
+ .ancestors(self.semantic_model.scope_id)
.skip(1)
- .take_while(|scope| !scope.kind.is_module())
+ .filter(|scope| !(scope.kind.is_module() || scope.kind.is_class()))
.find_map(|scope| scope.get(name.as_str()));
if let Some(binding_id) = binding_id {
- self.ctx.bindings[*binding_id].runtime_usage = usage;
- } else {
- // Ensure that every nonlocal has an existing binding from a parent scope.
- if self.settings.rules.enabled(Rule::NonlocalWithoutBinding) {
+ self.semantic_model.add_local_reference(
+ binding_id,
+ stmt.range(),
+ ExecutionContext::Runtime,
+ );
+ }
+
+ // Ensure that every nonlocal has an existing binding from a parent scope.
+ if self.enabled(Rule::NonlocalWithoutBinding) {
+ if self
+ .semantic_model
+ .scopes
+ .ancestors(self.semantic_model.scope_id)
+ .skip(1)
+ .take_while(|scope| !scope.kind.is_module())
+ .all(|scope| !scope.declares(name.as_str()))
+ {
self.diagnostics.push(Diagnostic::new(
pylint::rules::NonlocalWithoutBinding {
name: name.to_string(),
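// Illustrative sketch (standalone, not part of the diff): the ancestor-scope walk the
// `nonlocal` handling above performs. The lookup skips the scope containing the
// `nonlocal` statement itself and ignores class and module scopes, mirroring Python's
// resolution rules for nested functions. `ScopeKind` and the `(kind, names)` pairs are
// simplified stand-ins for the crate's scope types.
enum ScopeKind {
    Module,
    Class,
    Function,
}

// Return the index (innermost first) of the enclosing function scope that defines `name`.
fn resolve_nonlocal(ancestors: &[(ScopeKind, Vec<&str>)], name: &str) -> Option<usize> {
    ancestors
        .iter()
        .enumerate()
        .skip(1) // skip the scope that contains the `nonlocal` statement
        .filter(|(_, (kind, _))| !matches!(kind, ScopeKind::Module | ScopeKind::Class))
        .find(|(_, (_, names))| names.contains(&name))
        .map(|(index, _)| index)
}

fn main() {
    let scopes = vec![
        (ScopeKind::Function, vec![]),         // innermost: contains `nonlocal x`
        (ScopeKind::Class, vec!["x"]),         // ignored per resolution rules
        (ScopeKind::Function, vec!["x", "y"]), // the binding `nonlocal x` refers to
        (ScopeKind::Module, vec!["x"]),
    ];
    assert_eq!(resolve_nonlocal(&scopes, "x"), Some(2));
}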
@@ -294,62 +330,58 @@ where
}
}
- if self.settings.rules.enabled(Rule::AmbiguousVariableName) {
+ if self.enabled(Rule::AmbiguousVariableName) {
self.diagnostics
.extend(names.iter().zip(ranges.iter()).filter_map(|(name, range)| {
pycodestyle::rules::ambiguous_variable_name(name, *range)
}));
}
}
- StmtKind::Break => {
- if self.settings.rules.enabled(Rule::BreakOutsideLoop) {
+ Stmt::Break(_) => {
+ if self.enabled(Rule::BreakOutsideLoop) {
if let Some(diagnostic) = pyflakes::rules::break_outside_loop(
stmt,
- &mut self.ctx.parents.iter().rev().map(Into::into).skip(1),
+ &mut self.semantic_model.parents().skip(1),
) {
self.diagnostics.push(diagnostic);
}
}
}
- StmtKind::Continue => {
- if self.settings.rules.enabled(Rule::ContinueOutsideLoop) {
+ Stmt::Continue(_) => {
+ if self.enabled(Rule::ContinueOutsideLoop) {
if let Some(diagnostic) = pyflakes::rules::continue_outside_loop(
stmt,
- &mut self.ctx.parents.iter().rev().map(Into::into).skip(1),
+ &mut self.semantic_model.parents().skip(1),
) {
self.diagnostics.push(diagnostic);
}
}
}
- StmtKind::FunctionDef {
+ Stmt::FunctionDef(ast::StmtFunctionDef {
name,
decorator_list,
returns,
args,
body,
..
- }
- | StmtKind::AsyncFunctionDef {
+ })
+ | Stmt::AsyncFunctionDef(ast::StmtAsyncFunctionDef {
name,
decorator_list,
returns,
args,
body,
..
- } => {
- if self
- .settings
- .rules
- .enabled(Rule::DjangoNonLeadingReceiverDecorator)
- {
+ }) => {
+ if self.enabled(Rule::DjangoNonLeadingReceiverDecorator) {
self.diagnostics
.extend(flake8_django::rules::non_leading_receiver_decorator(
decorator_list,
- |expr| self.ctx.resolve_call_path(expr),
+ |expr| self.semantic_model.resolve_call_path(expr),
));
}
- if self.settings.rules.enabled(Rule::AmbiguousFunctionName) {
+ if self.enabled(Rule::AmbiguousFunctionName) {
if let Some(diagnostic) =
pycodestyle::rules::ambiguous_function_name(name, || {
helpers::identifier_range(stmt, self.locator)
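// Illustrative sketch (standalone, not part of the diff): the AST shape this refactor
// matches against, where each statement variant wraps a dedicated node struct
// (`Stmt::FunctionDef(ast::StmtFunctionDef { .. })`) instead of carrying its fields
// directly on the enum variant. The types below are simplified stand-ins for the
// parser's real nodes.
struct StmtFunctionDef {
    name: String,
    body_len: usize,
}

struct StmtReturn;

enum Stmt {
    FunctionDef(StmtFunctionDef),
    Return(StmtReturn),
}

fn describe(stmt: &Stmt) -> String {
    // Each arm destructures the node struct, mirroring the patterns above.
    match stmt {
        Stmt::FunctionDef(StmtFunctionDef { name, body_len }) => {
            format!("def {name} with {body_len} statement(s)")
        }
        Stmt::Return(_) => "return".to_string(),
    }
}

fn main() {
    let stmt = Stmt::FunctionDef(StmtFunctionDef {
        name: "f".to_string(),
        body_len: 1,
    });
    assert_eq!(describe(&stmt), "def f with 1 statement(s)");
    assert_eq!(describe(&Stmt::Return(StmtReturn)), "return");
}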
@@ -359,28 +391,28 @@ where
}
}
- if self.settings.rules.enabled(Rule::InvalidFunctionName) {
+ if self.enabled(Rule::InvalidStrReturnType) {
+ pylint::rules::invalid_str_return(self, name, body);
+ }
+
+ if self.enabled(Rule::InvalidFunctionName) {
if let Some(diagnostic) = pep8_naming::rules::invalid_function_name(
stmt,
name,
decorator_list,
&self.settings.pep8_naming.ignore_names,
- &self.ctx,
+ &self.semantic_model,
self.locator,
) {
self.diagnostics.push(diagnostic);
}
}
- if self
- .settings
- .rules
- .enabled(Rule::InvalidFirstArgumentNameForClassMethod)
- {
+ if self.enabled(Rule::InvalidFirstArgumentNameForClassMethod) {
if let Some(diagnostic) =
pep8_naming::rules::invalid_first_argument_name_for_class_method(
self,
- self.ctx.scope(),
+ self.semantic_model.scope(),
name,
decorator_list,
args,
@@ -390,15 +422,11 @@ where
}
}
- if self
- .settings
- .rules
- .enabled(Rule::InvalidFirstArgumentNameForMethod)
- {
+ if self.enabled(Rule::InvalidFirstArgumentNameForMethod) {
if let Some(diagnostic) =
pep8_naming::rules::invalid_first_argument_name_for_method(
self,
- self.ctx.scope(),
+ self.semantic_model.scope(),
name,
decorator_list,
args,
@@ -409,17 +437,40 @@ where
}
if self.is_stub {
- if self.settings.rules.enabled(Rule::PassStatementStubBody) {
+ if self.enabled(Rule::PassStatementStubBody) {
flake8_pyi::rules::pass_statement_stub_body(self, body);
}
- if self.settings.rules.enabled(Rule::NonEmptyStubBody) {
+ if self.enabled(Rule::NonEmptyStubBody) {
flake8_pyi::rules::non_empty_stub_body(self, body);
}
+ if self.enabled(Rule::StubBodyMultipleStatements) {
+ flake8_pyi::rules::stub_body_multiple_statements(self, stmt, body);
+ }
+ if self.enabled(Rule::AnyEqNeAnnotation) {
+ flake8_pyi::rules::any_eq_ne_annotation(self, name, args);
+ }
+ if self.enabled(Rule::NonSelfReturnType) {
+ flake8_pyi::rules::non_self_return_type(
+ self,
+ stmt,
+ name,
+ decorator_list,
+ returns.as_ref().map(|expr| &**expr),
+ args,
+ stmt.is_async_function_def_stmt(),
+ );
+ }
+ if self.enabled(Rule::StrOrReprDefinedInStub) {
+ flake8_pyi::rules::str_or_repr_defined_in_stub(self, stmt);
+ }
+ if self.enabled(Rule::NoReturnArgumentAnnotationInStub) {
+ flake8_pyi::rules::no_return_argument_annotation(self, args);
+ }
}
- if self.settings.rules.enabled(Rule::DunderFunctionName) {
+ if self.enabled(Rule::DunderFunctionName) {
if let Some(diagnostic) = pep8_naming::rules::dunder_function_name(
- self.ctx.scope(),
+ self.semantic_model.scope(),
stmt,
name,
self.locator,
@@ -428,26 +479,26 @@ where
}
}
- if self.settings.rules.enabled(Rule::GlobalStatement) {
+ if self.enabled(Rule::GlobalStatement) {
pylint::rules::global_statement(self, name);
}
- if self.settings.rules.enabled(Rule::LRUCacheWithoutParameters)
+ if self.enabled(Rule::LRUCacheWithoutParameters)
&& self.settings.target_version >= PythonVersion::Py38
{
pyupgrade::rules::lru_cache_without_parameters(self, decorator_list);
}
- if self.settings.rules.enabled(Rule::LRUCacheWithMaxsizeNone)
+ if self.enabled(Rule::LRUCacheWithMaxsizeNone)
&& self.settings.target_version >= PythonVersion::Py39
{
pyupgrade::rules::lru_cache_with_maxsize_none(self, decorator_list);
}
- if self.settings.rules.enabled(Rule::CachedInstanceMethod) {
+ if self.enabled(Rule::CachedInstanceMethod) {
flake8_bugbear::rules::cached_instance_method(self, decorator_list);
}
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::UnnecessaryReturnNone,
Rule::ImplicitReturnValue,
Rule::ImplicitReturn,
@@ -464,7 +515,7 @@ where
);
}
- if self.settings.rules.enabled(Rule::UselessReturn) {
+ if self.enabled(Rule::UselessReturn) {
pylint::rules::useless_return(
self,
stmt,
@@ -473,7 +524,7 @@ where
);
}
- if self.settings.rules.enabled(Rule::ComplexStructure) {
+ if self.enabled(Rule::ComplexStructure) {
if let Some(diagnostic) = mccabe::rules::function_is_too_complex(
stmt,
name,
@@ -485,20 +536,20 @@ where
}
}
- if self.settings.rules.enabled(Rule::HardcodedPasswordDefault) {
+ if self.enabled(Rule::HardcodedPasswordDefault) {
self.diagnostics
.extend(flake8_bandit::rules::hardcoded_password_default(args));
}
- if self.settings.rules.enabled(Rule::PropertyWithParameters) {
+ if self.enabled(Rule::PropertyWithParameters) {
pylint::rules::property_with_parameters(self, stmt, decorator_list, args);
}
- if self.settings.rules.enabled(Rule::TooManyArguments) {
+ if self.enabled(Rule::TooManyArguments) {
pylint::rules::too_many_arguments(self, args, stmt);
}
- if self.settings.rules.enabled(Rule::TooManyReturnStatements) {
+ if self.enabled(Rule::TooManyReturnStatements) {
if let Some(diagnostic) = pylint::rules::too_many_return_statements(
stmt,
body,
@@ -509,7 +560,7 @@ where
}
}
- if self.settings.rules.enabled(Rule::TooManyBranches) {
+ if self.enabled(Rule::TooManyBranches) {
if let Some(diagnostic) = pylint::rules::too_many_branches(
stmt,
body,
@@ -520,7 +571,7 @@ where
}
}
- if self.settings.rules.enabled(Rule::TooManyStatements) {
+ if self.enabled(Rule::TooManyStatements) {
if let Some(diagnostic) = pylint::rules::too_many_statements(
stmt,
body,
@@ -531,7 +582,7 @@ where
}
}
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::PytestFixtureIncorrectParenthesesStyle,
Rule::PytestFixturePositionalArgs,
Rule::PytestExtraneousScopeFunction,
@@ -554,25 +605,21 @@ where
);
}
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::PytestParametrizeNamesWrongType,
Rule::PytestParametrizeValuesWrongType,
]) {
flake8_pytest_style::rules::parametrize(self, decorator_list);
}
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::PytestIncorrectMarkParenthesesStyle,
Rule::PytestUseFixturesWithoutParameters,
]) {
flake8_pytest_style::rules::marks(self, decorator_list);
}
- if self
- .settings
- .rules
- .enabled(Rule::BooleanPositionalArgInFunctionDefinition)
- {
+ if self.enabled(Rule::BooleanPositionalArgInFunctionDefinition) {
flake8_boolean_trap::rules::check_positional_boolean_in_def(
self,
name,
@@ -581,11 +628,7 @@ where
);
}
- if self
- .settings
- .rules
- .enabled(Rule::BooleanDefaultValueInFunctionDefinition)
- {
+ if self.enabled(Rule::BooleanDefaultValueInFunctionDefinition) {
flake8_boolean_trap::rules::check_boolean_default_value_in_function_definition(
self,
name,
@@ -594,11 +637,7 @@ where
);
}
- if self
- .settings
- .rules
- .enabled(Rule::UnexpectedSpecialMethodSignature)
- {
+ if self.enabled(Rule::UnexpectedSpecialMethodSignature) {
pylint::rules::unexpected_special_method_signature(
self,
stmt,
@@ -609,161 +648,87 @@ where
);
}
- self.check_builtin_shadowing(name, stmt, true);
-
- // Visit the decorators and arguments, but avoid the body, which will be
- // deferred.
- for expr in decorator_list {
- self.visit_expr(expr);
+ if self.enabled(Rule::FStringDocstring) {
+ flake8_bugbear::rules::f_string_docstring(self, body);
}
- // Function annotations are always evaluated at runtime, unless future annotations
- // are enabled.
- let runtime_annotation = !self.ctx.annotations_future_enabled;
-
- for arg in &args.posonlyargs {
- if let Some(expr) = &arg.node.annotation {
- if runtime_annotation {
- visit_type_definition!(self, expr);
- } else {
- self.visit_annotation(expr);
- };
- }
- }
- for arg in &args.args {
- if let Some(expr) = &arg.node.annotation {
- if runtime_annotation {
- visit_type_definition!(self, expr);
- } else {
- self.visit_annotation(expr);
- };
- }
- }
- if let Some(arg) = &args.vararg {
- if let Some(expr) = &arg.node.annotation {
- if runtime_annotation {
- visit_type_definition!(self, expr);
- } else {
- self.visit_annotation(expr);
- };
- }
+ if self.enabled(Rule::YieldInForLoop) {
+ pyupgrade::rules::yield_in_for_loop(self, stmt);
}
- for arg in &args.kwonlyargs {
- if let Some(expr) = &arg.node.annotation {
- if runtime_annotation {
- visit_type_definition!(self, expr);
- } else {
- self.visit_annotation(expr);
- };
+
+ if self.semantic_model.scope().kind.is_class() {
+ if self.enabled(Rule::BuiltinAttributeShadowing) {
+ flake8_builtins::rules::builtin_attribute_shadowing(
+ self,
+ name,
+ AnyShadowing::from(stmt),
+ );
}
- }
- if let Some(arg) = &args.kwarg {
- if let Some(expr) = &arg.node.annotation {
- if runtime_annotation {
- visit_type_definition!(self, expr);
- } else {
- self.visit_annotation(expr);
- };
+ } else {
+ if self.enabled(Rule::BuiltinVariableShadowing) {
+ flake8_builtins::rules::builtin_variable_shadowing(
+ self,
+ name,
+ AnyShadowing::from(stmt),
+ );
}
}
- for expr in returns {
- if runtime_annotation {
- visit_type_definition!(self, expr);
- } else {
- self.visit_annotation(expr);
- };
- }
- for expr in &args.kw_defaults {
- self.visit_expr(expr);
- }
- for expr in &args.defaults {
- self.visit_expr(expr);
- }
-
- self.add_binding(
- name,
- Binding {
- kind: BindingKind::FunctionDefinition,
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: stmt.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
- );
}
- StmtKind::Return { .. } => {
- if self.settings.rules.enabled(Rule::ReturnOutsideFunction) {
+ Stmt::Return(_) => {
+ if self.enabled(Rule::ReturnOutsideFunction) {
pyflakes::rules::return_outside_function(self, stmt);
}
- if self.settings.rules.enabled(Rule::ReturnInInit) {
+ if self.enabled(Rule::ReturnInInit) {
pylint::rules::return_in_init(self, stmt);
}
}
- StmtKind::ClassDef {
+ Stmt::ClassDef(ast::StmtClassDef {
name,
bases,
keywords,
decorator_list,
body,
- } => {
- if self
- .settings
- .rules
- .enabled(Rule::DjangoNullableModelStringField)
- {
+ range: _,
+ }) => {
+ if self.enabled(Rule::DjangoNullableModelStringField) {
self.diagnostics
.extend(flake8_django::rules::nullable_model_string_field(
self, body,
));
}
- if self
- .settings
- .rules
- .enabled(Rule::DjangoExcludeWithModelForm)
- {
+ if self.enabled(Rule::DjangoExcludeWithModelForm) {
if let Some(diagnostic) =
flake8_django::rules::exclude_with_model_form(self, bases, body)
{
self.diagnostics.push(diagnostic);
}
}
- if self.settings.rules.enabled(Rule::DjangoAllWithModelForm) {
+ if self.enabled(Rule::DjangoAllWithModelForm) {
if let Some(diagnostic) =
flake8_django::rules::all_with_model_form(self, bases, body)
{
self.diagnostics.push(diagnostic);
}
}
- if self
- .settings
- .rules
- .enabled(Rule::DjangoModelWithoutDunderStr)
- {
+ if self.enabled(Rule::DjangoModelWithoutDunderStr) {
if let Some(diagnostic) =
flake8_django::rules::model_without_dunder_str(self, bases, body, stmt)
{
self.diagnostics.push(diagnostic);
}
}
- if self
- .settings
- .rules
- .enabled(Rule::DjangoUnorderedBodyContentInModel)
- {
+ if self.enabled(Rule::DjangoUnorderedBodyContentInModel) {
flake8_django::rules::unordered_body_content_in_model(self, bases, body);
}
- if self.settings.rules.enabled(Rule::GlobalStatement) {
+ if self.enabled(Rule::GlobalStatement) {
pylint::rules::global_statement(self, name);
}
- if self.settings.rules.enabled(Rule::UselessObjectInheritance) {
+ if self.enabled(Rule::UselessObjectInheritance) {
pyupgrade::rules::useless_object_inheritance(self, stmt, name, bases, keywords);
}
- if self.settings.rules.enabled(Rule::AmbiguousClassName) {
+ if self.enabled(Rule::AmbiguousClassName) {
if let Some(diagnostic) = pycodestyle::rules::ambiguous_class_name(name, || {
helpers::identifier_range(stmt, self.locator)
}) {
@@ -771,7 +736,7 @@ where
}
}
- if self.settings.rules.enabled(Rule::InvalidClassName) {
+ if self.enabled(Rule::InvalidClassName) {
if let Some(diagnostic) =
pep8_naming::rules::invalid_class_name(stmt, name, self.locator)
{
@@ -779,11 +744,7 @@ where
}
}
- if self
- .settings
- .rules
- .enabled(Rule::ErrorSuffixOnExceptionName)
- {
+ if self.enabled(Rule::ErrorSuffixOnExceptionName) {
if let Some(diagnostic) = pep8_naming::rules::error_suffix_on_exception_name(
stmt,
bases,
@@ -795,7 +756,7 @@ where
}
if !self.is_stub {
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::AbstractBaseClassWithoutAbstractMethod,
Rule::EmptyMethodWithoutAbstractDecorator,
]) {
@@ -805,216 +766,182 @@ where
}
}
if self.is_stub {
- if self.settings.rules.enabled(Rule::PassStatementStubBody) {
+ if self.enabled(Rule::PassStatementStubBody) {
flake8_pyi::rules::pass_statement_stub_body(self, body);
}
- if self.settings.rules.enabled(Rule::PassInClassBody) {
+ if self.enabled(Rule::PassInClassBody) {
flake8_pyi::rules::pass_in_class_body(self, stmt, body);
}
+ if self.enabled(Rule::EllipsisInNonEmptyClassBody) {
+ flake8_pyi::rules::ellipsis_in_non_empty_class_body(self, stmt, body);
+ }
}
- if self
- .settings
- .rules
- .enabled(Rule::PytestIncorrectMarkParenthesesStyle)
- {
+ if self.enabled(Rule::PytestIncorrectMarkParenthesesStyle) {
flake8_pytest_style::rules::marks(self, decorator_list);
}
- if self
- .settings
- .rules
- .enabled(Rule::DuplicateClassFieldDefinition)
- {
+ if self.enabled(Rule::DuplicateClassFieldDefinition) {
flake8_pie::rules::duplicate_class_field_definition(self, stmt, body);
}
- if self.settings.rules.enabled(Rule::NonUniqueEnums) {
+ if self.enabled(Rule::NonUniqueEnums) {
flake8_pie::rules::non_unique_enums(self, stmt, body);
}
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::MutableDataclassDefault,
Rule::FunctionCallInDataclassDefaultArgument,
- ]) && ruff::rules::is_dataclass(self, decorator_list)
+ ]) && ruff::rules::is_dataclass(&self.semantic_model, decorator_list)
{
- if self.settings.rules.enabled(Rule::MutableDataclassDefault) {
+ if self.enabled(Rule::MutableDataclassDefault) {
ruff::rules::mutable_dataclass_default(self, body);
}
- if self
- .settings
- .rules
- .enabled(Rule::FunctionCallInDataclassDefaultArgument)
- {
+ if self.enabled(Rule::FunctionCallInDataclassDefaultArgument) {
ruff::rules::function_call_in_dataclass_defaults(self, body);
}
}
- self.check_builtin_shadowing(name, stmt, false);
-
- for expr in bases {
- self.visit_expr(expr);
+ if self.enabled(Rule::FStringDocstring) {
+ flake8_bugbear::rules::f_string_docstring(self, body);
}
- for keyword in keywords {
- self.visit_keyword(keyword);
+
+ if self.enabled(Rule::BuiltinVariableShadowing) {
+ flake8_builtins::rules::builtin_variable_shadowing(
+ self,
+ name,
+ AnyShadowing::from(stmt),
+ );
}
- for expr in decorator_list {
- self.visit_expr(expr);
+
+ if self.enabled(Rule::DuplicateBases) {
+ pylint::rules::duplicate_bases(self, name, bases);
}
}
- StmtKind::Import { names } => {
- if self.settings.rules.enabled(Rule::MultipleImportsOnOneLine) {
+ Stmt::Import(ast::StmtImport { names, range: _ }) => {
+ if self.enabled(Rule::MultipleImportsOnOneLine) {
pycodestyle::rules::multiple_imports_on_one_line(self, stmt, names);
}
- if self
- .settings
- .rules
- .enabled(Rule::ModuleImportNotAtTopOfFile)
- {
+ if self.enabled(Rule::ModuleImportNotAtTopOfFile) {
pycodestyle::rules::module_import_not_at_top_of_file(self, stmt, self.locator);
}
- if self.settings.rules.enabled(Rule::GlobalStatement) {
+ if self.enabled(Rule::GlobalStatement) {
for name in names.iter() {
- if let Some(asname) = name.node.asname.as_ref() {
+ if let Some(asname) = name.asname.as_ref() {
pylint::rules::global_statement(self, asname);
} else {
- pylint::rules::global_statement(self, &name.node.name);
+ pylint::rules::global_statement(self, &name.name);
}
}
}
- if self.settings.rules.enabled(Rule::DeprecatedCElementTree) {
+ if self.enabled(Rule::DeprecatedCElementTree) {
pyupgrade::rules::deprecated_c_element_tree(self, stmt);
}
- if self.settings.rules.enabled(Rule::DeprecatedMockImport) {
+ if self.enabled(Rule::DeprecatedMockImport) {
pyupgrade::rules::deprecated_mock_import(self, stmt);
}
for alias in names {
- if alias.node.name == "__future__" {
- let name = alias.node.asname.as_ref().unwrap_or(&alias.node.name);
+ if &alias.name == "__future__" {
+ let name = alias.asname.as_ref().unwrap_or(&alias.name);
self.add_binding(
name,
- Binding {
- kind: BindingKind::FutureImportation,
- runtime_usage: None,
- // Always mark `__future__` imports as used.
- synthetic_usage: Some((self.ctx.scope_id, alias.range())),
- typing_usage: None,
- range: alias.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
+ alias.range(),
+ BindingKind::FutureImportation,
+ BindingFlags::empty(),
);
- if self.settings.rules.enabled(Rule::LateFutureImport)
- && !self.ctx.futures_allowed
- {
- self.diagnostics.push(Diagnostic::new(
- pyflakes::rules::LateFutureImport,
- stmt.range(),
- ));
+ if self.enabled(Rule::LateFutureImport) {
+ if self.semantic_model.seen_futures_boundary() {
+ self.diagnostics.push(Diagnostic::new(
+ pyflakes::rules::LateFutureImport,
+ stmt.range(),
+ ));
+ }
}
- } else if alias.node.name.contains('.') && alias.node.asname.is_none() {
- // Given `import foo.bar`, `name` would be "foo", and `full_name` would be
+ } else if alias.name.contains('.') && alias.asname.is_none() {
+ // Given `import foo.bar`, `name` would be "foo", and `qualified_name` would be
// "foo.bar".
- let name = alias.node.name.split('.').next().unwrap();
- let full_name = &alias.node.name;
+ let name = alias.name.split('.').next().unwrap();
+ let qualified_name = &alias.name;
self.add_binding(
name,
- Binding {
- kind: BindingKind::SubmoduleImportation(SubmoduleImportation {
- name,
- full_name,
- }),
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: alias.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
+ alias.range(),
+ BindingKind::SubmoduleImportation(SubmoduleImportation {
+ qualified_name,
+ }),
+ BindingFlags::empty(),
);
} else {
- // Treat explicit re-export as usage (e.g., `from .applications
- // import FastAPI as FastAPI`).
- let is_explicit_reexport = alias
- .node
- .asname
- .as_ref()
- .map_or(false, |asname| asname == &alias.node.name);
-
- let name = alias.node.asname.as_ref().unwrap_or(&alias.node.name);
- let full_name = &alias.node.name;
+ let name = alias.asname.as_ref().unwrap_or(&alias.name);
+ let qualified_name = &alias.name;
self.add_binding(
name,
- Binding {
- kind: BindingKind::Importation(Importation { name, full_name }),
- runtime_usage: None,
- synthetic_usage: if is_explicit_reexport {
- Some((self.ctx.scope_id, alias.range()))
- } else {
- None
- },
- typing_usage: None,
- range: alias.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
+ alias.range(),
+ BindingKind::Importation(Importation { qualified_name }),
+ if alias
+ .asname
+ .as_ref()
+ .map_or(false, |asname| asname == &alias.name)
+ {
+ BindingFlags::EXPLICIT_EXPORT
+ } else {
+ BindingFlags::empty()
},
);
- if let Some(asname) = &alias.node.asname {
- self.check_builtin_shadowing(asname, stmt, false);
+ if let Some(asname) = &alias.asname {
+ if self.enabled(Rule::BuiltinVariableShadowing) {
+ flake8_builtins::rules::builtin_variable_shadowing(
+ self,
+ asname,
+ AnyShadowing::from(stmt),
+ );
+ }
}
}
// flake8-debugger
- if self.settings.rules.enabled(Rule::Debugger) {
+ if self.enabled(Rule::Debugger) {
if let Some(diagnostic) =
- flake8_debugger::rules::debugger_import(stmt, None, &alias.node.name)
+ flake8_debugger::rules::debugger_import(stmt, None, &alias.name)
{
self.diagnostics.push(diagnostic);
}
}
// flake8_tidy_imports
- if self.settings.rules.enabled(Rule::BannedApi) {
- flake8_tidy_imports::banned_api::name_or_parent_is_banned(
+ if self.enabled(Rule::BannedApi) {
+ flake8_tidy_imports::rules::name_or_parent_is_banned(
self,
- &alias.node.name,
+ &alias.name,
alias,
);
}
// pylint
if !self.is_stub {
- if self.settings.rules.enabled(Rule::UselessImportAlias) {
+ if self.enabled(Rule::UselessImportAlias) {
pylint::rules::useless_import_alias(self, alias);
}
}
- if self.settings.rules.enabled(Rule::ManualFromImport) {
+ if self.enabled(Rule::ManualFromImport) {
pylint::rules::manual_from_import(self, stmt, alias, names);
}
- if self.settings.rules.enabled(Rule::ImportSelf) {
+ if self.enabled(Rule::ImportSelf) {
if let Some(diagnostic) =
- pylint::rules::import_self(alias, self.module_path.as_deref())
+ pylint::rules::import_self(alias, self.module_path)
{
self.diagnostics.push(diagnostic);
}
}
- if let Some(asname) = &alias.node.asname {
- let name = alias.node.name.split('.').last().unwrap();
- if self
- .settings
- .rules
- .enabled(Rule::ConstantImportedAsNonConstant)
- {
+ if let Some(asname) = &alias.asname {
+ let name = alias.name.split('.').last().unwrap();
+ if self.enabled(Rule::ConstantImportedAsNonConstant) {
if let Some(diagnostic) =
pep8_naming::rules::constant_imported_as_non_constant(
name, asname, alias, stmt,
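// Illustrative sketch (standalone, not part of the diff): the binding conventions
// described in the comments above for plain `import` statements. For `import foo.bar`
// only the first segment is bound while the qualified name keeps the full dotted path,
// and `import foo as foo` (alias equal to the module name) is flagged as an explicit
// re-export. `ImportBinding` and `bind_import` are simplified stand-ins, not the
// crate's binding types.
struct ImportBinding {
    /// The name bound in the scope (`foo` for `import foo.bar`).
    name: String,
    /// The fully qualified module path (`foo.bar`).
    qualified_name: String,
    /// Whether the alias repeats the module name (`import foo as foo`).
    explicit_export: bool,
}

fn bind_import(module: &str, asname: Option<&str>) -> ImportBinding {
    match asname {
        // `import foo.bar as baz` (or `import foo as foo`): bind the alias.
        Some(asname) => ImportBinding {
            name: asname.to_string(),
            qualified_name: module.to_string(),
            explicit_export: asname == module,
        },
        // `import foo.bar`: only the first segment is bound.
        None => ImportBinding {
            name: module.split('.').next().unwrap_or(module).to_string(),
            qualified_name: module.to_string(),
            explicit_export: false,
        },
    }
}

fn main() {
    let binding = bind_import("foo.bar", None);
    assert_eq!(binding.name, "foo");
    assert_eq!(binding.qualified_name, "foo.bar");
    assert!(bind_import("foo", Some("foo")).explicit_export);
}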
@@ -1024,11 +951,7 @@ where
}
}
- if self
- .settings
- .rules
- .enabled(Rule::LowercaseImportedAsNonLowercase)
- {
+ if self.enabled(Rule::LowercaseImportedAsNonLowercase) {
if let Some(diagnostic) =
pep8_naming::rules::lowercase_imported_as_non_lowercase(
name, asname, alias, stmt,
@@ -1038,11 +961,7 @@ where
}
}
- if self
- .settings
- .rules
- .enabled(Rule::CamelcaseImportedAsLowercase)
- {
+ if self.enabled(Rule::CamelcaseImportedAsLowercase) {
if let Some(diagnostic) =
pep8_naming::rules::camelcase_imported_as_lowercase(
name, asname, alias, stmt,
@@ -1052,11 +971,7 @@ where
}
}
- if self
- .settings
- .rules
- .enabled(Rule::CamelcaseImportedAsConstant)
- {
+ if self.enabled(Rule::CamelcaseImportedAsConstant) {
if let Some(diagnostic) =
pep8_naming::rules::camelcase_imported_as_constant(
name, asname, alias, stmt,
@@ -1066,11 +981,7 @@ where
}
}
- if self
- .settings
- .rules
- .enabled(Rule::CamelcaseImportedAsAcronym)
- {
+ if self.enabled(Rule::CamelcaseImportedAsAcronym) {
if let Some(diagnostic) =
pep8_naming::rules::camelcase_imported_as_acronym(
name, asname, alias, stmt,
@@ -1081,12 +992,12 @@ where
}
}
- if self.settings.rules.enabled(Rule::UnconventionalImportAlias) {
+ if self.enabled(Rule::UnconventionalImportAlias) {
if let Some(diagnostic) =
flake8_import_conventions::rules::conventional_import_alias(
stmt,
- &alias.node.name,
- alias.node.asname.as_deref(),
+ &alias.name,
+ alias.asname.as_deref(),
&self.settings.flake8_import_conventions.aliases,
)
{
@@ -1094,12 +1005,12 @@ where
}
}
- if self.settings.rules.enabled(Rule::BannedImportAlias) {
- if let Some(asname) = &alias.node.asname {
+ if self.enabled(Rule::BannedImportAlias) {
+ if let Some(asname) = &alias.asname {
if let Some(diagnostic) =
flake8_import_conventions::rules::banned_import_alias(
stmt,
- &alias.node.name,
+ &alias.name,
asname,
&self.settings.flake8_import_conventions.banned_aliases,
)
@@ -1109,259 +1020,205 @@ where
}
}
- if self
- .settings
- .rules
- .enabled(Rule::PytestIncorrectPytestImport)
- {
+ if self.enabled(Rule::PytestIncorrectPytestImport) {
if let Some(diagnostic) = flake8_pytest_style::rules::import(
stmt,
- &alias.node.name,
- alias.node.asname.as_deref(),
+ &alias.name,
+ alias.asname.as_deref(),
) {
self.diagnostics.push(diagnostic);
}
}
}
}
- StmtKind::ImportFrom {
- names,
- module,
- level,
- } => {
- if self
- .settings
- .rules
- .enabled(Rule::ModuleImportNotAtTopOfFile)
- {
+ Stmt::ImportFrom(
+ import_from @ ast::StmtImportFrom {
+ names,
+ module,
+ level,
+ range: _,
+ },
+ ) => {
+ let module = module.as_deref();
+ let level = level.map(|level| level.to_u32());
+ if self.enabled(Rule::ModuleImportNotAtTopOfFile) {
pycodestyle::rules::module_import_not_at_top_of_file(self, stmt, self.locator);
}
- if self.settings.rules.enabled(Rule::GlobalStatement) {
+ if self.enabled(Rule::GlobalStatement) {
for name in names.iter() {
- if let Some(asname) = name.node.asname.as_ref() {
+ if let Some(asname) = name.asname.as_ref() {
pylint::rules::global_statement(self, asname);
} else {
- pylint::rules::global_statement(self, &name.node.name);
+ pylint::rules::global_statement(self, &name.name);
}
}
}
- if self.settings.rules.enabled(Rule::UnnecessaryFutureImport)
+ if self.enabled(Rule::UnnecessaryFutureImport)
&& self.settings.target_version >= PythonVersion::Py37
{
- if let Some("__future__") = module.as_deref() {
+ if let Some("__future__") = module {
pyupgrade::rules::unnecessary_future_import(self, stmt, names);
}
}
- if self.settings.rules.enabled(Rule::DeprecatedMockImport) {
+ if self.enabled(Rule::DeprecatedMockImport) {
pyupgrade::rules::deprecated_mock_import(self, stmt);
}
- if self.settings.rules.enabled(Rule::DeprecatedCElementTree) {
+ if self.enabled(Rule::DeprecatedCElementTree) {
pyupgrade::rules::deprecated_c_element_tree(self, stmt);
}
- if self.settings.rules.enabled(Rule::DeprecatedImport) {
- pyupgrade::rules::deprecated_import(
- self,
- stmt,
- names,
- module.as_ref().map(String::as_str),
- *level,
- );
+ if self.enabled(Rule::DeprecatedImport) {
+ pyupgrade::rules::deprecated_import(self, stmt, names, module, level);
}
- if self.settings.rules.enabled(Rule::UnnecessaryBuiltinImport) {
- if let Some(module) = module.as_deref() {
+ if self.enabled(Rule::UnnecessaryBuiltinImport) {
+ if let Some(module) = module {
pyupgrade::rules::unnecessary_builtin_import(self, stmt, module, names);
}
}
-
- if self.settings.rules.enabled(Rule::BannedApi) {
- if let Some(module) = helpers::resolve_imported_module_path(
- *level,
- module.as_deref(),
- self.module_path.as_deref(),
- ) {
- flake8_tidy_imports::banned_api::name_or_parent_is_banned(
- self, &module, stmt,
- );
+ if self.enabled(Rule::BannedApi) {
+ if let Some(module) =
+ helpers::resolve_imported_module_path(level, module, self.module_path)
+ {
+ flake8_tidy_imports::rules::name_or_parent_is_banned(self, &module, stmt);
for alias in names {
- if alias.node.name == "*" {
+ if &alias.name == "*" {
continue;
}
- flake8_tidy_imports::banned_api::name_is_banned(
+ flake8_tidy_imports::rules::name_is_banned(
self,
- format!("{module}.{}", alias.node.name),
+ format!("{module}.{}", alias.name),
alias,
);
}
}
}
- if self
- .settings
- .rules
- .enabled(Rule::PytestIncorrectPytestImport)
- {
+ if self.enabled(Rule::PytestIncorrectPytestImport) {
if let Some(diagnostic) =
- flake8_pytest_style::rules::import_from(stmt, module.as_deref(), *level)
+ flake8_pytest_style::rules::import_from(stmt, module, level)
{
self.diagnostics.push(diagnostic);
}
}
+ if self.is_stub {
+ if self.enabled(Rule::UnaliasedCollectionsAbcSetImport) {
+ flake8_pyi::rules::unaliased_collections_abc_set_import(self, import_from);
+ }
+ }
for alias in names {
- if let Some("__future__") = module.as_deref() {
- let name = alias.node.asname.as_ref().unwrap_or(&alias.node.name);
+ if let Some("__future__") = module {
+ let name = alias.asname.as_ref().unwrap_or(&alias.name);
+
self.add_binding(
name,
- Binding {
- kind: BindingKind::FutureImportation,
- runtime_usage: None,
- // Always mark `__future__` imports as used.
- synthetic_usage: Some((self.ctx.scope_id, alias.range())),
- typing_usage: None,
- range: alias.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
+ alias.range(),
+ BindingKind::FutureImportation,
+ BindingFlags::empty(),
);
- if alias.node.name == "annotations" {
- self.ctx.annotations_future_enabled = true;
- }
-
- if self.settings.rules.enabled(Rule::FutureFeatureNotDefined) {
+ if self.enabled(Rule::FutureFeatureNotDefined) {
pyflakes::rules::future_feature_not_defined(self, alias);
}
- if self.settings.rules.enabled(Rule::LateFutureImport)
- && !self.ctx.futures_allowed
- {
- self.diagnostics.push(Diagnostic::new(
- pyflakes::rules::LateFutureImport,
- stmt.range(),
- ));
+ if self.enabled(Rule::LateFutureImport) {
+ if self.semantic_model.seen_futures_boundary() {
+ self.diagnostics.push(Diagnostic::new(
+ pyflakes::rules::LateFutureImport,
+ stmt.range(),
+ ));
+ }
}
- } else if alias.node.name == "*" {
- self.ctx.scope_mut().add_star_import(StarImportation {
- module: module.as_ref().map(String::as_str),
- level: *level,
- });
+ } else if &alias.name == "*" {
+ self.semantic_model
+ .scope_mut()
+ .add_star_import(StarImportation { level, module });
- if self
- .settings
- .rules
- .enabled(Rule::UndefinedLocalWithNestedImportStarUsage)
- {
- let scope = self.ctx.scope();
+ if self.enabled(Rule::UndefinedLocalWithNestedImportStarUsage) {
+ let scope = self.semantic_model.scope();
if !matches!(scope.kind, ScopeKind::Module) {
self.diagnostics.push(Diagnostic::new(
pyflakes::rules::UndefinedLocalWithNestedImportStarUsage {
- name: helpers::format_import_from(
- *level,
- module.as_deref(),
- ),
+ name: helpers::format_import_from(level, module),
},
stmt.range(),
));
}
}
- if self
- .settings
- .rules
- .enabled(Rule::UndefinedLocalWithImportStar)
- {
+ if self.enabled(Rule::UndefinedLocalWithImportStar) {
self.diagnostics.push(Diagnostic::new(
pyflakes::rules::UndefinedLocalWithImportStar {
- name: helpers::format_import_from(*level, module.as_deref()),
+ name: helpers::format_import_from(level, module),
},
stmt.range(),
));
}
} else {
- if let Some(asname) = &alias.node.asname {
- self.check_builtin_shadowing(asname, stmt, false);
+ if let Some(asname) = &alias.asname {
+ if self.enabled(Rule::BuiltinVariableShadowing) {
+ flake8_builtins::rules::builtin_variable_shadowing(
+ self,
+ asname,
+ AnyShadowing::from(stmt),
+ );
+ }
}
- // Treat explicit re-export as usage (e.g., `from .applications
- // import FastAPI as FastAPI`).
- let is_explicit_reexport = alias
- .node
- .asname
- .as_ref()
- .map_or(false, |asname| asname == &alias.node.name);
-
- // Given `from foo import bar`, `name` would be "bar" and `full_name` would
+ // Given `from foo import bar`, `name` would be "bar" and `qualified_name` would
// be "foo.bar". Given `from foo import bar as baz`, `name` would be "baz"
- // and `full_name` would be "foo.bar".
- let name = alias.node.asname.as_ref().unwrap_or(&alias.node.name);
- let full_name = helpers::format_import_from_member(
- *level,
- module.as_deref(),
- &alias.node.name,
- );
+ // and `qualified_name` would be "foo.bar".
+ let name = alias.asname.as_ref().unwrap_or(&alias.name);
+ let qualified_name =
+ helpers::format_import_from_member(level, module, &alias.name);
self.add_binding(
name,
- Binding {
- kind: BindingKind::FromImportation(FromImportation {
- name,
- full_name,
- }),
- runtime_usage: None,
- synthetic_usage: if is_explicit_reexport {
- Some((self.ctx.scope_id, alias.range()))
- } else {
- None
- },
- typing_usage: None,
- range: alias.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
+ alias.range(),
+ BindingKind::FromImportation(FromImportation { qualified_name }),
+ if alias
+ .asname
+ .as_ref()
+ .map_or(false, |asname| asname == &alias.name)
+ {
+ BindingFlags::EXPLICIT_EXPORT
+ } else {
+ BindingFlags::empty()
},
);
}
- if self.settings.rules.enabled(Rule::RelativeImports) {
- if let Some(diagnostic) =
- flake8_tidy_imports::relative_imports::banned_relative_import(
- self,
- stmt,
- *level,
- module.as_deref(),
- self.module_path.as_deref(),
- &self.settings.flake8_tidy_imports.ban_relative_imports,
- )
- {
+ if self.enabled(Rule::RelativeImports) {
+ if let Some(diagnostic) = flake8_tidy_imports::rules::banned_relative_import(
+ self,
+ stmt,
+ level,
+ module,
+ self.module_path,
+ self.settings.flake8_tidy_imports.ban_relative_imports,
+ ) {
self.diagnostics.push(diagnostic);
}
}
// flake8-debugger
- if self.settings.rules.enabled(Rule::Debugger) {
- if let Some(diagnostic) = flake8_debugger::rules::debugger_import(
- stmt,
- module.as_deref(),
- &alias.node.name,
- ) {
+ if self.enabled(Rule::Debugger) {
+ if let Some(diagnostic) =
+ flake8_debugger::rules::debugger_import(stmt, module, &alias.name)
+ {
self.diagnostics.push(diagnostic);
}
}
- if self.settings.rules.enabled(Rule::UnconventionalImportAlias) {
- let full_name = helpers::format_import_from_member(
- *level,
- module.as_deref(),
- &alias.node.name,
- );
+ if self.enabled(Rule::UnconventionalImportAlias) {
+ let qualified_name =
+ helpers::format_import_from_member(level, module, &alias.name);
if let Some(diagnostic) =
flake8_import_conventions::rules::conventional_import_alias(
stmt,
- &full_name,
- alias.node.asname.as_deref(),
+ &qualified_name,
+ alias.asname.as_deref(),
&self.settings.flake8_import_conventions.aliases,
)
{
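// Illustrative sketch (standalone, not part of the diff): an approximation of how the
// qualified name described above is assembled for `from` imports, with relative levels
// rendered as leading dots. The crate's `format_import_from_member` helper may differ
// in edge cases; this only makes the naming convention concrete.
fn qualified_name(level: u32, module: Option<&str>, member: &str) -> String {
    let mut out = ".".repeat(level as usize);
    if let Some(module) = module {
        out.push_str(module);
        out.push('.');
    }
    out.push_str(member);
    out
}

fn main() {
    assert_eq!(qualified_name(0, Some("foo"), "bar"), "foo.bar"); // from foo import bar
    assert_eq!(qualified_name(2, Some("foo"), "bar"), "..foo.bar"); // from ..foo import bar
    assert_eq!(qualified_name(1, None, "bar"), ".bar"); // from . import bar
}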
@@ -1369,17 +1226,14 @@ where
}
}
- if self.settings.rules.enabled(Rule::BannedImportAlias) {
- if let Some(asname) = &alias.node.asname {
- let full_name = helpers::format_import_from_member(
- *level,
- module.as_deref(),
- &alias.node.name,
- );
+ if self.enabled(Rule::BannedImportAlias) {
+ if let Some(asname) = &alias.asname {
+ let qualified_name =
+ helpers::format_import_from_member(level, module, &alias.name);
if let Some(diagnostic) =
flake8_import_conventions::rules::banned_import_alias(
stmt,
- &full_name,
+ &qualified_name,
asname,
&self.settings.flake8_import_conventions.banned_aliases,
)
@@ -1389,15 +1243,11 @@ where
}
}
- if let Some(asname) = &alias.node.asname {
- if self
- .settings
- .rules
- .enabled(Rule::ConstantImportedAsNonConstant)
- {
+ if let Some(asname) = &alias.asname {
+ if self.enabled(Rule::ConstantImportedAsNonConstant) {
if let Some(diagnostic) =
pep8_naming::rules::constant_imported_as_non_constant(
- &alias.node.name,
+ &alias.name,
asname,
alias,
stmt,
@@ -1407,14 +1257,10 @@ where
}
}
- if self
- .settings
- .rules
- .enabled(Rule::LowercaseImportedAsNonLowercase)
- {
+ if self.enabled(Rule::LowercaseImportedAsNonLowercase) {
if let Some(diagnostic) =
pep8_naming::rules::lowercase_imported_as_non_lowercase(
- &alias.node.name,
+ &alias.name,
asname,
alias,
stmt,
@@ -1424,14 +1270,10 @@ where
}
}
- if self
- .settings
- .rules
- .enabled(Rule::CamelcaseImportedAsLowercase)
- {
+ if self.enabled(Rule::CamelcaseImportedAsLowercase) {
if let Some(diagnostic) =
pep8_naming::rules::camelcase_imported_as_lowercase(
- &alias.node.name,
+ &alias.name,
asname,
alias,
stmt,
@@ -1441,14 +1283,10 @@ where
}
}
- if self
- .settings
- .rules
- .enabled(Rule::CamelcaseImportedAsConstant)
- {
+ if self.enabled(Rule::CamelcaseImportedAsConstant) {
if let Some(diagnostic) =
pep8_naming::rules::camelcase_imported_as_constant(
- &alias.node.name,
+ &alias.name,
asname,
alias,
stmt,
@@ -1458,14 +1296,10 @@ where
}
}
- if self
- .settings
- .rules
- .enabled(Rule::CamelcaseImportedAsAcronym)
- {
+ if self.enabled(Rule::CamelcaseImportedAsAcronym) {
if let Some(diagnostic) =
pep8_naming::rules::camelcase_imported_as_acronym(
- &alias.node.name,
+ &alias.name,
asname,
alias,
stmt,
@@ -1477,46 +1311,43 @@ where
// pylint
if !self.is_stub {
- if self.settings.rules.enabled(Rule::UselessImportAlias) {
+ if self.enabled(Rule::UselessImportAlias) {
pylint::rules::useless_import_alias(self, alias);
}
}
}
}
- if self.settings.rules.enabled(Rule::ImportSelf) {
- if let Some(diagnostic) = pylint::rules::import_from_self(
- *level,
- module.as_deref(),
- names,
- self.module_path.as_deref(),
- ) {
+ if self.enabled(Rule::ImportSelf) {
+ if let Some(diagnostic) =
+ pylint::rules::import_from_self(level, module, names, self.module_path)
+ {
self.diagnostics.push(diagnostic);
}
}
- if self.settings.rules.enabled(Rule::BannedImportFrom) {
+ if self.enabled(Rule::BannedImportFrom) {
if let Some(diagnostic) = flake8_import_conventions::rules::banned_import_from(
stmt,
- &helpers::format_import_from(*level, module.as_deref()),
+ &helpers::format_import_from(level, module),
&self.settings.flake8_import_conventions.banned_from,
) {
self.diagnostics.push(diagnostic);
}
}
}
- StmtKind::Raise { exc, .. } => {
- if self.settings.rules.enabled(Rule::RaiseNotImplemented) {
+ Stmt::Raise(ast::StmtRaise { exc, .. }) => {
+ if self.enabled(Rule::RaiseNotImplemented) {
if let Some(expr) = exc {
pyflakes::rules::raise_not_implemented(self, expr);
}
}
- if self.settings.rules.enabled(Rule::CannotRaiseLiteral) {
+ if self.enabled(Rule::CannotRaiseLiteral) {
if let Some(exc) = exc {
flake8_bugbear::rules::cannot_raise_literal(self, exc);
}
}
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::RawStringInException,
Rule::FStringInException,
Rule::DotFormatInException,
@@ -1525,112 +1356,105 @@ where
flake8_errmsg::rules::string_in_exception(self, stmt, exc);
}
}
- if self.settings.rules.enabled(Rule::OSErrorAlias) {
+ if self.enabled(Rule::OSErrorAlias) {
if let Some(item) = exc {
pyupgrade::rules::os_error_alias_raise(self, item);
}
}
- if self.settings.rules.enabled(Rule::RaiseVanillaClass) {
+ if self.enabled(Rule::RaiseVanillaClass) {
if let Some(expr) = exc {
tryceratops::rules::raise_vanilla_class(self, expr);
}
}
- if self.settings.rules.enabled(Rule::RaiseVanillaArgs) {
+ if self.enabled(Rule::RaiseVanillaArgs) {
if let Some(expr) = exc {
tryceratops::rules::raise_vanilla_args(self, expr);
}
}
- if self
- .settings
- .rules
- .enabled(Rule::UnnecessaryParenOnRaiseException)
- {
+ if self.enabled(Rule::UnnecessaryParenOnRaiseException) {
if let Some(expr) = exc {
flake8_raise::rules::unnecessary_paren_on_raise_exception(self, expr);
}
}
}
- StmtKind::AugAssign { target, .. } => {
+ Stmt::AugAssign(ast::StmtAugAssign { target, .. }) => {
self.handle_node_load(target);
- if self.settings.rules.enabled(Rule::GlobalStatement) {
- if let ExprKind::Name { id, .. } = &target.node {
+ if self.enabled(Rule::GlobalStatement) {
+ if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
pylint::rules::global_statement(self, id);
}
}
}
- StmtKind::If { test, body, orelse } => {
- if self.settings.rules.enabled(Rule::IfTuple) {
+ Stmt::If(ast::StmtIf {
+ test,
+ body,
+ orelse,
+ range: _,
+ }) => {
+ if self.enabled(Rule::IfTuple) {
pyflakes::rules::if_tuple(self, stmt, test);
}
- if self.settings.rules.enabled(Rule::CollapsibleIf) {
+ if self.enabled(Rule::CollapsibleIf) {
flake8_simplify::rules::nested_if_statements(
self,
stmt,
test,
body,
orelse,
- self.ctx.current_stmt_parent().map(Into::into),
+ self.semantic_model.stmt_parent(),
);
}
- if self.settings.rules.enabled(Rule::IfWithSameArms) {
+ if self.enabled(Rule::IfWithSameArms) {
flake8_simplify::rules::if_with_same_arms(
self,
stmt,
- self.ctx.current_stmt_parent().map(Into::into),
+ self.semantic_model.stmt_parent(),
);
}
- if self.settings.rules.enabled(Rule::NeedlessBool) {
+ if self.enabled(Rule::NeedlessBool) {
flake8_simplify::rules::needless_bool(self, stmt);
}
- if self
- .settings
- .rules
- .enabled(Rule::IfElseBlockInsteadOfDictLookup)
- {
+ if self.enabled(Rule::IfElseBlockInsteadOfDictLookup) {
flake8_simplify::rules::manual_dict_lookup(
self,
stmt,
test,
body,
orelse,
- self.ctx.current_stmt_parent().map(Into::into),
+ self.semantic_model.stmt_parent(),
);
}
- if self.settings.rules.enabled(Rule::IfElseBlockInsteadOfIfExp) {
+ if self.enabled(Rule::IfElseBlockInsteadOfIfExp) {
flake8_simplify::rules::use_ternary_operator(
self,
stmt,
- self.ctx.current_stmt_parent().map(Into::into),
+ self.semantic_model.stmt_parent(),
);
}
- if self
- .settings
- .rules
- .enabled(Rule::IfElseBlockInsteadOfDictGet)
- {
+ if self.enabled(Rule::IfElseBlockInsteadOfDictGet) {
flake8_simplify::rules::use_dict_get_with_default(
self,
stmt,
test,
body,
orelse,
- self.ctx.current_stmt_parent().map(Into::into),
+ self.semantic_model.stmt_parent(),
);
}
- if self.settings.rules.enabled(Rule::TypeCheckWithoutTypeError) {
+ if self.enabled(Rule::TypeCheckWithoutTypeError) {
tryceratops::rules::type_check_without_type_error(
self,
body,
test,
orelse,
- self.ctx.current_stmt_parent().map(Into::into),
+ self.semantic_model.stmt_parent(),
);
}
- if self.settings.rules.enabled(Rule::OutdatedVersionBlock) {
+ if self.enabled(Rule::OutdatedVersionBlock) {
pyupgrade::rules::outdated_version_block(self, stmt, test, body, orelse);
}
- if self.settings.rules.enabled(Rule::CollapsibleElseIf) {
+ if self.enabled(Rule::CollapsibleElseIf) {
if let Some(diagnostic) =
pylint::rules::collapsible_else_if(orelse, self.locator)
{
@@ -1638,23 +1462,27 @@ where
}
}
}
- StmtKind::Assert { test, msg } => {
- if !self.ctx.in_type_checking_block {
- if self.settings.rules.enabled(Rule::Assert) {
+ Stmt::Assert(ast::StmtAssert {
+ test,
+ msg,
+ range: _,
+ }) => {
+ if !self.semantic_model.in_type_checking_block() {
+ if self.enabled(Rule::Assert) {
self.diagnostics
.push(flake8_bandit::rules::assert_used(stmt));
}
}
- if self.settings.rules.enabled(Rule::AssertTuple) {
+ if self.enabled(Rule::AssertTuple) {
pyflakes::rules::assert_tuple(self, stmt, test);
}
- if self.settings.rules.enabled(Rule::AssertFalse) {
+ if self.enabled(Rule::AssertFalse) {
flake8_bugbear::rules::assert_false(self, stmt, test, msg.as_deref());
}
- if self.settings.rules.enabled(Rule::PytestAssertAlwaysFalse) {
+ if self.enabled(Rule::PytestAssertAlwaysFalse) {
flake8_pytest_style::rules::assert_falsy(self, stmt, test);
}
- if self.settings.rules.enabled(Rule::PytestCompositeAssertion) {
+ if self.enabled(Rule::PytestCompositeAssertion) {
flake8_pytest_style::rules::composite_condition(
self,
stmt,
@@ -1662,243 +1490,244 @@ where
msg.as_deref(),
);
}
- if self.settings.rules.enabled(Rule::AssertOnStringLiteral) {
+ if self.enabled(Rule::AssertOnStringLiteral) {
pylint::rules::assert_on_string_literal(self, test);
}
+ if self.enabled(Rule::InvalidMockAccess) {
+ pygrep_hooks::rules::non_existent_mock_method(self, test);
+ }
}
- StmtKind::With { items, body, .. } => {
- if self.settings.rules.enabled(Rule::AssertRaisesException) {
+ Stmt::With(ast::StmtWith { items, body, .. }) => {
+ if self.enabled(Rule::AssertRaisesException) {
flake8_bugbear::rules::assert_raises_exception(self, stmt, items);
}
- if self
- .settings
- .rules
- .enabled(Rule::PytestRaisesWithMultipleStatements)
- {
+ if self.enabled(Rule::PytestRaisesWithMultipleStatements) {
flake8_pytest_style::rules::complex_raises(self, stmt, items, body);
}
- if self.settings.rules.enabled(Rule::MultipleWithStatements) {
+ if self.enabled(Rule::MultipleWithStatements) {
flake8_simplify::rules::multiple_with_statements(
self,
stmt,
body,
- self.ctx.current_stmt_parent().map(Into::into),
+ self.semantic_model.stmt_parent(),
);
}
- if self.settings.rules.enabled(Rule::RedefinedLoopName) {
+ if self.enabled(Rule::RedefinedLoopName) {
pylint::rules::redefined_loop_name(self, &Node::Stmt(stmt));
}
}
- StmtKind::While { body, orelse, .. } => {
- if self.settings.rules.enabled(Rule::FunctionUsesLoopVariable) {
+ Stmt::While(ast::StmtWhile { body, orelse, .. }) => {
+ if self.enabled(Rule::FunctionUsesLoopVariable) {
flake8_bugbear::rules::function_uses_loop_variable(self, &Node::Stmt(stmt));
}
- if self.settings.rules.enabled(Rule::UselessElseOnLoop) {
+ if self.enabled(Rule::UselessElseOnLoop) {
pylint::rules::useless_else_on_loop(self, stmt, body, orelse);
}
}
- StmtKind::For {
+ Stmt::For(ast::StmtFor {
target,
body,
iter,
orelse,
..
- }
- | StmtKind::AsyncFor {
+ })
+ | Stmt::AsyncFor(ast::StmtAsyncFor {
target,
body,
iter,
orelse,
..
- } => {
- if self.settings.rules.enabled(Rule::UnusedLoopControlVariable) {
- self.deferred
- .for_loops
- .push((stmt, (self.ctx.scope_id, self.ctx.parents.clone())));
+ }) => {
+ if self.enabled(Rule::UnusedLoopControlVariable) {
+ self.deferred.for_loops.push(self.semantic_model.snapshot());
}
- if self
- .settings
- .rules
- .enabled(Rule::LoopVariableOverridesIterator)
- {
+ if self.enabled(Rule::LoopVariableOverridesIterator) {
flake8_bugbear::rules::loop_variable_overrides_iterator(self, target, iter);
}
- if self.settings.rules.enabled(Rule::FunctionUsesLoopVariable) {
+ if self.enabled(Rule::FunctionUsesLoopVariable) {
flake8_bugbear::rules::function_uses_loop_variable(self, &Node::Stmt(stmt));
}
- if self.settings.rules.enabled(Rule::ReuseOfGroupbyGenerator) {
+ if self.enabled(Rule::ReuseOfGroupbyGenerator) {
flake8_bugbear::rules::reuse_of_groupby_generator(self, target, body, iter);
}
- if self.settings.rules.enabled(Rule::UselessElseOnLoop) {
+ if self.enabled(Rule::UselessElseOnLoop) {
pylint::rules::useless_else_on_loop(self, stmt, body, orelse);
}
- if self.settings.rules.enabled(Rule::RedefinedLoopName) {
+ if self.enabled(Rule::RedefinedLoopName) {
pylint::rules::redefined_loop_name(self, &Node::Stmt(stmt));
}
- if matches!(stmt.node, StmtKind::For { .. }) {
- if self.settings.rules.enabled(Rule::ReimplementedBuiltin) {
+ if self.enabled(Rule::IterationOverSet) {
+ pylint::rules::iteration_over_set(self, iter);
+ }
+ if stmt.is_for_stmt() {
+ if self.enabled(Rule::ReimplementedBuiltin) {
flake8_simplify::rules::convert_for_loop_to_any_all(
self,
stmt,
- self.ctx.current_sibling_stmt(),
+ self.semantic_model.sibling_stmt(),
);
}
- if self.settings.rules.enabled(Rule::InDictKeys) {
+ if self.enabled(Rule::InDictKeys) {
flake8_simplify::rules::key_in_dict_for(self, target, iter);
}
}
}
- StmtKind::Try {
+ Stmt::Try(ast::StmtTry {
body,
handlers,
orelse,
finalbody,
- ..
- }
- | StmtKind::TryStar {
+ range: _,
+ })
+ | Stmt::TryStar(ast::StmtTryStar {
body,
handlers,
orelse,
finalbody,
- ..
- } => {
- if self.settings.rules.enabled(Rule::DefaultExceptNotLast) {
+ range: _,
+ }) => {
+ if self.enabled(Rule::DefaultExceptNotLast) {
if let Some(diagnostic) =
pyflakes::rules::default_except_not_last(handlers, self.locator)
{
self.diagnostics.push(diagnostic);
}
}
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::DuplicateHandlerException,
Rule::DuplicateTryBlockException,
]) {
flake8_bugbear::rules::duplicate_exceptions(self, handlers);
}
- if self
- .settings
- .rules
- .enabled(Rule::RedundantTupleInExceptionHandler)
- {
+ if self.enabled(Rule::RedundantTupleInExceptionHandler) {
flake8_bugbear::rules::redundant_tuple_in_exception_handler(self, handlers);
}
- if self.settings.rules.enabled(Rule::OSErrorAlias) {
+ if self.enabled(Rule::OSErrorAlias) {
pyupgrade::rules::os_error_alias_handlers(self, handlers);
}
- if self.settings.rules.enabled(Rule::PytestAssertInExcept) {
+ if self.enabled(Rule::PytestAssertInExcept) {
self.diagnostics.extend(
flake8_pytest_style::rules::assert_in_exception_handler(handlers),
);
}
- if self.settings.rules.enabled(Rule::SuppressibleException) {
+ if self.enabled(Rule::SuppressibleException) {
flake8_simplify::rules::suppressible_exception(
self, stmt, body, handlers, orelse, finalbody,
);
}
- if self.settings.rules.enabled(Rule::ReturnInTryExceptFinally) {
+ if self.enabled(Rule::ReturnInTryExceptFinally) {
flake8_simplify::rules::return_in_try_except_finally(
self, body, handlers, finalbody,
);
}
- if self.settings.rules.enabled(Rule::TryConsiderElse) {
+ if self.enabled(Rule::TryConsiderElse) {
tryceratops::rules::try_consider_else(self, body, orelse, handlers);
}
- if self.settings.rules.enabled(Rule::VerboseRaise) {
+ if self.enabled(Rule::VerboseRaise) {
tryceratops::rules::verbose_raise(self, handlers);
}
- if self.settings.rules.enabled(Rule::VerboseLogMessage) {
+ if self.enabled(Rule::VerboseLogMessage) {
tryceratops::rules::verbose_log_message(self, handlers);
}
- if self.settings.rules.enabled(Rule::RaiseWithinTry) {
- tryceratops::rules::raise_within_try(self, body);
+ if self.enabled(Rule::RaiseWithinTry) {
+ tryceratops::rules::raise_within_try(self, body, handlers);
+ }
+ if self.enabled(Rule::UselessTryExcept) {
+ tryceratops::rules::useless_try_except(self, handlers);
}
- if self.settings.rules.enabled(Rule::ErrorInsteadOfException) {
+ if self.enabled(Rule::ErrorInsteadOfException) {
tryceratops::rules::error_instead_of_exception(self, handlers);
}
}
- StmtKind::Assign { targets, value, .. } => {
- if self.settings.rules.enabled(Rule::LambdaAssignment) {
+ Stmt::Assign(ast::StmtAssign { targets, value, .. }) => {
+ if self.enabled(Rule::LambdaAssignment) {
if let [target] = &targets[..] {
pycodestyle::rules::lambda_assignment(self, target, value, None, stmt);
}
}
-
- if self.settings.rules.enabled(Rule::AssignmentToOsEnviron) {
+ if self.enabled(Rule::AssignmentToOsEnviron) {
flake8_bugbear::rules::assignment_to_os_environ(self, targets);
}
-
- if self.settings.rules.enabled(Rule::HardcodedPasswordString) {
+ if self.enabled(Rule::HardcodedPasswordString) {
if let Some(diagnostic) =
flake8_bandit::rules::assign_hardcoded_password_string(value, targets)
{
self.diagnostics.push(diagnostic);
}
}
-
- if self.settings.rules.enabled(Rule::GlobalStatement) {
+ if self.enabled(Rule::GlobalStatement) {
for target in targets.iter() {
- if let ExprKind::Name { id, .. } = &target.node {
+ if let Expr::Name(ast::ExprName { id, .. }) = target {
pylint::rules::global_statement(self, id);
}
}
}
-
- if self.settings.rules.enabled(Rule::UselessMetaclassType) {
+ if self.enabled(Rule::UselessMetaclassType) {
pyupgrade::rules::useless_metaclass_type(self, stmt, value, targets);
}
- if self
- .settings
- .rules
- .enabled(Rule::ConvertTypedDictFunctionalToClass)
- {
+ if self.enabled(Rule::ConvertTypedDictFunctionalToClass) {
pyupgrade::rules::convert_typed_dict_functional_to_class(
self, stmt, targets, value,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::ConvertNamedTupleFunctionalToClass)
- {
+ if self.enabled(Rule::ConvertNamedTupleFunctionalToClass) {
pyupgrade::rules::convert_named_tuple_functional_to_class(
self, stmt, targets, value,
);
}
- if self.settings.rules.enabled(Rule::UnpackedListComprehension) {
+ if self.enabled(Rule::UnpackedListComprehension) {
pyupgrade::rules::unpacked_list_comprehension(self, targets, value);
}
-
- if self.settings.rules.enabled(Rule::PandasDfVariableName) {
+ if self.enabled(Rule::PandasDfVariableName) {
if let Some(diagnostic) = pandas_vet::rules::assignment_to_df(targets) {
self.diagnostics.push(diagnostic);
}
}
-
- if self.is_stub {
- if self
- .settings
- .rules
- .any_enabled(&[Rule::UnprefixedTypeParam, Rule::AssignmentDefaultInStub])
+ if self
+ .settings
+ .rules
+ .enabled(Rule::AirflowVariableNameTaskIdMismatch)
+ {
+ if let Some(diagnostic) =
+ airflow::rules::variable_name_task_id(self, targets, value)
{
+ self.diagnostics.push(diagnostic);
+ }
+ }
+ if self.is_stub {
+ if self.any_enabled(&[
+ Rule::UnprefixedTypeParam,
+ Rule::AssignmentDefaultInStub,
+ Rule::UnannotatedAssignmentInStub,
+ ]) {
// Ignore assignments in function bodies; those are covered by other rules.
- if !self.ctx.scopes().any(|scope| scope.kind.is_function()) {
- if self.settings.rules.enabled(Rule::UnprefixedTypeParam) {
+ if !self
+ .semantic_model
+ .scopes()
+ .any(|scope| scope.kind.is_any_function())
+ {
+ if self.enabled(Rule::UnprefixedTypeParam) {
flake8_pyi::rules::prefix_type_params(self, value, targets);
}
- if self.settings.rules.enabled(Rule::AssignmentDefaultInStub) {
+ if self.enabled(Rule::AssignmentDefaultInStub) {
flake8_pyi::rules::assignment_default_in_stub(self, targets, value);
}
+ if self.enabled(Rule::UnannotatedAssignmentInStub) {
+ flake8_pyi::rules::unannotated_assignment_in_stub(
+ self, targets, value,
+ );
+ }
}
}
}
}
- StmtKind::AnnAssign {
+ Stmt::AnnAssign(ast::StmtAnnAssign {
target,
value,
annotation,
..
- } => {
- if self.settings.rules.enabled(Rule::LambdaAssignment) {
+ }) => {
+ if self.enabled(Rule::LambdaAssignment) {
if let Some(value) = value {
pycodestyle::rules::lambda_assignment(
self,
@@ -1909,11 +1738,7 @@ where
);
}
}
- if self
- .settings
- .rules
- .enabled(Rule::UnintentionalTypeAnnotation)
- {
+ if self.enabled(Rule::UnintentionalTypeAnnotation) {
flake8_bugbear::rules::unintentional_type_annotation(
self,
target,
@@ -1923,44 +1748,63 @@ where
}
if self.is_stub {
if let Some(value) = value {
- if self.settings.rules.enabled(Rule::AssignmentDefaultInStub) {
+ if self.enabled(Rule::AssignmentDefaultInStub) {
// Ignore assignments in function bodies; those are covered by other rules.
- if !self.ctx.scopes().any(|scope| scope.kind.is_function()) {
+ if !self
+ .semantic_model
+ .scopes()
+ .any(|scope| scope.kind.is_any_function())
+ {
flake8_pyi::rules::annotated_assignment_default_in_stub(
self, target, value, annotation,
);
}
}
+ } else {
+ if self.enabled(Rule::UnassignedSpecialVariableInStub) {
+ flake8_pyi::rules::unassigned_special_variable_in_stub(
+ self, target, stmt,
+ );
+ }
}
- if self.ctx.match_typing_expr(annotation, "TypeAlias") {
- if self.settings.rules.enabled(Rule::SnakeCaseTypeAlias) {
+ if self
+ .semantic_model
+ .match_typing_expr(annotation, "TypeAlias")
+ {
+ if self.enabled(Rule::SnakeCaseTypeAlias) {
flake8_pyi::rules::snake_case_type_alias(self, target);
}
- if self.settings.rules.enabled(Rule::TSuffixedTypeAlias) {
+ if self.enabled(Rule::TSuffixedTypeAlias) {
flake8_pyi::rules::t_suffixed_type_alias(self, target);
}
}
}
}
- StmtKind::Delete { targets } => {
- if self.settings.rules.enabled(Rule::GlobalStatement) {
+ Stmt::Delete(ast::StmtDelete { targets, range: _ }) => {
+ if self.enabled(Rule::GlobalStatement) {
for target in targets.iter() {
- if let ExprKind::Name { id, .. } = &target.node {
+ if let Expr::Name(ast::ExprName { id, .. }) = target {
pylint::rules::global_statement(self, id);
}
}
}
}
- StmtKind::Expr { value, .. } => {
- if self.settings.rules.enabled(Rule::UselessComparison) {
+ Stmt::Expr(ast::StmtExpr { value, range: _ }) => {
+ if self.enabled(Rule::UselessComparison) {
flake8_bugbear::rules::useless_comparison(self, value);
}
- if self.settings.rules.enabled(Rule::UselessExpression) {
+ if self.enabled(Rule::UselessExpression) {
flake8_bugbear::rules::useless_expression(self, value);
}
- if self.settings.rules.enabled(Rule::AsyncioDanglingTask) {
+ if self.enabled(Rule::InvalidMockAccess) {
+ pygrep_hooks::rules::uncalled_mock_method(self, value);
+ }
+ if self.enabled(Rule::NamedExprWithoutContext) {
+ pylint::rules::named_expr_without_context(self, value);
+ }
+ if self.enabled(Rule::AsyncioDanglingTask) {
if let Some(diagnostic) = ruff::rules::asyncio_dangling_task(value, |expr| {
- self.ctx.resolve_call_path(expr)
+ self.semantic_model.resolve_call_path(expr)
}) {
self.diagnostics.push(diagnostic);
}
@@ -1970,197 +1814,224 @@ where
}
// Recurse.
- let prev_in_exception_handler = self.ctx.in_exception_handler;
- let prev_visible_scope = self.ctx.visible_scope;
- match &stmt.node {
- StmtKind::FunctionDef {
+ match stmt {
+ Stmt::FunctionDef(ast::StmtFunctionDef {
body,
name,
args,
decorator_list,
+ returns,
..
- }
- | StmtKind::AsyncFunctionDef {
+ })
+ | Stmt::AsyncFunctionDef(ast::StmtAsyncFunctionDef {
body,
name,
args,
decorator_list,
+ returns,
..
- } => {
- if self.settings.rules.enabled(Rule::FStringDocstring) {
- flake8_bugbear::rules::f_string_docstring(self, body);
- }
- let definition = docstrings::extraction::extract(
- self.ctx.visible_scope,
- stmt,
- body,
- Documentable::Function,
- );
- if self.settings.rules.enabled(Rule::YieldInForLoop) {
- pyupgrade::rules::yield_in_for_loop(self, stmt);
+ }) => {
+ // Visit the decorators and arguments, but avoid the body, which will be
+ // deferred.
+ for expr in decorator_list {
+ self.visit_expr(expr);
}
- let scope = transition_scope(self.ctx.visible_scope, stmt, Documentable::Function);
- self.deferred.definitions.push((
- definition,
- scope.visibility,
- (self.ctx.scope_id, self.ctx.parents.clone()),
- ));
- self.ctx.visible_scope = scope;
- // If any global bindings don't already exist in the global scope, add it.
- let globals = helpers::extract_globals(body);
- for (name, stmt) in helpers::extract_globals(body) {
- if self
- .ctx
- .global_scope()
- .get(name)
- .map_or(true, |index| self.ctx.bindings[*index].kind.is_annotation())
- {
- let id = self.ctx.bindings.push(Binding {
- kind: BindingKind::Assignment,
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: stmt.range(),
- source: Some(RefEquality(stmt)),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- });
- self.ctx.global_scope_mut().add(name, id);
+ // Function annotations are always evaluated at runtime, unless future annotations
+ // are enabled.
+ let runtime_annotation = !self.semantic_model.future_annotations();
+
+ for arg in &args.posonlyargs {
+ if let Some(expr) = &arg.annotation {
+ if runtime_annotation {
+ self.visit_type_definition(expr);
+ } else {
+ self.visit_annotation(expr);
+ };
+ }
+ }
+ for arg in &args.args {
+ if let Some(expr) = &arg.annotation {
+ if runtime_annotation {
+ self.visit_type_definition(expr);
+ } else {
+ self.visit_annotation(expr);
+ };
+ }
+ }
+ if let Some(arg) = &args.vararg {
+ if let Some(expr) = &arg.annotation {
+ if runtime_annotation {
+ self.visit_type_definition(expr);
+ } else {
+ self.visit_annotation(expr);
+ };
+ }
+ }
+ for arg in &args.kwonlyargs {
+ if let Some(expr) = &arg.annotation {
+ if runtime_annotation {
+ self.visit_type_definition(expr);
+ } else {
+ self.visit_annotation(expr);
+ };
}
}
+ if let Some(arg) = &args.kwarg {
+ if let Some(expr) = &arg.annotation {
+ if runtime_annotation {
+ self.visit_type_definition(expr);
+ } else {
+ self.visit_annotation(expr);
+ };
+ }
+ }
+ for expr in returns {
+ if runtime_annotation {
+ self.visit_type_definition(expr);
+ } else {
+ self.visit_annotation(expr);
+ };
+ }
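+ // Default values, unlike annotations, are always evaluated eagerly at definition
+ // time, so visit them as ordinary expressions.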
+ for expr in &args.kw_defaults {
+ self.visit_expr(expr);
+ }
+ for expr in &args.defaults {
+ self.visit_expr(expr);
+ }
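+ // Bind the function name in the enclosing scope before pushing the function's
+ // own scope.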
- self.ctx.push_scope(ScopeKind::Function(FunctionDef {
+ self.add_binding(
name,
- body,
- args,
- decorator_list,
- async_: matches!(stmt.node, StmtKind::AsyncFunctionDef { .. }),
- globals,
- }));
+ stmt.range(),
+ BindingKind::FunctionDefinition,
+ BindingFlags::empty(),
+ );
- self.deferred.functions.push((
- stmt,
- (self.ctx.scope_id, self.ctx.parents.clone()),
- self.ctx.visible_scope,
- ));
- }
- StmtKind::ClassDef {
- body,
- name,
- bases,
- keywords,
- decorator_list,
- ..
- } => {
- if self.settings.rules.enabled(Rule::FStringDocstring) {
- flake8_bugbear::rules::f_string_docstring(self, body);
- }
- let definition = docstrings::extraction::extract(
- self.ctx.visible_scope,
+ let definition = docstrings::extraction::extract_definition(
+ ExtractionTarget::Function,
stmt,
- body,
- Documentable::Class,
+ self.semantic_model.definition_id,
+ &self.semantic_model.definitions,
);
- let scope = transition_scope(self.ctx.visible_scope, stmt, Documentable::Class);
- self.deferred.definitions.push((
- definition,
- scope.visibility,
- (self.ctx.scope_id, self.ctx.parents.clone()),
- ));
- self.ctx.visible_scope = scope;
+ self.semantic_model.push_definition(definition);
- // If any global bindings don't already exist in the global scope, add it.
- let globals = helpers::extract_globals(body);
- for (name, stmt) in &globals {
- if self
- .ctx
- .global_scope()
- .get(name)
- .map_or(true, |index| self.ctx.bindings[*index].kind.is_annotation())
- {
- let id = self.ctx.bindings.push(Binding {
- kind: BindingKind::Assignment,
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: stmt.range(),
- source: Some(RefEquality(stmt)),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- });
- self.ctx.global_scope_mut().add(name, id);
- }
- }
+ self.semantic_model.push_scope(match &stmt {
+ Stmt::FunctionDef(stmt) => ScopeKind::Function(stmt),
+ Stmt::AsyncFunctionDef(stmt) => ScopeKind::AsyncFunction(stmt),
+ _ => unreachable!("Expected Stmt::FunctionDef | Stmt::AsyncFunctionDef"),
+ });
- self.ctx.push_scope(ScopeKind::Class(ClassDef {
- name,
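+ // Defer the function body, to be revisited later from this snapshot of the
+ // semantic model.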
+ self.deferred.functions.push(self.semantic_model.snapshot());
+
+ // Extract any global bindings from the function body.
+ if let Some(globals) = Globals::from_body(body) {
+ self.semantic_model.set_globals(globals);
+ }
+ }
+ Stmt::ClassDef(
+ class_def @ ast::StmtClassDef {
+ body,
bases,
keywords,
decorator_list,
- globals,
- }));
+ ..
+ },
+ ) => {
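+ // Visit the bases, keywords, and decorators eagerly; the class body is visited
+ // below, within the new class scope.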
+ for expr in bases {
+ self.visit_expr(expr);
+ }
+ for keyword in keywords {
+ self.visit_keyword(keyword);
+ }
+ for expr in decorator_list {
+ self.visit_expr(expr);
+ }
+
+ let definition = docstrings::extraction::extract_definition(
+ ExtractionTarget::Class,
+ stmt,
+ self.semantic_model.definition_id,
+ &self.semantic_model.definitions,
+ );
+ self.semantic_model.push_definition(definition);
+
+ self.semantic_model.push_scope(ScopeKind::Class(class_def));
+
+ // Extract any global bindings from the class body.
+ if let Some(globals) = Globals::from_body(body) {
+ self.semantic_model.set_globals(globals);
+ }
self.visit_body(body);
}
- StmtKind::Try {
+ Stmt::Try(ast::StmtTry {
body,
handlers,
orelse,
finalbody,
- }
- | StmtKind::TryStar {
+ range: _,
+ })
+ | Stmt::TryStar(ast::StmtTryStar {
body,
handlers,
orelse,
finalbody,
- } => {
+ range: _,
+ }) => {
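+ // Record which exception types this `try` handles; the set is pushed for the
+ // duration of the body and popped once the body has been visited.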
let mut handled_exceptions = Exceptions::empty();
for type_ in extract_handled_exceptions(handlers) {
- if let Some(call_path) = self.ctx.resolve_call_path(type_) {
- if call_path.as_slice() == ["", "NameError"] {
- handled_exceptions |= Exceptions::NAME_ERROR;
- } else if call_path.as_slice() == ["", "ModuleNotFoundError"] {
- handled_exceptions |= Exceptions::MODULE_NOT_FOUND_ERROR;
+ if let Some(call_path) = self.semantic_model.resolve_call_path(type_) {
+ match call_path.as_slice() {
+ ["", "NameError"] => {
+ handled_exceptions |= Exceptions::NAME_ERROR;
+ }
+ ["", "ModuleNotFoundError"] => {
+ handled_exceptions |= Exceptions::MODULE_NOT_FOUND_ERROR;
+ }
+ ["", "ImportError"] => {
+ handled_exceptions |= Exceptions::IMPORT_ERROR;
+ }
+ _ => {}
}
}
}
- self.ctx.handled_exceptions.push(handled_exceptions);
+ self.semantic_model
+ .handled_exceptions
+ .push(handled_exceptions);
- if self.settings.rules.enabled(Rule::JumpStatementInFinally) {
+ if self.enabled(Rule::JumpStatementInFinally) {
flake8_bugbear::rules::jump_statement_in_finally(self, finalbody);
}
- if self.settings.rules.enabled(Rule::ContinueInFinally) {
+ if self.enabled(Rule::ContinueInFinally) {
if self.settings.target_version <= PythonVersion::Py38 {
pylint::rules::continue_in_finally(self, finalbody);
}
}
self.visit_body(body);
- self.ctx.handled_exceptions.pop();
+ self.semantic_model.handled_exceptions.pop();
- self.ctx.in_exception_handler = true;
+ self.semantic_model.flags |= SemanticModelFlags::EXCEPTION_HANDLER;
for excepthandler in handlers {
self.visit_excepthandler(excepthandler);
}
- self.ctx.in_exception_handler = prev_in_exception_handler;
self.visit_body(orelse);
self.visit_body(finalbody);
}
- StmtKind::AnnAssign {
+ Stmt::AnnAssign(ast::StmtAnnAssign {
target,
annotation,
value,
..
- } => {
+ }) => {
// If we're in a class or module scope, then the annotation needs to be
// available at runtime.
// See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements
- let runtime_annotation = if self.ctx.annotations_future_enabled {
- if matches!(self.ctx.scope().kind, ScopeKind::Class(..)) {
+ let runtime_annotation = if self.semantic_model.future_annotations() {
+ if self.semantic_model.scope().kind.is_class() {
let baseclasses = &self
.settings
.flake8_type_checking
@@ -2170,7 +2041,7 @@ where
.flake8_type_checking
.runtime_evaluated_decorators;
flake8_type_checking::helpers::runtime_evaluated(
- &self.ctx,
+ &self.semantic_model,
baseclasses,
decorators,
)
@@ -2179,48 +2050,68 @@ where
}
} else {
matches!(
- self.ctx.scope().kind,
- ScopeKind::Class(..) | ScopeKind::Module
+ self.semantic_model.scope().kind,
+ ScopeKind::Class(_) | ScopeKind::Module
)
};
if runtime_annotation {
- visit_type_definition!(self, annotation);
+ self.visit_type_definition(annotation);
} else {
self.visit_annotation(annotation);
}
if let Some(expr) = value {
- if self.ctx.match_typing_expr(annotation, "TypeAlias") {
- visit_type_definition!(self, expr);
+ if self
+ .semantic_model
+ .match_typing_expr(annotation, "TypeAlias")
+ {
+ self.visit_type_definition(expr);
} else {
self.visit_expr(expr);
}
}
self.visit_expr(target);
}
- StmtKind::Assert { test, msg } => {
- visit_boolean_test!(self, test);
+ Stmt::Assert(ast::StmtAssert {
+ test,
+ msg,
+ range: _,
+ }) => {
+ self.visit_boolean_test(test);
if let Some(expr) = msg {
self.visit_expr(expr);
}
}
- StmtKind::While { test, body, orelse } => {
- visit_boolean_test!(self, test);
+ Stmt::While(ast::StmtWhile {
+ test,
+ body,
+ orelse,
+ range: _,
+ }) => {
+ self.visit_boolean_test(test);
self.visit_body(body);
self.visit_body(orelse);
}
- StmtKind::If { test, body, orelse } => {
- visit_boolean_test!(self, test);
+ Stmt::If(
+ stmt_if @ ast::StmtIf {
+ test,
+ body,
+ orelse,
+ range: _,
+ },
+ ) => {
+ self.visit_boolean_test(test);
- if flake8_type_checking::helpers::is_type_checking_block(&self.ctx, test) {
- if self.settings.rules.enabled(Rule::EmptyTypeCheckingBlock) {
- flake8_type_checking::rules::empty_type_checking_block(self, stmt, body);
+ if analyze::typing::is_type_checking_block(stmt_if, &self.semantic_model) {
+ if self.semantic_model.at_top_level() {
+ self.importer.visit_type_checking_block(stmt);
}
- let prev_in_type_checking_block = self.ctx.in_type_checking_block;
- self.ctx.in_type_checking_block = true;
- self.visit_body(body);
- self.ctx.in_type_checking_block = prev_in_type_checking_block;
+ if self.enabled(Rule::EmptyTypeCheckingBlock) {
+ flake8_type_checking::rules::empty_type_checking_block(self, stmt_if);
+ }
+
+ self.visit_type_checking_block(body);
} else {
self.visit_body(body);
}
@@ -2229,99 +2120,136 @@ where
}
_ => visitor::walk_stmt(self, stmt),
};
- self.ctx.visible_scope = prev_visible_scope;
// Post-visit.
- match &stmt.node {
- StmtKind::FunctionDef { .. } | StmtKind::AsyncFunctionDef { .. } => {
- self.ctx.pop_scope();
+ match stmt {
+ Stmt::FunctionDef(_) | Stmt::AsyncFunctionDef(_) => {
+ self.semantic_model.pop_scope();
+ self.semantic_model.pop_definition();
}
- StmtKind::ClassDef { name, .. } => {
- self.ctx.pop_scope();
+ Stmt::ClassDef(ast::StmtClassDef { name, .. }) => {
+ self.semantic_model.pop_scope();
+ self.semantic_model.pop_definition();
self.add_binding(
name,
- Binding {
- kind: BindingKind::ClassDefinition,
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: stmt.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
+ stmt.range(),
+ BindingKind::ClassDefinition,
+ BindingFlags::empty(),
);
}
_ => {}
}
- self.ctx.pop_parent();
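+ // Restore the semantic model flags captured before visiting this statement, and
+ // pop the statement off the statement stack.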
+ self.semantic_model.flags = flags_snapshot;
+ self.semantic_model.pop_stmt();
}
fn visit_annotation(&mut self, expr: &'b Expr) {
- let prev_in_annotation = self.ctx.in_annotation;
- self.ctx.in_annotation = true;
- visit_type_definition!(self, expr);
- self.ctx.in_annotation = prev_in_annotation;
+ let flags_snapshot = self.semantic_model.flags;
+ self.semantic_model.flags |= SemanticModelFlags::ANNOTATION;
+ self.visit_type_definition(expr);
+ self.semantic_model.flags = flags_snapshot;
}
fn visit_expr(&mut self, expr: &'b Expr) {
- if !self.ctx.in_f_string
- && !self.ctx.in_deferred_type_definition
- && self.ctx.in_deferred_string_type_definition.is_none()
- && self.ctx.in_type_definition
- && self.ctx.annotations_future_enabled
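+ // Under `from __future__ import annotations`, type definitions are deferred
+ // (as strings or expressions) rather than visited in place.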
+ if !self.semantic_model.in_f_string()
+ && !self.semantic_model.in_deferred_type_definition()
+ && self.semantic_model.in_type_definition()
+ && self.semantic_model.future_annotations()
{
- if let ExprKind::Constant {
+ if let Expr::Constant(ast::ExprConstant {
value: Constant::Str(value),
..
- } = &expr.node
+ }) = expr
{
self.deferred.string_type_definitions.push((
expr.range(),
value,
- (self.ctx.in_annotation, self.ctx.in_type_checking_block),
- (self.ctx.scope_id, self.ctx.parents.clone()),
+ self.semantic_model.snapshot(),
));
} else {
- self.deferred.type_definitions.push((
- expr,
- (self.ctx.in_annotation, self.ctx.in_type_checking_block),
- (self.ctx.scope_id, self.ctx.parents.clone()),
- ));
+ self.deferred
+ .future_type_definitions
+ .push((expr, self.semantic_model.snapshot()));
}
return;
}
- self.ctx.push_expr(expr);
+ self.semantic_model.push_expr(expr);
- let prev_in_literal = self.ctx.in_literal;
- let prev_in_type_definition = self.ctx.in_type_definition;
- let prev_in_boolean_test = self.ctx.in_boolean_test;
+ // Store the flags prior to any further descent, so that we can restore them after visiting
+ // the node.
+ let flags_snapshot = self.semantic_model.flags;
- if !matches!(expr.node, ExprKind::BoolOp { .. }) {
- self.ctx.in_boolean_test = false;
+ // If we're in a boolean test (e.g., the `test` of a `Stmt::If`), but now within a
+ // subexpression (e.g., `a` in `f(a)`), then we're no longer in a boolean test.
+ if !matches!(
+ expr,
+ Expr::BoolOp(_)
+ | Expr::UnaryOp(ast::ExprUnaryOp {
+ op: Unaryop::Not,
+ ..
+ })
+ ) {
+ self.semantic_model.flags -= SemanticModelFlags::BOOLEAN_TEST;
}
// Pre-visit.
- match &expr.node {
- ExprKind::Subscript { value, slice, .. } => {
+ match expr {
+ Expr::Subscript(ast::ExprSubscript { value, slice, .. }) => {
// Ex) Optional[...], Union[...]
- if !self.settings.pyupgrade.keep_runtime_typing
- && self.settings.rules.enabled(Rule::NonPEP604Annotation)
- && (self.settings.target_version >= PythonVersion::Py310
- || (self.settings.target_version >= PythonVersion::Py37
- && self.ctx.annotations_future_enabled
- && self.ctx.in_annotation))
- {
- pyupgrade::rules::use_pep604_annotation(self, expr, value, slice);
+ if self.any_enabled(&[
+ Rule::FutureRewritableTypeAnnotation,
+ Rule::NonPEP604Annotation,
+ ]) {
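+ // If the subscript can be rewritten with the PEP 604 `|` operator, gate the
+ // diagnostics on the target version and `from __future__ import annotations`.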
+ if let Some(operator) =
+ analyze::typing::to_pep604_operator(value, slice, &self.semantic_model)
+ {
+ if self.enabled(Rule::FutureRewritableTypeAnnotation) {
+ if self.settings.target_version < PythonVersion::Py310
+ && self.settings.target_version >= PythonVersion::Py37
+ && !self.semantic_model.future_annotations()
+ && self.semantic_model.in_annotation()
+ {
+ flake8_future_annotations::rules::future_rewritable_type_annotation(
+ self, value,
+ );
+ }
+ }
+ if self.enabled(Rule::NonPEP604Annotation) {
+ if self.settings.target_version >= PythonVersion::Py310
+ || (self.settings.target_version >= PythonVersion::Py37
+ && self.semantic_model.future_annotations()
+ && self.semantic_model.in_annotation())
+ {
+ pyupgrade::rules::use_pep604_annotation(
+ self, expr, slice, operator,
+ );
+ }
+ }
+ }
+ }
+
+ // Ex) list[...]
+ if self.enabled(Rule::FutureRequiredTypeAnnotation) {
+ if self.settings.target_version < PythonVersion::Py39
+ && !self.semantic_model.future_annotations()
+ && self.semantic_model.in_annotation()
+ && analyze::typing::is_pep585_generic(value, &self.semantic_model)
+ {
+ flake8_future_annotations::rules::future_required_type_annotation(
+ self,
+ expr,
+ flake8_future_annotations::rules::Reason::PEP585,
+ );
+ }
}
- if self.ctx.match_typing_expr(value, "Literal") {
- self.ctx.in_literal = true;
+ if self.semantic_model.match_typing_expr(value, "Literal") {
+ self.semantic_model.flags |= SemanticModelFlags::LITERAL;
}
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::SysVersionSlice3,
Rule::SysVersion2,
Rule::SysVersion0,
@@ -2330,24 +2258,27 @@ where
flake8_2020::rules::subscript(self, value, slice);
}
- if self
- .settings
- .rules
- .enabled(Rule::UncapitalizedEnvironmentVariables)
- {
+ if self.enabled(Rule::UncapitalizedEnvironmentVariables) {
flake8_simplify::rules::use_capital_environment_variables(self, expr);
}
+
+ pandas_vet::rules::subscript(self, value, expr);
}
- ExprKind::Tuple { elts, ctx } | ExprKind::List { elts, ctx } => {
+ Expr::Tuple(ast::ExprTuple {
+ elts,
+ ctx,
+ range: _,
+ })
+ | Expr::List(ast::ExprList {
+ elts,
+ ctx,
+ range: _,
+ }) => {
if matches!(ctx, ExprContext::Store) {
- let check_too_many_expressions = self
- .settings
- .rules
- .enabled(Rule::ExpressionsInStarAssignment);
- let check_two_starred_expressions = self
- .settings
- .rules
- .enabled(Rule::MultipleStarredExpressions);
+ let check_too_many_expressions =
+ self.enabled(Rule::ExpressionsInStarAssignment);
+ let check_two_starred_expressions =
+ self.enabled(Rule::MultipleStarredExpressions);
if let Some(diagnostic) = pyflakes::rules::starred_expressions(
elts,
check_too_many_expressions,
@@ -2358,32 +2289,60 @@ where
}
}
}
- ExprKind::Name { id, ctx } => {
+ Expr::Name(ast::ExprName { id, ctx, range: _ }) => {
match ctx {
ExprContext::Load => {
- if self.settings.rules.enabled(Rule::TypingTextStrAlias) {
+ if self.enabled(Rule::TypingTextStrAlias) {
pyupgrade::rules::typing_text_str_alias(self, expr);
}
- if self.settings.rules.enabled(Rule::NumpyDeprecatedTypeAlias) {
+ if self.enabled(Rule::NumpyDeprecatedTypeAlias) {
numpy::rules::deprecated_type_alias(self, expr);
}
+ if self.is_stub {
+ if self.enabled(Rule::CollectionsNamedTuple) {
+ flake8_pyi::rules::collections_named_tuple(self, expr);
+ }
+ }
// Ex) List[...]
- if !self.settings.pyupgrade.keep_runtime_typing
- && self.settings.rules.enabled(Rule::NonPEP585Annotation)
- && (self.settings.target_version >= PythonVersion::Py39
- || (self.settings.target_version >= PythonVersion::Py37
- && self.ctx.annotations_future_enabled
- && self.ctx.in_annotation))
- && analyze::typing::is_pep585_builtin(expr, &self.ctx)
- {
- pyupgrade::rules::use_pep585_annotation(self, expr);
+ if self.any_enabled(&[
+ Rule::FutureRewritableTypeAnnotation,
+ Rule::NonPEP585Annotation,
+ ]) {
+ if let Some(replacement) =
+ analyze::typing::to_pep585_generic(expr, &self.semantic_model)
+ {
+ if self.enabled(Rule::FutureRewritableTypeAnnotation) {
+ if self.settings.target_version < PythonVersion::Py39
+ && self.settings.target_version >= PythonVersion::Py37
+ && !self.semantic_model.future_annotations()
+ && self.semantic_model.in_annotation()
+ {
+ flake8_future_annotations::rules::future_rewritable_type_annotation(
+ self, expr,
+ );
+ }
+ }
+ if self.enabled(Rule::NonPEP585Annotation) {
+ if self.settings.target_version >= PythonVersion::Py39
+ || (self.settings.target_version >= PythonVersion::Py37
+ && self.semantic_model.future_annotations()
+ && self.semantic_model.in_annotation())
+ {
+ pyupgrade::rules::use_pep585_annotation(
+ self,
+ expr,
+ &replacement,
+ );
+ }
+ }
+ }
}
self.handle_node_load(expr);
}
ExprContext::Store => {
- if self.settings.rules.enabled(Rule::AmbiguousVariableName) {
+ if self.enabled(Rule::AmbiguousVariableName) {
if let Some(diagnostic) =
pycodestyle::rules::ambiguous_variable_name(id, expr.range())
{
@@ -2391,68 +2350,105 @@ where
}
}
- self.check_builtin_shadowing(id, expr, true);
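+ // Within a class body, shadowing a builtin is reported as attribute shadowing;
+ // elsewhere, as variable shadowing.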
+ if self.semantic_model.scope().kind.is_class() {
+ if self.enabled(Rule::BuiltinAttributeShadowing) {
+ flake8_builtins::rules::builtin_attribute_shadowing(
+ self,
+ id,
+ AnyShadowing::from(expr),
+ );
+ }
+ } else {
+ if self.enabled(Rule::BuiltinVariableShadowing) {
+ flake8_builtins::rules::builtin_variable_shadowing(
+ self,
+ id,
+ AnyShadowing::from(expr),
+ );
+ }
+ }
self.handle_node_store(id, expr);
}
ExprContext::Del => self.handle_node_delete(expr),
}
- if self.settings.rules.enabled(Rule::SixPY3) {
+ if self.enabled(Rule::SixPY3) {
flake8_2020::rules::name_or_attribute(self, expr);
}
- if self
- .settings
- .rules
- .enabled(Rule::LoadBeforeGlobalDeclaration)
- {
+ if self.enabled(Rule::LoadBeforeGlobalDeclaration) {
pylint::rules::load_before_global_declaration(self, id, expr);
}
}
- ExprKind::Attribute { attr, value, .. } => {
+ Expr::Attribute(ast::ExprAttribute { attr, value, .. }) => {
// Ex) typing.List[...]
- if !self.settings.pyupgrade.keep_runtime_typing
- && self.settings.rules.enabled(Rule::NonPEP585Annotation)
- && (self.settings.target_version >= PythonVersion::Py39
- || (self.settings.target_version >= PythonVersion::Py37
- && self.ctx.annotations_future_enabled
- && self.ctx.in_annotation))
- && analyze::typing::is_pep585_builtin(expr, &self.ctx)
- {
- pyupgrade::rules::use_pep585_annotation(self, expr);
+ if self.any_enabled(&[
+ Rule::FutureRewritableTypeAnnotation,
+ Rule::NonPEP585Annotation,
+ ]) {
+ if let Some(replacement) =
+ analyze::typing::to_pep585_generic(expr, &self.semantic_model)
+ {
+ if self.enabled(Rule::FutureRewritableTypeAnnotation) {
+ if self.settings.target_version < PythonVersion::Py39
+ && self.settings.target_version >= PythonVersion::Py37
+ && !self.semantic_model.future_annotations()
+ && self.semantic_model.in_annotation()
+ {
+ flake8_future_annotations::rules::future_rewritable_type_annotation(
+ self, expr,
+ );
+ }
+ }
+ if self.enabled(Rule::NonPEP585Annotation) {
+ if self.settings.target_version >= PythonVersion::Py39
+ || (self.settings.target_version >= PythonVersion::Py37
+ && self.semantic_model.future_annotations()
+ && self.semantic_model.in_annotation())
+ {
+ pyupgrade::rules::use_pep585_annotation(self, expr, &replacement);
+ }
+ }
+ }
}
- if self.settings.rules.enabled(Rule::DatetimeTimezoneUTC)
+ if self.enabled(Rule::DatetimeTimezoneUTC)
&& self.settings.target_version >= PythonVersion::Py311
{
pyupgrade::rules::datetime_utc_alias(self, expr);
}
- if self.settings.rules.enabled(Rule::TypingTextStrAlias) {
+ if self.enabled(Rule::TypingTextStrAlias) {
pyupgrade::rules::typing_text_str_alias(self, expr);
}
- if self.settings.rules.enabled(Rule::NumpyDeprecatedTypeAlias) {
+ if self.enabled(Rule::NumpyDeprecatedTypeAlias) {
numpy::rules::deprecated_type_alias(self, expr);
}
- if self.settings.rules.enabled(Rule::DeprecatedMockImport) {
+ if self.enabled(Rule::DeprecatedMockImport) {
pyupgrade::rules::deprecated_mock_attribute(self, expr);
}
- if self.settings.rules.enabled(Rule::SixPY3) {
+ if self.enabled(Rule::SixPY3) {
flake8_2020::rules::name_or_attribute(self, expr);
}
- if self.settings.rules.enabled(Rule::BannedApi) {
- flake8_tidy_imports::banned_api::banned_attribute_access(self, expr);
+ if self.enabled(Rule::BannedApi) {
+ flake8_tidy_imports::rules::banned_attribute_access(self, expr);
}
- if self.settings.rules.enabled(Rule::PrivateMemberAccess) {
+ if self.enabled(Rule::PrivateMemberAccess) {
flake8_self::rules::private_member_access(self, expr);
}
- pandas_vet::rules::check_attr(self, attr, value, expr);
+ if self.is_stub {
+ if self.enabled(Rule::CollectionsNamedTuple) {
+ flake8_pyi::rules::collections_named_tuple(self, expr);
+ }
+ }
+ pandas_vet::rules::attr(self, attr, value, expr);
}
- ExprKind::Call {
+ Expr::Call(ast::ExprCall {
func,
args,
keywords,
- } => {
- if self.settings.rules.any_enabled(&[
+ range: _,
+ }) => {
+ if self.any_enabled(&[
// pyflakes
Rule::StringDotFormatInvalidFormat,
Rule::StringDotFormatExtraNamedArguments,
@@ -2462,23 +2458,27 @@ where
// pyupgrade
Rule::FormatLiterals,
Rule::FString,
+ // flynt
+ Rule::StaticJoinToFString,
]) {
- if let ExprKind::Attribute { value, attr, .. } = &func.node {
- if let ExprKind::Constant {
+ if let Expr::Attribute(ast::ExprAttribute { value, attr, .. }) = func.as_ref() {
+ let attr = attr.as_str();
+ if let Expr::Constant(ast::ExprConstant {
value: Constant::Str(value),
..
- } = &value.node
+ }) = value.as_ref()
{
- if attr == "format" {
+ if attr == "join" {
+ // "...".join(...) call
+ if self.enabled(Rule::StaticJoinToFString) {
+ flynt::rules::static_join_to_fstring(self, expr, value);
+ }
+ } else if attr == "format" {
// "...".format(...) call
let location = expr.range();
match pyflakes::format::FormatSummary::try_from(value.as_ref()) {
Err(e) => {
- if self
- .settings
- .rules
- .enabled(Rule::StringDotFormatInvalidFormat)
- {
+ if self.enabled(Rule::StringDotFormatInvalidFormat) {
self.diagnostics.push(Diagnostic::new(
pyflakes::rules::StringDotFormatInvalidFormat {
message: pyflakes::format::error_to_string(&e),
@@ -2488,19 +2488,13 @@ where
}
}
Ok(summary) => {
- if self
- .settings
- .rules
- .enabled(Rule::StringDotFormatExtraNamedArguments)
- {
+ if self.enabled(Rule::StringDotFormatExtraNamedArguments) {
pyflakes::rules::string_dot_format_extra_named_arguments(
self, &summary, keywords, location,
);
}
if self
- .settings
- .rules
.enabled(Rule::StringDotFormatExtraPositionalArguments)
{
pyflakes::rules::string_dot_format_extra_positional_arguments(
@@ -2509,31 +2503,23 @@ where
);
}
- if self
- .settings
- .rules
- .enabled(Rule::StringDotFormatMissingArguments)
- {
+ if self.enabled(Rule::StringDotFormatMissingArguments) {
pyflakes::rules::string_dot_format_missing_argument(
self, &summary, args, keywords, location,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::StringDotFormatMixingAutomatic)
- {
+ if self.enabled(Rule::StringDotFormatMixingAutomatic) {
pyflakes::rules::string_dot_format_mixing_automatic(
self, &summary, location,
);
}
- if self.settings.rules.enabled(Rule::FormatLiterals) {
+ if self.enabled(Rule::FormatLiterals) {
pyupgrade::rules::format_literals(self, &summary, expr);
}
- if self.settings.rules.enabled(Rule::FString) {
+ if self.enabled(Rule::FString) {
pyupgrade::rules::f_strings(self, &summary, expr);
}
}
@@ -2544,53 +2530,60 @@ where
}
// pyupgrade
- if self.settings.rules.enabled(Rule::TypeOfPrimitive) {
+ if self.enabled(Rule::TypeOfPrimitive) {
pyupgrade::rules::type_of_primitive(self, expr, func, args);
}
- if self.settings.rules.enabled(Rule::DeprecatedUnittestAlias) {
+ if self.enabled(Rule::DeprecatedUnittestAlias) {
pyupgrade::rules::deprecated_unittest_alias(self, func);
}
- if self.settings.rules.enabled(Rule::SuperCallWithParameters) {
+ if self.enabled(Rule::SuperCallWithParameters) {
pyupgrade::rules::super_call_with_parameters(self, expr, func, args);
}
- if self.settings.rules.enabled(Rule::UnnecessaryEncodeUTF8) {
+ if self.enabled(Rule::UnnecessaryEncodeUTF8) {
pyupgrade::rules::unnecessary_encode_utf8(self, expr, func, args, keywords);
}
- if self.settings.rules.enabled(Rule::RedundantOpenModes) {
+ if self.enabled(Rule::RedundantOpenModes) {
pyupgrade::rules::redundant_open_modes(self, expr);
}
- if self.settings.rules.enabled(Rule::NativeLiterals) {
+ if self.enabled(Rule::NativeLiterals) {
pyupgrade::rules::native_literals(self, expr, func, args, keywords);
}
- if self.settings.rules.enabled(Rule::OpenAlias) {
+ if self.enabled(Rule::OpenAlias) {
pyupgrade::rules::open_alias(self, expr, func);
}
- if self.settings.rules.enabled(Rule::ReplaceUniversalNewlines) {
+ if self.enabled(Rule::ReplaceUniversalNewlines) {
pyupgrade::rules::replace_universal_newlines(self, func, keywords);
}
- if self.settings.rules.enabled(Rule::ReplaceStdoutStderr) {
+ if self.enabled(Rule::ReplaceStdoutStderr) {
pyupgrade::rules::replace_stdout_stderr(self, expr, func, args, keywords);
}
- if self.settings.rules.enabled(Rule::OSErrorAlias) {
+ if self.enabled(Rule::OSErrorAlias) {
pyupgrade::rules::os_error_alias_call(self, func);
}
- if self.settings.rules.enabled(Rule::NonPEP604Isinstance)
+ if self.enabled(Rule::NonPEP604Isinstance)
&& self.settings.target_version >= PythonVersion::Py310
{
pyupgrade::rules::use_pep604_isinstance(self, expr, func, args);
}
+ // flake8-async
+ if self.enabled(Rule::BlockingHttpCallInAsyncFunction) {
+ flake8_async::rules::blocking_http_call(self, expr);
+ }
+ if self.enabled(Rule::OpenSleepOrSubprocessInAsyncFunction) {
+ flake8_async::rules::open_sleep_or_subprocess_call(self, expr);
+ }
+ if self.enabled(Rule::BlockingOsCallInAsyncFunction) {
+ flake8_async::rules::blocking_os_call(self, expr);
+ }
+
// flake8-print
- if self
- .settings
- .rules
- .any_enabled(&[Rule::Print, Rule::PPrint])
- {
+ if self.any_enabled(&[Rule::Print, Rule::PPrint]) {
flake8_print::rules::print_call(self, func, keywords);
}
// flake8-bandit
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::SuspiciousPickleUsage,
Rule::SuspiciousMarshalUsage,
Rule::SuspiciousInsecureHashUsage,
@@ -2617,168 +2610,150 @@ where
}
// flake8-bugbear
- if self.settings.rules.enabled(Rule::UnreliableCallableCheck) {
+ if self.enabled(Rule::UnreliableCallableCheck) {
flake8_bugbear::rules::unreliable_callable_check(self, expr, func, args);
}
- if self.settings.rules.enabled(Rule::StripWithMultiCharacters) {
+ if self.enabled(Rule::StripWithMultiCharacters) {
flake8_bugbear::rules::strip_with_multi_characters(self, expr, func, args);
}
- if self.settings.rules.enabled(Rule::GetAttrWithConstant) {
+ if self.enabled(Rule::GetAttrWithConstant) {
flake8_bugbear::rules::getattr_with_constant(self, expr, func, args);
}
- if self.settings.rules.enabled(Rule::SetAttrWithConstant) {
+ if self.enabled(Rule::SetAttrWithConstant) {
flake8_bugbear::rules::setattr_with_constant(self, expr, func, args);
}
- if self.settings.rules.enabled(Rule::UselessContextlibSuppress) {
+ if self.enabled(Rule::UselessContextlibSuppress) {
flake8_bugbear::rules::useless_contextlib_suppress(self, expr, func, args);
}
- if self
- .settings
- .rules
- .enabled(Rule::StarArgUnpackingAfterKeywordArg)
- {
+ if self.enabled(Rule::StarArgUnpackingAfterKeywordArg) {
flake8_bugbear::rules::star_arg_unpacking_after_keyword_arg(
self, args, keywords,
);
}
- if self.settings.rules.enabled(Rule::ZipWithoutExplicitStrict)
+ if self.enabled(Rule::ZipWithoutExplicitStrict)
&& self.settings.target_version >= PythonVersion::Py310
{
- flake8_bugbear::rules::zip_without_explicit_strict(self, expr, func, keywords);
+ flake8_bugbear::rules::zip_without_explicit_strict(
+ self, expr, func, args, keywords,
+ );
}
- if self.settings.rules.enabled(Rule::NoExplicitStacklevel) {
+ if self.enabled(Rule::NoExplicitStacklevel) {
flake8_bugbear::rules::no_explicit_stacklevel(self, func, args, keywords);
}
// flake8-pie
- if self.settings.rules.enabled(Rule::UnnecessaryDictKwargs) {
+ if self.enabled(Rule::UnnecessaryDictKwargs) {
flake8_pie::rules::unnecessary_dict_kwargs(self, expr, keywords);
}
// flake8-bandit
- if self.settings.rules.enabled(Rule::ExecBuiltin) {
+ if self.enabled(Rule::ExecBuiltin) {
if let Some(diagnostic) = flake8_bandit::rules::exec_used(expr, func) {
self.diagnostics.push(diagnostic);
}
}
- if self.settings.rules.enabled(Rule::BadFilePermissions) {
+ if self.enabled(Rule::BadFilePermissions) {
flake8_bandit::rules::bad_file_permissions(self, func, args, keywords);
}
- if self
- .settings
- .rules
- .enabled(Rule::RequestWithNoCertValidation)
- {
+ if self.enabled(Rule::RequestWithNoCertValidation) {
flake8_bandit::rules::request_with_no_cert_validation(
self, func, args, keywords,
);
}
- if self.settings.rules.enabled(Rule::UnsafeYAMLLoad) {
+ if self.enabled(Rule::UnsafeYAMLLoad) {
flake8_bandit::rules::unsafe_yaml_load(self, func, args, keywords);
}
- if self.settings.rules.enabled(Rule::SnmpInsecureVersion) {
+ if self.enabled(Rule::SnmpInsecureVersion) {
flake8_bandit::rules::snmp_insecure_version(self, func, args, keywords);
}
- if self.settings.rules.enabled(Rule::SnmpWeakCryptography) {
+ if self.enabled(Rule::SnmpWeakCryptography) {
flake8_bandit::rules::snmp_weak_cryptography(self, func, args, keywords);
}
- if self.settings.rules.enabled(Rule::Jinja2AutoescapeFalse) {
+ if self.enabled(Rule::Jinja2AutoescapeFalse) {
flake8_bandit::rules::jinja2_autoescape_false(self, func, args, keywords);
}
- if self.settings.rules.enabled(Rule::HardcodedPasswordFuncArg) {
+ if self.enabled(Rule::HardcodedPasswordFuncArg) {
self.diagnostics
.extend(flake8_bandit::rules::hardcoded_password_func_arg(keywords));
}
- if self.settings.rules.enabled(Rule::HardcodedSQLExpression) {
+ if self.enabled(Rule::HardcodedSQLExpression) {
flake8_bandit::rules::hardcoded_sql_expression(self, expr);
}
- if self
- .settings
- .rules
- .enabled(Rule::HashlibInsecureHashFunction)
- {
+ if self.enabled(Rule::HashlibInsecureHashFunction) {
flake8_bandit::rules::hashlib_insecure_hash_functions(
self, func, args, keywords,
);
}
- if self.settings.rules.enabled(Rule::RequestWithoutTimeout) {
+ if self.enabled(Rule::RequestWithoutTimeout) {
flake8_bandit::rules::request_without_timeout(self, func, args, keywords);
}
- if self
- .settings
- .rules
- .enabled(Rule::LoggingConfigInsecureListen)
- {
+ if self.enabled(Rule::ParamikoCall) {
+ flake8_bandit::rules::paramiko_call(self, func);
+ }
+ if self.enabled(Rule::LoggingConfigInsecureListen) {
flake8_bandit::rules::logging_config_insecure_listen(
self, func, args, keywords,
);
}
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::SubprocessWithoutShellEqualsTrue,
Rule::SubprocessPopenWithShellEqualsTrue,
Rule::CallWithShellEqualsTrue,
Rule::StartProcessWithAShell,
Rule::StartProcessWithNoShell,
Rule::StartProcessWithPartialPath,
+ Rule::UnixCommandWildcardInjection,
]) {
flake8_bandit::rules::shell_injection(self, func, args, keywords);
}
// flake8-comprehensions
- if self.settings.rules.enabled(Rule::UnnecessaryGeneratorList) {
+ if self.enabled(Rule::UnnecessaryGeneratorList) {
flake8_comprehensions::rules::unnecessary_generator_list(
self, expr, func, args, keywords,
);
}
- if self.settings.rules.enabled(Rule::UnnecessaryGeneratorSet) {
+ if self.enabled(Rule::UnnecessaryGeneratorSet) {
flake8_comprehensions::rules::unnecessary_generator_set(
self,
expr,
- self.ctx.current_expr_parent().map(Into::into),
+ self.semantic_model.expr_parent(),
func,
args,
keywords,
);
}
- if self.settings.rules.enabled(Rule::UnnecessaryGeneratorDict) {
+ if self.enabled(Rule::UnnecessaryGeneratorDict) {
flake8_comprehensions::rules::unnecessary_generator_dict(
self,
expr,
- self.ctx.current_expr_parent().map(Into::into),
+ self.semantic_model.expr_parent(),
func,
args,
keywords,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::UnnecessaryListComprehensionSet)
- {
+ if self.enabled(Rule::UnnecessaryListComprehensionSet) {
flake8_comprehensions::rules::unnecessary_list_comprehension_set(
self, expr, func, args, keywords,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::UnnecessaryListComprehensionDict)
- {
+ if self.enabled(Rule::UnnecessaryListComprehensionDict) {
flake8_comprehensions::rules::unnecessary_list_comprehension_dict(
self, expr, func, args, keywords,
);
}
- if self.settings.rules.enabled(Rule::UnnecessaryLiteralSet) {
+ if self.enabled(Rule::UnnecessaryLiteralSet) {
flake8_comprehensions::rules::unnecessary_literal_set(
self, expr, func, args, keywords,
);
}
- if self.settings.rules.enabled(Rule::UnnecessaryLiteralDict) {
+ if self.enabled(Rule::UnnecessaryLiteralDict) {
flake8_comprehensions::rules::unnecessary_literal_dict(
self, expr, func, args, keywords,
);
}
- if self.settings.rules.enabled(Rule::UnnecessaryCollectionCall) {
+ if self.enabled(Rule::UnnecessaryCollectionCall) {
flake8_comprehensions::rules::unnecessary_collection_call(
self,
expr,
@@ -2788,124 +2763,89 @@ where
&self.settings.flake8_comprehensions,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::UnnecessaryLiteralWithinTupleCall)
- {
+ if self.enabled(Rule::UnnecessaryLiteralWithinTupleCall) {
flake8_comprehensions::rules::unnecessary_literal_within_tuple_call(
self, expr, func, args, keywords,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::UnnecessaryLiteralWithinListCall)
- {
+ if self.enabled(Rule::UnnecessaryLiteralWithinListCall) {
flake8_comprehensions::rules::unnecessary_literal_within_list_call(
self, expr, func, args, keywords,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::UnnecessaryLiteralWithinDictCall)
- {
+ if self.enabled(Rule::UnnecessaryLiteralWithinDictCall) {
flake8_comprehensions::rules::unnecessary_literal_within_dict_call(
self, expr, func, args, keywords,
);
}
- if self.settings.rules.enabled(Rule::UnnecessaryListCall) {
+ if self.enabled(Rule::UnnecessaryListCall) {
flake8_comprehensions::rules::unnecessary_list_call(self, expr, func, args);
}
- if self
- .settings
- .rules
- .enabled(Rule::UnnecessaryCallAroundSorted)
- {
+ if self.enabled(Rule::UnnecessaryCallAroundSorted) {
flake8_comprehensions::rules::unnecessary_call_around_sorted(
self, expr, func, args,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::UnnecessaryDoubleCastOrProcess)
- {
+ if self.enabled(Rule::UnnecessaryDoubleCastOrProcess) {
flake8_comprehensions::rules::unnecessary_double_cast_or_process(
self, expr, func, args,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::UnnecessarySubscriptReversal)
- {
+ if self.enabled(Rule::UnnecessarySubscriptReversal) {
flake8_comprehensions::rules::unnecessary_subscript_reversal(
self, expr, func, args,
);
}
- if self.settings.rules.enabled(Rule::UnnecessaryMap) {
+ if self.enabled(Rule::UnnecessaryMap) {
flake8_comprehensions::rules::unnecessary_map(
self,
expr,
- self.ctx.current_expr_parent().map(Into::into),
+ self.semantic_model.expr_parent(),
func,
args,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::UnnecessaryComprehensionAnyAll)
- {
+ if self.enabled(Rule::UnnecessaryComprehensionAnyAll) {
flake8_comprehensions::rules::unnecessary_comprehension_any_all(
self, expr, func, args, keywords,
);
}
// flake8-boolean-trap
- if self
- .settings
- .rules
- .enabled(Rule::BooleanPositionalValueInFunctionCall)
- {
+ if self.enabled(Rule::BooleanPositionalValueInFunctionCall) {
flake8_boolean_trap::rules::check_boolean_positional_value_in_function_call(
self, args, func,
);
}
- if let ExprKind::Name { id, ctx } = &func.node {
+ if let Expr::Name(ast::ExprName { id, ctx, range: _ }) = func.as_ref() {
if id == "locals" && matches!(ctx, ExprContext::Load) {
- let scope = self.ctx.scope_mut();
- scope.uses_locals = true;
+ let scope = self.semantic_model.scope_mut();
+ scope.set_uses_locals();
}
}
// flake8-debugger
- if self.settings.rules.enabled(Rule::Debugger) {
+ if self.enabled(Rule::Debugger) {
flake8_debugger::rules::debugger_call(self, expr, func);
}
// pandas-vet
- if self
- .settings
- .rules
- .enabled(Rule::PandasUseOfInplaceArgument)
- {
+ if self.enabled(Rule::PandasUseOfInplaceArgument) {
self.diagnostics.extend(
- pandas_vet::rules::inplace_argument(self, expr, args, keywords).into_iter(),
+ pandas_vet::rules::inplace_argument(self, expr, func, args, keywords)
+ .into_iter(),
);
}
- pandas_vet::rules::check_call(self, func);
+ pandas_vet::rules::call(self, func);
- if self.settings.rules.enabled(Rule::PandasUseOfPdMerge) {
+ if self.enabled(Rule::PandasUseOfPdMerge) {
if let Some(diagnostic) = pandas_vet::rules::use_of_pd_merge(func) {
self.diagnostics.push(diagnostic);
};
}
// flake8-datetimez
- if self.settings.rules.enabled(Rule::CallDatetimeWithoutTzinfo) {
+ if self.enabled(Rule::CallDatetimeWithoutTzinfo) {
flake8_datetimez::rules::call_datetime_without_tzinfo(
self,
func,
@@ -2914,28 +2854,20 @@ where
expr.range(),
);
}
- if self.settings.rules.enabled(Rule::CallDatetimeToday) {
+ if self.enabled(Rule::CallDatetimeToday) {
flake8_datetimez::rules::call_datetime_today(self, func, expr.range());
}
- if self.settings.rules.enabled(Rule::CallDatetimeUtcnow) {
+ if self.enabled(Rule::CallDatetimeUtcnow) {
flake8_datetimez::rules::call_datetime_utcnow(self, func, expr.range());
}
- if self
- .settings
- .rules
- .enabled(Rule::CallDatetimeUtcfromtimestamp)
- {
+ if self.enabled(Rule::CallDatetimeUtcfromtimestamp) {
flake8_datetimez::rules::call_datetime_utcfromtimestamp(
self,
func,
expr.range(),
);
}
- if self
- .settings
- .rules
- .enabled(Rule::CallDatetimeNowWithoutTzinfo)
- {
+ if self.enabled(Rule::CallDatetimeNowWithoutTzinfo) {
flake8_datetimez::rules::call_datetime_now_without_tzinfo(
self,
func,
@@ -2944,7 +2876,7 @@ where
expr.range(),
);
}
- if self.settings.rules.enabled(Rule::CallDatetimeFromtimestamp) {
+ if self.enabled(Rule::CallDatetimeFromtimestamp) {
flake8_datetimez::rules::call_datetime_fromtimestamp(
self,
func,
@@ -2953,11 +2885,7 @@ where
expr.range(),
);
}
- if self
- .settings
- .rules
- .enabled(Rule::CallDatetimeStrptimeWithoutZone)
- {
+ if self.enabled(Rule::CallDatetimeStrptimeWithoutZone) {
flake8_datetimez::rules::call_datetime_strptime_without_zone(
self,
func,
@@ -2965,51 +2893,50 @@ where
expr.range(),
);
}
- if self.settings.rules.enabled(Rule::CallDateToday) {
+ if self.enabled(Rule::CallDateToday) {
flake8_datetimez::rules::call_date_today(self, func, expr.range());
}
- if self.settings.rules.enabled(Rule::CallDateFromtimestamp) {
+ if self.enabled(Rule::CallDateFromtimestamp) {
flake8_datetimez::rules::call_date_fromtimestamp(self, func, expr.range());
}
// pygrep-hooks
- if self.settings.rules.enabled(Rule::Eval) {
+ if self.enabled(Rule::Eval) {
pygrep_hooks::rules::no_eval(self, func);
}
- if self.settings.rules.enabled(Rule::DeprecatedLogWarn) {
+ if self.enabled(Rule::DeprecatedLogWarn) {
pygrep_hooks::rules::deprecated_log_warn(self, func);
}
// pylint
- if self
- .settings
- .rules
- .enabled(Rule::UnnecessaryDirectLambdaCall)
- {
+ if self.enabled(Rule::UnnecessaryDirectLambdaCall) {
pylint::rules::unnecessary_direct_lambda_call(self, expr, func);
}
- if self.settings.rules.enabled(Rule::SysExitAlias) {
+ if self.enabled(Rule::SysExitAlias) {
pylint::rules::sys_exit_alias(self, func);
}
- if self.settings.rules.enabled(Rule::BadStrStripCall) {
+ if self.enabled(Rule::BadStrStripCall) {
pylint::rules::bad_str_strip_call(self, func, args);
}
- if self.settings.rules.enabled(Rule::InvalidEnvvarDefault) {
+ if self.enabled(Rule::InvalidEnvvarDefault) {
pylint::rules::invalid_envvar_default(self, func, args, keywords);
}
- if self.settings.rules.enabled(Rule::InvalidEnvvarValue) {
+ if self.enabled(Rule::InvalidEnvvarValue) {
pylint::rules::invalid_envvar_value(self, func, args, keywords);
}
+ if self.enabled(Rule::NestedMinMax) {
+ pylint::rules::nested_min_max(self, expr, func, args, keywords);
+ }
// flake8-pytest-style
- if self.settings.rules.enabled(Rule::PytestPatchWithLambda) {
+ if self.enabled(Rule::PytestPatchWithLambda) {
if let Some(diagnostic) =
flake8_pytest_style::rules::patch_with_lambda(func, args, keywords)
{
self.diagnostics.push(diagnostic);
}
}
- if self.settings.rules.enabled(Rule::PytestUnittestAssertion) {
+ if self.enabled(Rule::PytestUnittestAssertion) {
if let Some(diagnostic) = flake8_pytest_style::rules::unittest_assertion(
self, expr, func, args, keywords,
) {
@@ -3017,25 +2944,25 @@ where
}
}
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::PytestRaisesWithoutException,
Rule::PytestRaisesTooBroad,
]) {
flake8_pytest_style::rules::raises_call(self, func, args, keywords);
}
- if self.settings.rules.enabled(Rule::PytestFailWithoutMessage) {
+ if self.enabled(Rule::PytestFailWithoutMessage) {
flake8_pytest_style::rules::fail_call(self, func, args, keywords);
}
- if self.settings.rules.enabled(Rule::PairwiseOverZipped) {
+ if self.enabled(Rule::PairwiseOverZipped) {
if self.settings.target_version >= PythonVersion::Py310 {
ruff::rules::pairwise_over_zipped(self, func, args);
}
}
// flake8-gettext
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::FStringInGetTextFuncCall,
Rule::FormatInGetTextFuncCall,
Rule::PrintfInGetTextFuncCall,
@@ -3043,49 +2970,41 @@ where
func,
&self.settings.flake8_gettext.functions_names,
) {
- if self.settings.rules.enabled(Rule::FStringInGetTextFuncCall) {
+ if self.enabled(Rule::FStringInGetTextFuncCall) {
self.diagnostics
.extend(flake8_gettext::rules::f_string_in_gettext_func_call(args));
}
- if self.settings.rules.enabled(Rule::FormatInGetTextFuncCall) {
+ if self.enabled(Rule::FormatInGetTextFuncCall) {
self.diagnostics
.extend(flake8_gettext::rules::format_in_gettext_func_call(args));
}
- if self.settings.rules.enabled(Rule::PrintfInGetTextFuncCall) {
+ if self.enabled(Rule::PrintfInGetTextFuncCall) {
self.diagnostics
.extend(flake8_gettext::rules::printf_in_gettext_func_call(args));
}
}
// flake8-simplify
- if self
- .settings
- .rules
- .enabled(Rule::UncapitalizedEnvironmentVariables)
- {
+ if self.enabled(Rule::UncapitalizedEnvironmentVariables) {
flake8_simplify::rules::use_capital_environment_variables(self, expr);
}
- if self
- .settings
- .rules
- .enabled(Rule::OpenFileWithContextHandler)
- {
+ if self.enabled(Rule::OpenFileWithContextHandler) {
flake8_simplify::rules::open_file_with_context_handler(self, func);
}
- if self.settings.rules.enabled(Rule::DictGetWithNoneDefault) {
+ if self.enabled(Rule::DictGetWithNoneDefault) {
flake8_simplify::rules::dict_get_with_none_default(self, expr);
}
// flake8-use-pathlib
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::OsPathAbspath,
Rule::OsChmod,
Rule::OsMkdir,
Rule::OsMakedirs,
Rule::OsRename,
- Rule::PathlibReplace,
+ Rule::OsReplace,
Rule::OsRmdir,
Rule::OsRemove,
Rule::OsUnlink,
@@ -3105,16 +3024,16 @@ where
Rule::BuiltinOpen,
Rule::PyPath,
]) {
- flake8_use_pathlib::helpers::replaceable_by_pathlib(self, func);
+ flake8_use_pathlib::rules::replaceable_by_pathlib(self, func);
}
// numpy
- if self.settings.rules.enabled(Rule::NumpyLegacyRandom) {
+ if self.enabled(Rule::NumpyLegacyRandom) {
numpy::rules::numpy_legacy_random(self, func);
}
// flake8-logging-format
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::LoggingStringFormat,
Rule::LoggingPercentFormat,
Rule::LoggingStringConcat,
@@ -3128,91 +3047,95 @@ where
}
// pylint logging checker
- if self
- .settings
- .rules
- .any_enabled(&[Rule::LoggingTooFewArgs, Rule::LoggingTooManyArgs])
- {
+ if self.any_enabled(&[Rule::LoggingTooFewArgs, Rule::LoggingTooManyArgs]) {
pylint::rules::logging_call(self, func, args, keywords);
}
// flake8-django
- if self
- .settings
- .rules
- .enabled(Rule::DjangoLocalsInRenderFunction)
- {
+ if self.enabled(Rule::DjangoLocalsInRenderFunction) {
flake8_django::rules::locals_in_render_function(self, func, args, keywords);
}
}
- ExprKind::Dict { keys, values } => {
- if self.settings.rules.any_enabled(&[
+ Expr::Dict(ast::ExprDict {
+ keys,
+ values,
+ range: _,
+ }) => {
+ if self.any_enabled(&[
Rule::MultiValueRepeatedKeyLiteral,
Rule::MultiValueRepeatedKeyVariable,
]) {
pyflakes::rules::repeated_keys(self, keys, values);
}
- if self.settings.rules.enabled(Rule::UnnecessarySpread) {
+ if self.enabled(Rule::UnnecessarySpread) {
flake8_pie::rules::unnecessary_spread(self, keys, values);
}
}
- ExprKind::Yield { .. } => {
- if self.settings.rules.enabled(Rule::YieldOutsideFunction) {
+ Expr::Set(ast::ExprSet { elts, range: _ }) => {
+ if self.enabled(Rule::DuplicateValue) {
+ flake8_bugbear::rules::duplicate_value(self, elts);
+ }
+ }
+ Expr::Yield(_) => {
+ if self.enabled(Rule::YieldOutsideFunction) {
pyflakes::rules::yield_outside_function(self, expr);
}
- if self.settings.rules.enabled(Rule::YieldInInit) {
+ if self.enabled(Rule::YieldInInit) {
pylint::rules::yield_in_init(self, expr);
}
}
- ExprKind::YieldFrom { .. } => {
- if self.settings.rules.enabled(Rule::YieldOutsideFunction) {
+ Expr::YieldFrom(yield_from) => {
+ if self.enabled(Rule::YieldOutsideFunction) {
pyflakes::rules::yield_outside_function(self, expr);
}
- if self.settings.rules.enabled(Rule::YieldInInit) {
+ if self.enabled(Rule::YieldInInit) {
pylint::rules::yield_in_init(self, expr);
}
+ if self.enabled(Rule::YieldFromInAsyncFunction) {
+ pylint::rules::yield_from_in_async_function(self, yield_from);
+ }
}
- ExprKind::Await { .. } => {
- if self.settings.rules.enabled(Rule::YieldOutsideFunction) {
+ Expr::Await(_) => {
+ if self.enabled(Rule::YieldOutsideFunction) {
pyflakes::rules::yield_outside_function(self, expr);
}
- if self.settings.rules.enabled(Rule::AwaitOutsideAsync) {
+ if self.enabled(Rule::AwaitOutsideAsync) {
pylint::rules::await_outside_async(self, expr);
}
}
- ExprKind::JoinedStr { values } => {
- if self
- .settings
- .rules
- .enabled(Rule::FStringMissingPlaceholders)
- {
+ Expr::JoinedStr(ast::ExprJoinedStr { values, range: _ }) => {
+ if self.enabled(Rule::FStringMissingPlaceholders) {
pyflakes::rules::f_string_missing_placeholders(expr, values, self);
}
- if self.settings.rules.enabled(Rule::HardcodedSQLExpression) {
+ if self.enabled(Rule::HardcodedSQLExpression) {
flake8_bandit::rules::hardcoded_sql_expression(self, expr);
}
+ if self.enabled(Rule::ExplicitFStringTypeConversion) {
+ ruff::rules::explicit_f_string_type_conversion(self, expr, values);
+ }
}
- ExprKind::BinOp {
+ Expr::BinOp(ast::ExprBinOp {
left,
op: Operator::RShift,
..
- } => {
- if self.settings.rules.enabled(Rule::InvalidPrintSyntax) {
+ }) => {
+ if self.enabled(Rule::InvalidPrintSyntax) {
pyflakes::rules::invalid_print_syntax(self, left);
}
}
- ExprKind::BinOp {
+ Expr::BinOp(ast::ExprBinOp {
left,
op: Operator::Mod,
right,
- } => {
- if let ExprKind::Constant {
+ range: _,
+ }) => {
+ if let Expr::Constant(ast::ExprConstant {
value: Constant::Str(value),
..
- } = &left.node
+ }) = left.as_ref()
{
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::PercentFormatInvalidFormat,
Rule::PercentFormatExpectedMapping,
Rule::PercentFormatExpectedSequence,
@@ -3229,11 +3152,7 @@ where
typ: CFormatErrorType::UnsupportedFormatChar(c),
..
}) => {
- if self
- .settings
- .rules
- .enabled(Rule::PercentFormatUnsupportedFormatCharacter)
- {
+ if self.enabled(Rule::PercentFormatUnsupportedFormatCharacter) {
self.diagnostics.push(Diagnostic::new(
pyflakes::rules::PercentFormatUnsupportedFormatCharacter {
char: c,
@@ -3243,11 +3162,7 @@ where
}
}
Err(e) => {
- if self
- .settings
- .rules
- .enabled(Rule::PercentFormatInvalidFormat)
- {
+ if self.enabled(Rule::PercentFormatInvalidFormat) {
self.diagnostics.push(Diagnostic::new(
pyflakes::rules::PercentFormatInvalidFormat {
message: e.to_string(),
@@ -3257,65 +3172,37 @@ where
}
}
Ok(summary) => {
- if self
- .settings
- .rules
- .enabled(Rule::PercentFormatExpectedMapping)
- {
+ if self.enabled(Rule::PercentFormatExpectedMapping) {
pyflakes::rules::percent_format_expected_mapping(
self, &summary, right, location,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::PercentFormatExpectedSequence)
- {
+ if self.enabled(Rule::PercentFormatExpectedSequence) {
pyflakes::rules::percent_format_expected_sequence(
self, &summary, right, location,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::PercentFormatExtraNamedArguments)
- {
+ if self.enabled(Rule::PercentFormatExtraNamedArguments) {
pyflakes::rules::percent_format_extra_named_arguments(
self, &summary, right, location,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::PercentFormatMissingArgument)
- {
+ if self.enabled(Rule::PercentFormatMissingArgument) {
pyflakes::rules::percent_format_missing_arguments(
self, &summary, right, location,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::PercentFormatMixedPositionalAndNamed)
- {
+ if self.enabled(Rule::PercentFormatMixedPositionalAndNamed) {
pyflakes::rules::percent_format_mixed_positional_and_named(
self, &summary, location,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::PercentFormatPositionalCountMismatch)
- {
+ if self.enabled(Rule::PercentFormatPositionalCountMismatch) {
pyflakes::rules::percent_format_positional_count_mismatch(
self, &summary, right, location,
);
}
- if self
- .settings
- .rules
- .enabled(Rule::PercentFormatStarRequiresSequence)
- {
+ if self.enabled(Rule::PercentFormatStarRequiresSequence) {
pyflakes::rules::percent_format_star_requires_sequence(
self, &summary, right, location,
);
@@ -3324,54 +3211,60 @@ where
}
}
- if self.settings.rules.enabled(Rule::PrintfStringFormatting) {
+ if self.enabled(Rule::PrintfStringFormatting) {
pyupgrade::rules::printf_string_formatting(self, expr, right, self.locator);
}
- if self.settings.rules.enabled(Rule::BadStringFormatType) {
+ if self.enabled(Rule::BadStringFormatType) {
pylint::rules::bad_string_format_type(self, expr, right);
}
- if self.settings.rules.enabled(Rule::HardcodedSQLExpression) {
+ if self.enabled(Rule::HardcodedSQLExpression) {
flake8_bandit::rules::hardcoded_sql_expression(self, expr);
}
}
}
- ExprKind::BinOp {
+ Expr::BinOp(ast::ExprBinOp {
op: Operator::Add, ..
- } => {
- if self
- .settings
- .rules
- .enabled(Rule::ExplicitStringConcatenation)
- {
+ }) => {
+ if self.enabled(Rule::ExplicitStringConcatenation) {
if let Some(diagnostic) = flake8_implicit_str_concat::rules::explicit(expr) {
self.diagnostics.push(diagnostic);
}
}
- if self
- .settings
- .rules
- .enabled(Rule::CollectionLiteralConcatenation)
- {
+ if self.enabled(Rule::CollectionLiteralConcatenation) {
ruff::rules::collection_literal_concatenation(self, expr);
}
- if self.settings.rules.enabled(Rule::HardcodedSQLExpression) {
+ if self.enabled(Rule::HardcodedSQLExpression) {
flake8_bandit::rules::hardcoded_sql_expression(self, expr);
}
}
- ExprKind::BinOp {
+ Expr::BinOp(ast::ExprBinOp {
op: Operator::BitOr,
..
- } => {
+ }) => {
+ // Ex) `str | None`
+ if self.enabled(Rule::FutureRequiredTypeAnnotation) {
+ if self.settings.target_version < PythonVersion::Py310
+ && !self.semantic_model.future_annotations()
+ && self.semantic_model.in_annotation()
+ {
+ flake8_future_annotations::rules::future_required_type_annotation(
+ self,
+ expr,
+ flake8_future_annotations::rules::Reason::PEP604,
+ );
+ }
+ }
+
if self.is_stub {
- if self.settings.rules.enabled(Rule::DuplicateUnionMember)
- && self.ctx.in_type_definition
- && self.ctx.current_expr_parent().map_or(true, |parent| {
+ if self.enabled(Rule::DuplicateUnionMember)
+ && self.semantic_model.in_type_definition()
+ && self.semantic_model.expr_parent().map_or(true, |parent| {
!matches!(
- parent.node,
- ExprKind::BinOp {
+ parent,
+ Expr::BinOp(ast::ExprBinOp {
op: Operator::BitOr,
..
- }
+ })
)
})
{
@@ -3379,42 +3272,46 @@ where
}
}
}
- ExprKind::UnaryOp { op, operand } => {
- let check_not_in = self.settings.rules.enabled(Rule::NotInTest);
- let check_not_is = self.settings.rules.enabled(Rule::NotIsTest);
+ Expr::UnaryOp(ast::ExprUnaryOp {
+ op,
+ operand,
+ range: _,
+ }) => {
+ let check_not_in = self.enabled(Rule::NotInTest);
+ let check_not_is = self.enabled(Rule::NotIsTest);
if check_not_in || check_not_is {
pycodestyle::rules::not_tests(
self,
expr,
- op,
+ *op,
operand,
check_not_in,
check_not_is,
);
}
- if self.settings.rules.enabled(Rule::UnaryPrefixIncrement) {
- flake8_bugbear::rules::unary_prefix_increment(self, expr, op, operand);
+ if self.enabled(Rule::UnaryPrefixIncrement) {
+ flake8_bugbear::rules::unary_prefix_increment(self, expr, *op, operand);
}
- if self.settings.rules.enabled(Rule::NegateEqualOp) {
- flake8_simplify::rules::negation_with_equal_op(self, expr, op, operand);
+ if self.enabled(Rule::NegateEqualOp) {
+ flake8_simplify::rules::negation_with_equal_op(self, expr, *op, operand);
}
- if self.settings.rules.enabled(Rule::NegateNotEqualOp) {
- flake8_simplify::rules::negation_with_not_equal_op(self, expr, op, operand);
+ if self.enabled(Rule::NegateNotEqualOp) {
+ flake8_simplify::rules::negation_with_not_equal_op(self, expr, *op, operand);
}
- if self.settings.rules.enabled(Rule::DoubleNegation) {
- flake8_simplify::rules::double_negation(self, expr, op, operand);
+ if self.enabled(Rule::DoubleNegation) {
+ flake8_simplify::rules::double_negation(self, expr, *op, operand);
}
}
- ExprKind::Compare {
+ Expr::Compare(ast::ExprCompare {
left,
ops,
comparators,
- } => {
- let check_none_comparisons = self.settings.rules.enabled(Rule::NoneComparison);
- let check_true_false_comparisons =
- self.settings.rules.enabled(Rule::TrueFalseComparison);
+ range: _,
+ }) => {
+ let check_none_comparisons = self.enabled(Rule::NoneComparison);
+ let check_true_false_comparisons = self.enabled(Rule::TrueFalseComparison);
if check_none_comparisons || check_true_false_comparisons {
pycodestyle::rules::literal_comparisons(
self,
@@ -3427,7 +3324,7 @@ where
);
}
- if self.settings.rules.enabled(Rule::IsLiteral) {
+ if self.enabled(Rule::IsLiteral) {
pyflakes::rules::invalid_literal_comparison(
self,
left,
@@ -3437,11 +3334,11 @@ where
);
}
- if self.settings.rules.enabled(Rule::TypeComparison) {
+ if self.enabled(Rule::TypeComparison) {
pycodestyle::rules::type_comparison(self, expr, ops, comparators);
}
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::SysVersionCmpStr3,
Rule::SysVersionInfo0Eq3,
Rule::SysVersionInfo1CmpInt,
@@ -3451,7 +3348,7 @@ where
flake8_2020::rules::compare(self, left, ops, comparators);
}
- if self.settings.rules.enabled(Rule::HardcodedPasswordString) {
+ if self.enabled(Rule::HardcodedPasswordString) {
self.diagnostics.extend(
flake8_bandit::rules::compare_to_hardcoded_password_string(
left,
@@ -3460,28 +3357,28 @@ where
);
}
- if self.settings.rules.enabled(Rule::ComparisonOfConstant) {
+ if self.enabled(Rule::ComparisonOfConstant) {
pylint::rules::comparison_of_constant(self, left, ops, comparators);
}
- if self.settings.rules.enabled(Rule::CompareToEmptyString) {
+ if self.enabled(Rule::CompareToEmptyString) {
pylint::rules::compare_to_empty_string(self, left, ops, comparators);
}
- if self.settings.rules.enabled(Rule::MagicValueComparison) {
+ if self.enabled(Rule::MagicValueComparison) {
pylint::rules::magic_value_comparison(self, left, comparators);
}
- if self.settings.rules.enabled(Rule::InDictKeys) {
+ if self.enabled(Rule::InDictKeys) {
flake8_simplify::rules::key_in_dict_compare(self, expr, left, ops, comparators);
}
- if self.settings.rules.enabled(Rule::YodaConditions) {
+ if self.enabled(Rule::YodaConditions) {
flake8_simplify::rules::yoda_conditions(self, expr, left, ops, comparators);
}
if self.is_stub {
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::UnrecognizedPlatformCheck,
Rule::UnrecognizedPlatformName,
]) {
@@ -3494,7 +3391,7 @@ where
);
}
- if self.settings.rules.enabled(Rule::BadVersionInfoComparison) {
+ if self.enabled(Rule::BadVersionInfoComparison) {
flake8_pyi::rules::bad_version_info_comparison(
self,
expr,
@@ -3505,30 +3402,47 @@ where
}
}
}
- ExprKind::Constant {
+ Expr::Constant(ast::ExprConstant {
+ value: Constant::Int(_) | Constant::Float(_) | Constant::Complex { .. },
+ kind: _,
+ range: _,
+ }) => {
+ if self.is_stub && self.enabled(Rule::NumericLiteralTooLong) {
+ flake8_pyi::rules::numeric_literal_too_long(self, expr);
+ }
+ }
+ Expr::Constant(ast::ExprConstant {
+ value: Constant::Bytes(_),
+ kind: _,
+ range: _,
+ }) => {
+ if self.is_stub && self.enabled(Rule::StringOrBytesTooLong) {
+ flake8_pyi::rules::string_or_bytes_too_long(self, expr);
+ }
+ }
+ Expr::Constant(ast::ExprConstant {
value: Constant::Str(value),
kind,
- } => {
- if self.ctx.in_type_definition && !self.ctx.in_literal && !self.ctx.in_f_string {
+ range: _,
+ }) => {
+ if self.semantic_model.in_type_definition()
+ && !self.semantic_model.in_literal()
+ && !self.semantic_model.in_f_string()
+ {
self.deferred.string_type_definitions.push((
expr.range(),
value,
- (self.ctx.in_annotation, self.ctx.in_type_checking_block),
- (self.ctx.scope_id, self.ctx.parents.clone()),
+ self.semantic_model.snapshot(),
));
}
- if self
- .settings
- .rules
- .enabled(Rule::HardcodedBindAllInterfaces)
- {
+ if self.enabled(Rule::HardcodedBindAllInterfaces) {
if let Some(diagnostic) =
flake8_bandit::rules::hardcoded_bind_all_interfaces(value, expr.range())
{
self.diagnostics.push(diagnostic);
}
}
- if self.settings.rules.enabled(Rule::HardcodedTempFile) {
+ if self.enabled(Rule::HardcodedTempFile) {
if let Some(diagnostic) = flake8_bandit::rules::hardcoded_tmp_directory(
expr,
value,
@@ -3537,13 +3451,22 @@ where
self.diagnostics.push(diagnostic);
}
}
- if self.settings.rules.enabled(Rule::UnicodeKindPrefix) {
+ if self.enabled(Rule::UnicodeKindPrefix) {
pyupgrade::rules::unicode_kind_prefix(self, expr, kind.as_deref());
}
+ if self.is_stub && self.enabled(Rule::StringOrBytesTooLong) {
+ flake8_pyi::rules::string_or_bytes_too_long(self, expr);
+ }
}
- ExprKind::Lambda { args, body, .. } => {
- if self.settings.rules.enabled(Rule::ReimplementedListBuiltin) {
- flake8_pie::rules::reimplemented_list_builtin(self, expr);
+ Expr::Lambda(
+ lambda @ ast::ExprLambda {
+ args,
+ body: _,
+ range: _,
+ },
+ ) => {
+ if self.enabled(Rule::ReimplementedListBuiltin) {
+ flake8_pie::rules::reimplemented_list_builtin(self, lambda);
}
// Visit the default arguments, but avoid the body, which will be deferred.
@@ -3553,79 +3476,140 @@ where
for expr in &args.defaults {
self.visit_expr(expr);
}
- self.ctx
- .push_scope(ScopeKind::Lambda(Lambda { args, body }));
+ self.semantic_model.push_scope(ScopeKind::Lambda(lambda));
}
- ExprKind::IfExp { test, body, orelse } => {
- if self.settings.rules.enabled(Rule::IfExprWithTrueFalse) {
+ Expr::IfExp(ast::ExprIfExp {
+ test,
+ body,
+ orelse,
+ range: _,
+ }) => {
+ if self.enabled(Rule::IfExprWithTrueFalse) {
flake8_simplify::rules::explicit_true_false_in_ifexpr(
self, expr, test, body, orelse,
);
}
- if self.settings.rules.enabled(Rule::IfExprWithFalseTrue) {
+ if self.enabled(Rule::IfExprWithFalseTrue) {
flake8_simplify::rules::explicit_false_true_in_ifexpr(
self, expr, test, body, orelse,
);
}
- if self.settings.rules.enabled(Rule::IfExprWithTwistedArms) {
+ if self.enabled(Rule::IfExprWithTwistedArms) {
flake8_simplify::rules::twisted_arms_in_ifexpr(self, expr, test, body, orelse);
}
}
- ExprKind::ListComp { elt, generators } | ExprKind::SetComp { elt, generators } => {
- if self.settings.rules.enabled(Rule::UnnecessaryComprehension) {
+ Expr::ListComp(ast::ExprListComp {
+ elt,
+ generators,
+ range: _,
+ })
+ | Expr::SetComp(ast::ExprSetComp {
+ elt,
+ generators,
+ range: _,
+ }) => {
+ if self.enabled(Rule::UnnecessaryComprehension) {
flake8_comprehensions::rules::unnecessary_list_set_comprehension(
self, expr, elt, generators,
);
}
- if self.settings.rules.enabled(Rule::FunctionUsesLoopVariable) {
+ if self.enabled(Rule::FunctionUsesLoopVariable) {
flake8_bugbear::rules::function_uses_loop_variable(self, &Node::Expr(expr));
}
- self.ctx.push_scope(ScopeKind::Generator);
+ if self.enabled(Rule::InDictKeys) {
+ for generator in generators {
+ flake8_simplify::rules::key_in_dict_for(
+ self,
+ &generator.target,
+ &generator.iter,
+ );
+ }
+ }
+ if self.enabled(Rule::IterationOverSet) {
+ for generator in generators {
+ pylint::rules::iteration_over_set(self, &generator.iter);
+ }
+ }
}
- ExprKind::DictComp {
+ Expr::DictComp(ast::ExprDictComp {
key,
value,
generators,
- } => {
- if self.settings.rules.enabled(Rule::UnnecessaryComprehension) {
+ range: _,
+ }) => {
+ if self.enabled(Rule::UnnecessaryComprehension) {
flake8_comprehensions::rules::unnecessary_dict_comprehension(
self, expr, key, value, generators,
);
}
- if self.settings.rules.enabled(Rule::FunctionUsesLoopVariable) {
+ if self.enabled(Rule::FunctionUsesLoopVariable) {
flake8_bugbear::rules::function_uses_loop_variable(self, &Node::Expr(expr));
}
- self.ctx.push_scope(ScopeKind::Generator);
+ if self.enabled(Rule::InDictKeys) {
+ for generator in generators {
+ flake8_simplify::rules::key_in_dict_for(
+ self,
+ &generator.target,
+ &generator.iter,
+ );
+ }
+ }
+ if self.enabled(Rule::IterationOverSet) {
+ for generator in generators {
+ pylint::rules::iteration_over_set(self, &generator.iter);
+ }
+ }
}
- ExprKind::GeneratorExp { .. } => {
- if self.settings.rules.enabled(Rule::FunctionUsesLoopVariable) {
+ Expr::GeneratorExp(ast::ExprGeneratorExp {
+ generators,
+ elt: _,
+ range: _,
+ }) => {
+ if self.enabled(Rule::FunctionUsesLoopVariable) {
flake8_bugbear::rules::function_uses_loop_variable(self, &Node::Expr(expr));
}
- self.ctx.push_scope(ScopeKind::Generator);
- }
- ExprKind::BoolOp { op, values } => {
- if self.settings.rules.enabled(Rule::RepeatedIsinstanceCalls) {
- pylint::rules::repeated_isinstance_calls(self, expr, op, values);
+ if self.enabled(Rule::InDictKeys) {
+ for generator in generators {
+ flake8_simplify::rules::key_in_dict_for(
+ self,
+ &generator.target,
+ &generator.iter,
+ );
+ }
}
- if self.settings.rules.enabled(Rule::MultipleStartsEndsWith) {
+ if self.enabled(Rule::IterationOverSet) {
+ for generator in generators {
+ pylint::rules::iteration_over_set(self, &generator.iter);
+ }
+ }
+ }
+ Expr::BoolOp(ast::ExprBoolOp {
+ op,
+ values,
+ range: _,
+ }) => {
+ if self.enabled(Rule::RepeatedIsinstanceCalls) {
+ pylint::rules::repeated_isinstance_calls(self, expr, *op, values);
+ }
+ if self.enabled(Rule::MultipleStartsEndsWith) {
flake8_pie::rules::multiple_starts_ends_with(self, expr);
}
- if self.settings.rules.enabled(Rule::DuplicateIsinstanceCall) {
+ if self.enabled(Rule::DuplicateIsinstanceCall) {
flake8_simplify::rules::duplicate_isinstance_call(self, expr);
}
- if self.settings.rules.enabled(Rule::CompareWithTuple) {
+ if self.enabled(Rule::CompareWithTuple) {
flake8_simplify::rules::compare_with_tuple(self, expr);
}
- if self.settings.rules.enabled(Rule::ExprAndNotExpr) {
+ if self.enabled(Rule::ExprAndNotExpr) {
flake8_simplify::rules::expr_and_not_expr(self, expr);
}
- if self.settings.rules.enabled(Rule::ExprOrNotExpr) {
+ if self.enabled(Rule::ExprOrNotExpr) {
flake8_simplify::rules::expr_or_not_expr(self, expr);
}
- if self.settings.rules.enabled(Rule::ExprOrTrue) {
+ if self.enabled(Rule::ExprOrTrue) {
flake8_simplify::rules::expr_or_true(self, expr);
}
- if self.settings.rules.enabled(Rule::ExprAndFalse) {
+ if self.enabled(Rule::ExprAndFalse) {
flake8_simplify::rules::expr_and_false(self, expr);
}
}
@@ -3633,88 +3617,154 @@ where
};
// Recurse.
- match &expr.node {
- ExprKind::Lambda { .. } => {
- self.deferred
- .lambdas
- .push((expr, (self.ctx.scope_id, self.ctx.parents.clone())));
+ match expr {
+ Expr::ListComp(ast::ExprListComp {
+ elt,
+ generators,
+ range: _,
+ })
+ | Expr::SetComp(ast::ExprSetComp {
+ elt,
+ generators,
+ range: _,
+ })
+ | Expr::GeneratorExp(ast::ExprGeneratorExp {
+ elt,
+ generators,
+ range: _,
+ }) => {
+ self.visit_generators(generators);
+ self.visit_expr(elt);
}
- ExprKind::IfExp { test, body, orelse } => {
- visit_boolean_test!(self, test);
- self.visit_expr(body);
- self.visit_expr(orelse);
+ Expr::DictComp(ast::ExprDictComp {
+ key,
+ value,
+ generators,
+ range: _,
+ }) => {
+ self.visit_generators(generators);
+ self.visit_expr(key);
+ self.visit_expr(value);
+ }
+ Expr::Lambda(_) => {
+ self.deferred
+ .lambdas
+ .push((expr, self.semantic_model.snapshot()));
+ }
+ Expr::IfExp(ast::ExprIfExp {
+ test,
+ body,
+ orelse,
+ range: _,
+ }) => {
+ self.visit_boolean_test(test);
+ self.visit_expr(body);
+ self.visit_expr(orelse);
}
- ExprKind::Call {
+ Expr::Call(ast::ExprCall {
func,
args,
keywords,
- } => {
- let callable = self.ctx.resolve_call_path(func).and_then(|call_path| {
- if self.ctx.match_typing_call_path(&call_path, "cast") {
- Some(Callable::Cast)
- } else if self.ctx.match_typing_call_path(&call_path, "NewType") {
- Some(Callable::NewType)
- } else if self.ctx.match_typing_call_path(&call_path, "TypeVar") {
- Some(Callable::TypeVar)
- } else if self.ctx.match_typing_call_path(&call_path, "NamedTuple") {
- Some(Callable::NamedTuple)
- } else if self.ctx.match_typing_call_path(&call_path, "TypedDict") {
- Some(Callable::TypedDict)
- } else if [
- "Arg",
- "DefaultArg",
- "NamedArg",
- "DefaultNamedArg",
- "VarArg",
- "KwArg",
- ]
- .iter()
- .any(|target| call_path.as_slice() == ["mypy_extensions", target])
- {
- Some(Callable::MypyExtension)
- } else if call_path.as_slice() == ["", "bool"] {
- Some(Callable::Bool)
- } else {
- None
- }
- });
+ range: _,
+ }) => {
+ let callable = self
+ .semantic_model
+ .resolve_call_path(func)
+ .and_then(|call_path| {
+ if self
+ .semantic_model
+ .match_typing_call_path(&call_path, "cast")
+ {
+ Some(Callable::Cast)
+ } else if self
+ .semantic_model
+ .match_typing_call_path(&call_path, "NewType")
+ {
+ Some(Callable::NewType)
+ } else if self
+ .semantic_model
+ .match_typing_call_path(&call_path, "TypeVar")
+ {
+ Some(Callable::TypeVar)
+ } else if self
+ .semantic_model
+ .match_typing_call_path(&call_path, "NamedTuple")
+ {
+ Some(Callable::NamedTuple)
+ } else if self
+ .semantic_model
+ .match_typing_call_path(&call_path, "TypedDict")
+ {
+ Some(Callable::TypedDict)
+ } else if [
+ "Arg",
+ "DefaultArg",
+ "NamedArg",
+ "DefaultNamedArg",
+ "VarArg",
+ "KwArg",
+ ]
+ .iter()
+ .any(|target| call_path.as_slice() == ["mypy_extensions", target])
+ {
+ Some(Callable::MypyExtension)
+ } else if call_path.as_slice() == ["", "bool"] {
+ Some(Callable::Bool)
+ } else {
+ None
+ }
+ });
match callable {
Some(Callable::Bool) => {
self.visit_expr(func);
- if !args.is_empty() {
- visit_boolean_test!(self, &args[0]);
+ let mut args = args.iter();
+ if let Some(arg) = args.next() {
+ self.visit_boolean_test(arg);
}
- for expr in args.iter().skip(1) {
- self.visit_expr(expr);
+ for arg in args {
+ self.visit_expr(arg);
}
}
Some(Callable::Cast) => {
self.visit_expr(func);
- if !args.is_empty() {
- visit_type_definition!(self, &args[0]);
+ let mut args = args.iter();
+ if let Some(arg) = args.next() {
+ self.visit_type_definition(arg);
}
- for expr in args.iter().skip(1) {
- self.visit_expr(expr);
+ for arg in args {
+ self.visit_expr(arg);
}
}
Some(Callable::NewType) => {
self.visit_expr(func);
- for expr in args.iter().skip(1) {
- visit_type_definition!(self, expr);
+ let mut args = args.iter();
+ if let Some(arg) = args.next() {
+ self.visit_non_type_definition(arg);
+ }
+ for arg in args {
+ self.visit_type_definition(arg);
}
}
Some(Callable::TypeVar) => {
self.visit_expr(func);
- for expr in args.iter().skip(1) {
- visit_type_definition!(self, expr);
+ let mut args = args.iter();
+ if let Some(arg) = args.next() {
+ self.visit_non_type_definition(arg);
+ }
+ for arg in args {
+ self.visit_type_definition(arg);
}
for keyword in keywords {
- let KeywordData { arg, value } = &keyword.node;
+ let Keyword {
+ arg,
+ value,
+ range: _,
+ } = keyword;
if let Some(id) = arg {
if id == "bound" {
- visit_type_definition!(self, value);
+ self.visit_type_definition(value);
} else {
- visit_non_type_definition!(self, value);
+ self.visit_non_type_definition(value);
}
}
}
@@ -3723,75 +3773,98 @@ where
self.visit_expr(func);
// Ex) NamedTuple("a", [("a", int)])
- if args.len() > 1 {
- match &args[1].node {
- ExprKind::List { elts, .. } | ExprKind::Tuple { elts, .. } => {
- for elt in elts {
- match &elt.node {
- ExprKind::List { elts, .. }
- | ExprKind::Tuple { elts, .. } => {
- if elts.len() == 2 {
- visit_non_type_definition!(self, &elts[0]);
- visit_type_definition!(self, &elts[1]);
- }
- }
- _ => {}
+ let mut args = args.iter();
+ if let Some(arg) = args.next() {
+ self.visit_non_type_definition(arg);
+ }
+ for arg in args {
+ if let Expr::List(ast::ExprList { elts, .. })
+ | Expr::Tuple(ast::ExprTuple { elts, .. }) = arg
+ {
+ for elt in elts {
+ match elt {
+ Expr::List(ast::ExprList { elts, .. })
+ | Expr::Tuple(ast::ExprTuple { elts, .. })
+ if elts.len() == 2 =>
+ {
+ self.visit_non_type_definition(&elts[0]);
+ self.visit_type_definition(&elts[1]);
+ }
+ _ => {
+ self.visit_non_type_definition(elt);
}
}
}
- _ => {}
+ } else {
+ self.visit_non_type_definition(arg);
}
}
// Ex) NamedTuple("a", a=int)
for keyword in keywords {
- let KeywordData { value, .. } = &keyword.node;
- visit_type_definition!(self, value);
+ let Keyword { value, .. } = keyword;
+ self.visit_type_definition(value);
}
}
Some(Callable::TypedDict) => {
self.visit_expr(func);
// Ex) TypedDict("a", {"a": int})
- if args.len() > 1 {
- if let ExprKind::Dict { keys, values } = &args[1].node {
+ let mut args = args.iter();
+ if let Some(arg) = args.next() {
+ self.visit_non_type_definition(arg);
+ }
+ for arg in args {
+ if let Expr::Dict(ast::ExprDict {
+ keys,
+ values,
+ range: _,
+ }) = arg
+ {
for key in keys.iter().flatten() {
- visit_non_type_definition!(self, key);
+ self.visit_non_type_definition(key);
}
for value in values {
- visit_type_definition!(self, value);
+ self.visit_type_definition(value);
}
+ } else {
+ self.visit_non_type_definition(arg);
}
}
// Ex) TypedDict("a", a=int)
for keyword in keywords {
- let KeywordData { value, .. } = &keyword.node;
- visit_type_definition!(self, value);
+ let Keyword { value, .. } = keyword;
+ self.visit_type_definition(value);
}
}
Some(Callable::MypyExtension) => {
self.visit_expr(func);
- if let Some(arg) = args.first() {
+ let mut args = args.iter();
+ if let Some(arg) = args.next() {
// Ex) DefaultNamedArg(bool | None, name="some_prop_name")
- visit_type_definition!(self, arg);
+ self.visit_type_definition(arg);
- for arg in args.iter().skip(1) {
- visit_non_type_definition!(self, arg);
+ for arg in args {
+ self.visit_non_type_definition(arg);
}
for keyword in keywords {
- let KeywordData { value, .. } = &keyword.node;
- visit_non_type_definition!(self, value);
+ let Keyword { value, .. } = keyword;
+ self.visit_non_type_definition(value);
}
} else {
// Ex) DefaultNamedArg(type="bool", name="some_prop_name")
for keyword in keywords {
- let KeywordData { value, arg } = &keyword.node;
+ let Keyword {
+ value,
+ arg,
+ range: _,
+ } = keyword;
if arg.as_ref().map_or(false, |arg| arg == "type") {
- visit_type_definition!(self, value);
+ self.visit_type_definition(value);
} else {
- visit_non_type_definition!(self, value);
+ self.visit_non_type_definition(value);
}
}
}
@@ -3802,38 +3875,43 @@ where
// any strings as deferred type definitions).
self.visit_expr(func);
for arg in args {
- visit_non_type_definition!(self, arg);
+ self.visit_non_type_definition(arg);
}
for keyword in keywords {
- let KeywordData { value, .. } = &keyword.node;
- visit_non_type_definition!(self, value);
+ let Keyword { value, .. } = keyword;
+ self.visit_non_type_definition(value);
}
}
}
}
- ExprKind::Subscript { value, slice, ctx } => {
+ Expr::Subscript(ast::ExprSubscript {
+ value,
+ slice,
+ ctx,
+ range: _,
+ }) => {
// Only allow annotations in `ExprContext::Load`. If we have, e.g.,
// `obj["foo"]["bar"]`, we need to avoid treating the `obj["foo"]`
// portion as an annotation, despite having `ExprContext::Load`. Thus, we track
// the `ExprContext` at the top-level.
- let prev_in_subscript = self.ctx.in_subscript;
- if self.ctx.in_subscript {
+ if self.semantic_model.in_subscript() {
visitor::walk_expr(self, expr);
} else if matches!(ctx, ExprContext::Store | ExprContext::Del) {
- self.ctx.in_subscript = true;
+ self.semantic_model.flags |= SemanticModelFlags::SUBSCRIPT;
visitor::walk_expr(self, expr);
} else {
match analyze::typing::match_annotated_subscript(
value,
- &self.ctx,
+ &self.semantic_model,
self.settings.typing_modules.iter().map(String::as_str),
+ &self.settings.pyflakes.extend_generics,
) {
Some(subscript) => {
match subscript {
// Ex) Optional[int]
SubscriptKind::AnnotatedSubscript => {
self.visit_expr(value);
- visit_type_definition!(self, slice);
+ self.visit_type_definition(slice);
self.visit_expr_context(ctx);
}
// Ex) Annotated[int, "Hello, world!"]
@@ -3841,17 +3919,22 @@ where
// First argument is a type (including forward references); the
// rest are arbitrary Python objects.
self.visit_expr(value);
- if let ExprKind::Tuple { elts, ctx } = &slice.node {
+ if let Expr::Tuple(ast::ExprTuple {
+ elts,
+ ctx,
+ range: _,
+ }) = slice.as_ref()
+ {
if let Some(expr) = elts.first() {
self.visit_expr(expr);
for expr in elts.iter().skip(1) {
- visit_non_type_definition!(self, expr);
+ self.visit_non_type_definition(expr);
}
self.visit_expr_context(ctx);
}
} else {
error!(
- "Found non-ExprKind::Tuple argument to PEP 593 \
+ "Found non-Expr::Tuple argument to PEP 593 \
Annotation."
);
}
@@ -3861,57 +3944,44 @@ where
None => visitor::walk_expr(self, expr),
}
}
- self.ctx.in_subscript = prev_in_subscript;
}
- ExprKind::JoinedStr { .. } => {
- let prev_in_f_string = self.ctx.in_f_string;
- self.ctx.in_f_string = true;
+ Expr::JoinedStr(_) => {
+ self.semantic_model.flags |= if self.semantic_model.in_f_string() {
+ SemanticModelFlags::NESTED_F_STRING
+ } else {
+ SemanticModelFlags::F_STRING
+ };
visitor::walk_expr(self, expr);
- self.ctx.in_f_string = prev_in_f_string;
}
_ => visitor::walk_expr(self, expr),
}
// Post-visit.
- match &expr.node {
- ExprKind::Lambda { .. }
- | ExprKind::GeneratorExp { .. }
- | ExprKind::ListComp { .. }
- | ExprKind::DictComp { .. }
- | ExprKind::SetComp { .. } => {
- self.ctx.pop_scope();
+ match expr {
+ Expr::Lambda(_)
+ | Expr::GeneratorExp(_)
+ | Expr::ListComp(_)
+ | Expr::DictComp(_)
+ | Expr::SetComp(_) => {
+ self.semantic_model.pop_scope();
}
_ => {}
};
- self.ctx.in_type_definition = prev_in_type_definition;
- self.ctx.in_literal = prev_in_literal;
- self.ctx.in_boolean_test = prev_in_boolean_test;
-
- self.ctx.pop_expr();
- }
-
- fn visit_comprehension(&mut self, comprehension: &'b Comprehension) {
- if self.settings.rules.enabled(Rule::InDictKeys) {
- flake8_simplify::rules::key_in_dict_for(
- self,
- &comprehension.target,
- &comprehension.iter,
- );
- }
- self.visit_expr(&comprehension.iter);
- self.visit_expr(&comprehension.target);
- for expr in &comprehension.ifs {
- visit_boolean_test!(self, expr);
- }
+ self.semantic_model.flags = flags_snapshot;
+ self.semantic_model.pop_expr();
}
fn visit_excepthandler(&mut self, excepthandler: &'b Excepthandler) {
- match &excepthandler.node {
- ExcepthandlerKind::ExceptHandler {
- type_, name, body, ..
- } => {
- if self.settings.rules.enabled(Rule::BareExcept) {
+ match excepthandler {
+ Excepthandler::ExceptHandler(ast::ExcepthandlerExceptHandler {
+ type_,
+ name,
+ body,
+ range: _,
+ }) => {
+ let name = name.as_deref();
+ if self.enabled(Rule::BareExcept) {
if let Some(diagnostic) = pycodestyle::rules::bare_except(
type_.as_deref(),
body,
@@ -3921,61 +3991,48 @@ where
self.diagnostics.push(diagnostic);
}
}
- if self
- .settings
- .rules
- .enabled(Rule::RaiseWithoutFromInsideExcept)
- {
+ if self.enabled(Rule::RaiseWithoutFromInsideExcept) {
flake8_bugbear::rules::raise_without_from_inside_except(self, body);
}
- if self.settings.rules.enabled(Rule::BlindExcept) {
- flake8_blind_except::rules::blind_except(
- self,
- type_.as_deref(),
- name.as_deref(),
- body,
- );
+ if self.enabled(Rule::BlindExcept) {
+ flake8_blind_except::rules::blind_except(self, type_.as_deref(), name, body);
}
- if self.settings.rules.enabled(Rule::TryExceptPass) {
+ if self.enabled(Rule::TryExceptPass) {
flake8_bandit::rules::try_except_pass(
self,
excepthandler,
type_.as_deref(),
- name.as_deref(),
+ name,
body,
self.settings.flake8_bandit.check_typed_exception,
);
}
- if self.settings.rules.enabled(Rule::TryExceptContinue) {
+ if self.enabled(Rule::TryExceptContinue) {
flake8_bandit::rules::try_except_continue(
self,
excepthandler,
type_.as_deref(),
- name.as_deref(),
+ name,
body,
self.settings.flake8_bandit.check_typed_exception,
);
}
- if self.settings.rules.enabled(Rule::ExceptWithEmptyTuple) {
+ if self.enabled(Rule::ExceptWithEmptyTuple) {
flake8_bugbear::rules::except_with_empty_tuple(self, excepthandler);
}
- if self
- .settings
- .rules
- .enabled(Rule::ExceptWithNonExceptionClasses)
- {
+ if self.enabled(Rule::ExceptWithNonExceptionClasses) {
flake8_bugbear::rules::except_with_non_exception_classes(self, excepthandler);
}
- if self.settings.rules.enabled(Rule::ReraiseNoCause) {
+ if self.enabled(Rule::ReraiseNoCause) {
tryceratops::rules::reraise_no_cause(self, body);
}
- if self.settings.rules.enabled(Rule::BinaryOpException) {
+ if self.enabled(Rule::BinaryOpException) {
pylint::rules::binary_op_exception(self, excepthandler);
}
match name {
Some(name) => {
- if self.settings.rules.enabled(Rule::AmbiguousVariableName) {
+ if self.enabled(Rule::AmbiguousVariableName) {
if let Some(diagnostic) = pycodestyle::rules::ambiguous_variable_name(
name,
helpers::excepthandler_name_range(excepthandler, self.locator)
@@ -3985,52 +4042,53 @@ where
}
}
- self.check_builtin_shadowing(name, excepthandler, false);
+ if self.enabled(Rule::BuiltinVariableShadowing) {
+ flake8_builtins::rules::builtin_variable_shadowing(
+ self,
+ name,
+ AnyShadowing::from(excepthandler),
+ );
+ }
let name_range =
helpers::excepthandler_name_range(excepthandler, self.locator).unwrap();
- if self.ctx.scope().defines(name.as_str()) {
+ if self.semantic_model.scope().has(name) {
self.handle_node_store(
name,
- &Expr::with_range(
- ExprKind::Name {
- id: name.to_string(),
- ctx: ExprContext::Store,
- },
- name_range,
- ),
+ &Expr::Name(ast::ExprName {
+ id: name.into(),
+ ctx: ExprContext::Store,
+ range: name_range,
+ }),
);
}
- let definition = self.ctx.scope().get(name.as_str()).copied();
+ let definition = self.semantic_model.scope().get(name);
self.handle_node_store(
name,
- &Expr::with_range(
- ExprKind::Name {
- id: name.to_string(),
- ctx: ExprContext::Store,
- },
- name_range,
- ),
+ &Expr::Name(ast::ExprName {
+ id: name.into(),
+ ctx: ExprContext::Store,
+ range: name_range,
+ }),
);
walk_excepthandler(self, excepthandler);
- if let Some(index) = {
- let scope = self.ctx.scope_mut();
- &scope.remove(name.as_str())
+ if let Some(binding_id) = {
+ let scope = self.semantic_model.scope_mut();
+ scope.delete(name)
} {
- if !self.ctx.bindings[*index].used() {
- if self.settings.rules.enabled(Rule::UnusedVariable) {
+ if !self.semantic_model.is_used(binding_id) {
+ if self.enabled(Rule::UnusedVariable) {
let mut diagnostic = Diagnostic::new(
- pyflakes::rules::UnusedVariable {
- name: name.to_string(),
- },
+ pyflakes::rules::UnusedVariable { name: name.into() },
name_range,
);
if self.patch(Rule::UnusedVariable) {
- diagnostic.try_set_fix(|| {
+ #[allow(deprecated)]
+ diagnostic.try_set_fix_from_edit(|| {
pyflakes::fixes::remove_exception_handler_assignment(
excepthandler,
self.locator,
@@ -4042,9 +4100,9 @@ where
}
}
- if let Some(index) = definition {
- let scope = self.ctx.scope_mut();
- scope.add(name, index);
+ if let Some(binding_id) = definition {
+ let scope = self.semantic_model.scope_mut();
+ scope.add(name, binding_id);
}
}
None => walk_excepthandler(self, excepthandler),
@@ -4054,8 +4112,8 @@ where
}
fn visit_format_spec(&mut self, format_spec: &'b Expr) {
- match &format_spec.node {
- ExprKind::JoinedStr { values } => {
+ match format_spec {
+ Expr::JoinedStr(ast::ExprJoinedStr { values, range: _ }) => {
for value in values {
self.visit_expr(value);
}
@@ -4065,28 +4123,20 @@ where
}
fn visit_arguments(&mut self, arguments: &'b Arguments) {
- if self.settings.rules.enabled(Rule::MutableArgumentDefault) {
+ if self.enabled(Rule::MutableArgumentDefault) {
flake8_bugbear::rules::mutable_argument_default(self, arguments);
}
- if self
- .settings
- .rules
- .enabled(Rule::FunctionCallInDefaultArgument)
- {
+ if self.enabled(Rule::FunctionCallInDefaultArgument) {
flake8_bugbear::rules::function_call_argument_default(self, arguments);
}
if self.is_stub {
- if self
- .settings
- .rules
- .enabled(Rule::TypedArgumentDefaultInStub)
- {
+ if self.enabled(Rule::TypedArgumentDefaultInStub) {
flake8_pyi::rules::typed_argument_simple_defaults(self, arguments);
}
}
if self.is_stub {
- if self.settings.rules.enabled(Rule::ArgumentDefaultInStub) {
+ if self.enabled(Rule::ArgumentDefaultInStub) {
flake8_pyi::rules::argument_simple_defaults(self, arguments);
}
}
@@ -4114,30 +4164,23 @@ where
// Bind, but intentionally avoid walking the annotation, as we handle it
// upstream.
self.add_binding(
- &arg.node.arg,
- Binding {
- kind: BindingKind::Argument,
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: arg.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
+ &arg.arg,
+ arg.range(),
+ BindingKind::Argument,
+ BindingFlags::empty(),
);
- if self.settings.rules.enabled(Rule::AmbiguousVariableName) {
+ if self.enabled(Rule::AmbiguousVariableName) {
if let Some(diagnostic) =
- pycodestyle::rules::ambiguous_variable_name(&arg.node.arg, arg.range())
+ pycodestyle::rules::ambiguous_variable_name(&arg.arg, arg.range())
{
self.diagnostics.push(diagnostic);
}
}
- if self.settings.rules.enabled(Rule::InvalidArgumentName) {
+ if self.enabled(Rule::InvalidArgumentName) {
if let Some(diagnostic) = pep8_naming::rules::invalid_argument_name(
- &arg.node.arg,
+ &arg.arg,
arg,
&self.settings.pep8_naming.ignore_names,
) {
@@ -4145,30 +4188,28 @@ where
}
}
- self.check_builtin_arg_shadowing(&arg.node.arg, arg);
+ if self.enabled(Rule::BuiltinArgumentShadowing) {
+ flake8_builtins::rules::builtin_argument_shadowing(self, arg);
+ }
}
fn visit_pattern(&mut self, pattern: &'b Pattern) {
- if let PatternKind::MatchAs {
+ if let Pattern::MatchAs(ast::PatternMatchAs {
name: Some(name), ..
- }
- | PatternKind::MatchStar { name: Some(name) }
- | PatternKind::MatchMapping {
+ })
+ | Pattern::MatchStar(ast::PatternMatchStar {
+ name: Some(name),
+ range: _,
+ })
+ | Pattern::MatchMapping(ast::PatternMatchMapping {
rest: Some(name), ..
- } = &pattern.node
+ }) = pattern
{
self.add_binding(
name,
- Binding {
- kind: BindingKind::Assignment,
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: pattern.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
+ pattern.range(),
+ BindingKind::Assignment,
+ BindingFlags::empty(),
);
}
@@ -4176,62 +4217,177 @@ where
}
fn visit_body(&mut self, body: &'b [Stmt]) {
- if self.settings.rules.enabled(Rule::UnnecessaryPass) {
+ if self.enabled(Rule::UnnecessaryPass) {
flake8_pie::rules::no_unnecessary_pass(self, body);
}
- let prev_body = self.ctx.body;
- let prev_body_index = self.ctx.body_index;
- self.ctx.body = body;
- self.ctx.body_index = 0;
+ let prev_body = self.semantic_model.body;
+ let prev_body_index = self.semantic_model.body_index;
+ self.semantic_model.body = body;
+ self.semantic_model.body_index = 0;
for stmt in body {
self.visit_stmt(stmt);
- self.ctx.body_index += 1;
+ self.semantic_model.body_index += 1;
}
- self.ctx.body = prev_body;
- self.ctx.body_index = prev_body_index;
+ self.semantic_model.body = prev_body;
+ self.semantic_model.body_index = prev_body_index;
}
}
impl<'a> Checker<'a> {
- fn add_binding(&mut self, name: &'a str, binding: Binding<'a>) {
- let binding_id = self.ctx.bindings.next_id();
- if let Some((stack_index, existing_binding_id)) = self
- .ctx
+ /// Visit a [`Module`]. Returns `true` if the module contains a module-level docstring.
+ fn visit_module(&mut self, python_ast: &'a Suite) -> bool {
+ if self.enabled(Rule::FStringDocstring) {
+ flake8_bugbear::rules::f_string_docstring(self, python_ast);
+ }
+ let docstring = docstrings::extraction::docstring_from(python_ast);
+ docstring.is_some()
+ }
+
+ /// Visit a list of [`Comprehension`] nodes, assumed to be the comprehensions that compose a
+ /// list, set, or dict comprehension, or a generator expression.
+ fn visit_generators(&mut self, generators: &'a [Comprehension]) {
+ let mut generators = generators.iter();
+
+ let Some(generator) = generators.next() else {
+ unreachable!("Generator expression must contain at least one generator");
+ };
+
+ // Generators are compiled as nested functions. (This may change with PEP 709.)
+ // As such, the `iter` of the first generator is evaluated in the outer scope, while all
+ // subsequent nodes are evaluated in the inner scope.
+ //
+ // For example, given:
+ // ```py
+ // class A:
+ // T = range(10)
+ //
+ // L = [x for x in T for y in T]
+ // ```
+ //
+ // Conceptually, this is compiled as:
+ // ```py
+ // class A:
+ // T = range(10)
+ //
+ // def foo(x=T):
+ // def bar(y=T):
+ // pass
+ // return bar()
+ // foo()
+ // ```
+ //
+ // Following Python's scoping rules, the `T` in `x=T` is thus evaluated in the outer scope,
+ // while all subsequent reads and writes are evaluated in the inner scope. In particular,
+ // `x` is local to `foo`, and the `T` in `y=T` skips the class scope when resolving.
+ self.visit_expr(&generator.iter);
+ self.semantic_model.push_scope(ScopeKind::Generator);
+ self.visit_expr(&generator.target);
+ for expr in &generator.ifs {
+ self.visit_boolean_test(expr);
+ }
+
+ for generator in generators {
+ self.visit_expr(&generator.iter);
+ self.visit_expr(&generator.target);
+ for expr in &generator.ifs {
+ self.visit_boolean_test(expr);
+ }
+ }
+ }
+
+ /// Visit a body of [`Stmt`] nodes within a type-checking block.
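+ /// Ex) the body of a block like the following, which is only evaluated by type checkers
+ /// (the imported name is illustrative):
+ /// ```py
+ /// if TYPE_CHECKING:
+ ///     from collections.abc import Sequence
+ /// ```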
+ fn visit_type_checking_block(&mut self, body: &'a [Stmt]) {
+ let snapshot = self.semantic_model.flags;
+ self.semantic_model.flags |= SemanticModelFlags::TYPE_CHECKING_BLOCK;
+ self.visit_body(body);
+ self.semantic_model.flags = snapshot;
+ }
+
+ /// Visit an [`Expr`], and treat it as a type definition.
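+ /// Ex) an annotation such as `int | None`, or the first argument to `typing.cast`.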
+ fn visit_type_definition(&mut self, expr: &'a Expr) {
+ let snapshot = self.semantic_model.flags;
+ self.semantic_model.flags |= SemanticModelFlags::TYPE_DEFINITION;
+ self.visit_expr(expr);
+ self.semantic_model.flags = snapshot;
+ }
+
+ /// Visit an [`Expr`], and treat it as _not_ a type definition.
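+ /// Ex) the name argument in `TypeVar("T", bound=int)`: `"T"` is a plain string, while the
+ /// `bound` value is still visited as a type definition.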
+ fn visit_non_type_definition(&mut self, expr: &'a Expr) {
+ let snapshot = self.semantic_model.flags;
+ self.semantic_model.flags -= SemanticModelFlags::TYPE_DEFINITION;
+ self.visit_expr(expr);
+ self.semantic_model.flags = snapshot;
+ }
+
+ /// Visit an [`Expr`], and treat it as a boolean test. This is useful for detecting whether an
+ /// expression's return value is significant, or whether the calling context relies only on
+ /// its truthiness.
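+ /// Ex) the `test` in `x if test else y`, or the first argument to `bool(...)`, where only
+ /// the truthiness of the value matters.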
+ fn visit_boolean_test(&mut self, expr: &'a Expr) {
+ let snapshot = self.semantic_model.flags;
+ self.semantic_model.flags |= SemanticModelFlags::BOOLEAN_TEST;
+ self.visit_expr(expr);
+ self.semantic_model.flags = snapshot;
+ }
+
+ /// Add a [`Binding`] to the current scope, bound to the given name.
+ fn add_binding(
+ &mut self,
+ name: &'a str,
+ range: TextRange,
+ kind: BindingKind<'a>,
+ flags: BindingFlags,
+ ) -> BindingId {
+ // Determine the scope to which the binding belongs.
+ // Per [PEP 572](https://peps.python.org/pep-0572/#scope-of-the-target), named
+ // expressions in generators and comprehensions bind to the scope that contains the
+ // outermost comprehension.
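+ // Ex) in `[y := f(x) for x in data]`, `y` binds in the scope that contains the
+ // comprehension, not in the comprehension's own (generator) scope.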
+ let scope_id = if kind.is_named_expr_assignment() {
+ self.semantic_model
+ .scopes
+ .ancestor_ids(self.semantic_model.scope_id)
+ .find_or_last(|scope_id| !self.semantic_model.scopes[*scope_id].kind.is_generator())
+ .unwrap_or(self.semantic_model.scope_id)
+ } else {
+ self.semantic_model.scope_id
+ };
+
+ // Create the `Binding`.
+ let binding_id = self.semantic_model.push_binding(range, kind, flags);
+ let binding = &self.semantic_model.bindings[binding_id];
+
+ // Determine whether the binding shadows any existing bindings.
+ if let Some((stack_index, shadowed_id)) = self
+ .semantic_model
.scopes
- .ancestors(self.ctx.scope_id)
+ .ancestors(self.semantic_model.scope_id)
.enumerate()
.find_map(|(stack_index, scope)| {
- scope.get(name).map(|binding_id| (stack_index, *binding_id))
+ scope.get(name).map(|binding_id| (stack_index, binding_id))
})
{
- let existing = &self.ctx.bindings[existing_binding_id];
+ let shadowed = &self.semantic_model.bindings[shadowed_id];
let in_current_scope = stack_index == 0;
- if !existing.kind.is_builtin()
- && existing.source.map_or(true, |left| {
+ if !shadowed.kind.is_builtin()
+ && shadowed.source.map_or(true, |left| {
binding.source.map_or(true, |right| {
- !branch_detection::different_forks(
- left,
- right,
- &self.ctx.depths,
- &self.ctx.child_to_parent,
- )
+ !branch_detection::different_forks(left, right, &self.semantic_model.stmts)
})
})
{
- let existing_is_import = matches!(
- existing.kind,
+ let shadows_import = matches!(
+ shadowed.kind,
BindingKind::Importation(..)
| BindingKind::FromImportation(..)
| BindingKind::SubmoduleImportation(..)
| BindingKind::FutureImportation
);
- if binding.kind.is_loop_var() && existing_is_import {
- if self.settings.rules.enabled(Rule::ImportShadowedByLoopVar) {
+ if binding.kind.is_loop_var() && shadows_import {
+ if self.enabled(Rule::ImportShadowedByLoopVar) {
#[allow(deprecated)]
- let line = self.locator.compute_line_index(existing.range.start());
+ let line = self.locator.compute_line_index(shadowed.range.start());
self.diagnostics.push(Diagnostic::new(
pyflakes::rules::ImportShadowedByLoopVar {
@@ -4242,364 +4398,217 @@ impl<'a> Checker<'a> {
));
}
} else if in_current_scope {
- if !existing.used()
- && binding.redefines(existing)
- && (!self.settings.dummy_variable_rgx.is_match(name) || existing_is_import)
- && !(existing.kind.is_function_definition()
+ if !shadowed.is_used()
+ && binding.redefines(shadowed)
+ && (!self.settings.dummy_variable_rgx.is_match(name) || shadows_import)
+ && !(shadowed.kind.is_function_definition()
&& analyze::visibility::is_overload(
- &self.ctx,
- cast::decorator_list(existing.source.as_ref().unwrap()),
+ &self.semantic_model,
+ cast::decorator_list(
+ self.semantic_model.stmts[shadowed.source.unwrap()],
+ ),
))
{
- if self.settings.rules.enabled(Rule::RedefinedWhileUnused) {
+ if self.enabled(Rule::RedefinedWhileUnused) {
#[allow(deprecated)]
- let line = self.locator.compute_line_index(existing.range.start());
+ let line = self.locator.compute_line_index(
+ shadowed
+ .trimmed_range(&self.semantic_model, self.locator)
+ .start(),
+ );
let mut diagnostic = Diagnostic::new(
pyflakes::rules::RedefinedWhileUnused {
name: name.to_string(),
line,
},
- matches!(
- binding.kind,
- BindingKind::ClassDefinition | BindingKind::FunctionDefinition
- )
- .then(|| {
- binding.source.as_ref().map_or(binding.range, |source| {
- helpers::identifier_range(source, self.locator)
- })
- })
- .unwrap_or(binding.range),
+ binding.trimmed_range(&self.semantic_model, self.locator),
);
- if let Some(parent) = binding.source.as_ref() {
- if matches!(parent.node, StmtKind::ImportFrom { .. })
- && parent.range().contains_range(binding.range)
- {
- diagnostic.set_parent(parent.start());
- }
+ if let Some(range) = binding.parent_range(&self.semantic_model) {
+ diagnostic.set_parent(range.start());
}
self.diagnostics.push(diagnostic);
}
}
- } else if existing_is_import && binding.redefines(existing) {
- self.ctx
+ } else if shadows_import && binding.redefines(shadowed) {
+ self.semantic_model
.shadowed_bindings
- .entry(existing_binding_id)
- .or_insert_with(Vec::new)
- .push(binding_id);
+ .insert(binding_id, shadowed_id);
}
}
}
- // Per [PEP 572](https://peps.python.org/pep-0572/#scope-of-the-target), named
- // expressions in generators and comprehensions bind to the scope that contains the
- // outermost comprehension.
- let scope_id = if binding.kind.is_named_expr_assignment() {
- self.ctx
- .scopes
- .ancestor_ids(self.ctx.scope_id)
- .find_or_last(|scope_id| !self.ctx.scopes[*scope_id].kind.is_generator())
- .unwrap_or(self.ctx.scope_id)
- } else {
- self.ctx.scope_id
- };
- let scope = &mut self.ctx.scopes[scope_id];
-
- let binding = if let Some(index) = scope.get(name) {
- let existing = &self.ctx.bindings[*index];
- match &existing.kind {
+ // If there's an existing binding in this scope, copy its references.
+ if let Some(shadowed) = self.semantic_model.scopes[scope_id]
+ .get(name)
+ .map(|binding_id| &self.semantic_model.bindings[binding_id])
+ {
+ match &shadowed.kind {
BindingKind::Builtin => {
// Avoid overriding builtins.
- binding
}
kind @ (BindingKind::Global | BindingKind::Nonlocal) => {
- // If the original binding was a global or nonlocal, and the new binding conflicts within
- // the current scope, then the new binding is also as the same.
- Binding {
- runtime_usage: existing.runtime_usage,
- synthetic_usage: existing.synthetic_usage,
- typing_usage: existing.typing_usage,
- kind: kind.clone(),
- ..binding
- }
- }
- _ => Binding {
- runtime_usage: existing.runtime_usage,
- synthetic_usage: existing.synthetic_usage,
- typing_usage: existing.typing_usage,
- ..binding
- },
+ // If the original binding was a global or nonlocal, then the new binding is
+ // too.
+ let references = shadowed.references.clone();
+ self.semantic_model.bindings[binding_id].kind = kind.clone();
+ self.semantic_model.bindings[binding_id].references = references;
+ }
+ _ => {
+ let references = shadowed.references.clone();
+ self.semantic_model.bindings[binding_id].references = references;
+ }
}
- } else {
- binding
- };
- // Don't treat annotations as assignments if there is an existing value
- // in scope.
- if binding.kind.is_annotation() && scope.defines(name) {
- self.ctx.bindings.push(binding);
- return;
+ // If this is an annotation, and we already have an existing value in the same scope,
+ // don't treat it as an assignment (i.e., avoid adding it to the scope).
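+ // Ex) given `x = 1` followed by `x: int`, the bare annotation must not replace the
+ // existing binding of `x`.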
+ if self.semantic_model.bindings[binding_id]
+ .kind
+ .is_annotation()
+ {
+ return binding_id;
+ }
}
// Add the binding to the scope.
+ let scope = &mut self.semantic_model.scopes[scope_id];
scope.add(name, binding_id);
- // Add the binding to the arena.
- self.ctx.bindings.push(binding);
+ binding_id
}
fn bind_builtins(&mut self) {
- let scope = &mut self.ctx.scopes[self.ctx.scope_id];
-
for builtin in BUILTINS
.iter()
.chain(MAGIC_GLOBALS.iter())
.copied()
.chain(self.settings.builtins.iter().map(String::as_str))
{
- let id = self.ctx.bindings.push(Binding {
- kind: BindingKind::Builtin,
- range: TextRange::default(),
- runtime_usage: None,
- synthetic_usage: Some((ScopeId::global(), TextRange::default())),
- typing_usage: None,
- source: None,
- context: ExecutionContext::Runtime,
- exceptions: Exceptions::empty(),
- });
- scope.add(builtin, id);
+ // Add the builtin to the scope.
+ let binding_id = self.semantic_model.push_builtin();
+ let scope = self.semantic_model.scope_mut();
+ scope.add(builtin, binding_id);
}
}
fn handle_node_load(&mut self, expr: &Expr) {
- let ExprKind::Name { id, .. } = &expr.node else {
+ let Expr::Name(ast::ExprName { id, .. }) = expr else {
return;
};
-
- let mut first_iter = true;
- let mut in_generator = false;
- let mut import_starred = false;
-
- for scope in self.ctx.scopes.ancestors(self.ctx.scope_id) {
- if scope.kind.is_class() {
- if id == "__class__" {
- return;
- } else if !first_iter && !in_generator {
- continue;
- }
+ match self.semantic_model.resolve_reference(id, expr.range()) {
+ ResolvedReference::Resolved(..) | ResolvedReference::ImplicitGlobal => {
+ // Nothing to do.
}
-
- if let Some(index) = scope.get(id.as_str()) {
- // Mark the binding as used.
- let context = self.ctx.execution_context();
- self.ctx.bindings[*index].mark_used(self.ctx.scope_id, expr.range(), context);
-
- if self.ctx.bindings[*index].kind.is_annotation()
- && self.ctx.in_deferred_string_type_definition.is_none()
- && !self.ctx.in_deferred_type_definition
- {
- continue;
+ ResolvedReference::StarImport => {
+ // F405
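+ // Ex) with `from os import *` in scope, a reference to `getcwd` may originate from the
+ // star import, so it is reported as F405 rather than as an undefined name.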
+ if self.enabled(Rule::UndefinedLocalWithImportStarUsage) {
+ let sources: Vec<String> = self
+ .semantic_model
+ .scopes
+ .iter()
+ .flat_map(Scope::star_imports)
+ .map(|StarImportation { level, module }| {
+ helpers::format_import_from(*level, *module)
+ })
+ .sorted()
+ .dedup()
+ .collect();
+ self.diagnostics.push(Diagnostic::new(
+ pyflakes::rules::UndefinedLocalWithImportStarUsage {
+ name: id.to_string(),
+ sources,
+ },
+ expr.range(),
+ ));
}
+ }
+ ResolvedReference::NotFound => {
+ // F821
+ if self.enabled(Rule::UndefinedName) {
+ // Allow __path__.
+ if self.path.ends_with("__init__.py") && id == "__path__" {
+ return;
+ }
- // If the name of the sub-importation is the same as an alias of another
- // importation and the alias is used, that sub-importation should be
- // marked as used too.
- //
- // This handles code like:
- // import pyarrow as pa
- // import pyarrow.csv
- // print(pa.csv.read_csv("test.csv"))
- match &self.ctx.bindings[*index].kind {
- BindingKind::Importation(Importation { name, full_name })
- | BindingKind::SubmoduleImportation(SubmoduleImportation { name, full_name }) =>
+ // Avoid flagging if `NameError` is handled.
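+ // Ex) `x` is not flagged in:
+ // ```py
+ // try:
+ //     print(x)
+ // except NameError:
+ //     pass
+ // ```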
+ if self
+ .semantic_model
+ .handled_exceptions
+ .iter()
+ .any(|handler_names| handler_names.contains(Exceptions::NAME_ERROR))
{
- let has_alias = full_name
- .split('.')
- .last()
- .map(|segment| &segment != name)
- .unwrap_or_default();
- if has_alias {
- // Mark the sub-importation as used.
- if let Some(index) = scope.get(full_name) {
- self.ctx.bindings[*index].mark_used(
- self.ctx.scope_id,
- expr.range(),
- context,
- );
- }
- }
- }
- BindingKind::FromImportation(FromImportation { name, full_name }) => {
- let has_alias = full_name
- .split('.')
- .last()
- .map(|segment| &segment != name)
- .unwrap_or_default();
- if has_alias {
- // Mark the sub-importation as used.
- if let Some(index) = scope.get(full_name.as_str()) {
- self.ctx.bindings[*index].mark_used(
- self.ctx.scope_id,
- expr.range(),
- context,
- );
- }
- }
+ return;
}
- _ => {}
- }
-
- return;
- }
- first_iter = false;
- in_generator = matches!(scope.kind, ScopeKind::Generator);
- import_starred = import_starred || scope.uses_star_imports();
- }
-
- if import_starred {
- // F405
- if self
- .settings
- .rules
- .enabled(Rule::UndefinedLocalWithImportStarUsage)
- {
- let sources: Vec<String> = self
- .ctx
- .scopes
- .iter()
- .flat_map(Scope::star_imports)
- .map(|StarImportation { level, module }| {
- helpers::format_import_from(*level, *module)
- })
- .sorted()
- .dedup()
- .collect();
- self.diagnostics.push(Diagnostic::new(
- pyflakes::rules::UndefinedLocalWithImportStarUsage {
- name: id.to_string(),
- sources,
- },
- expr.range(),
- ));
- }
- return;
- }
-
- if self.settings.rules.enabled(Rule::UndefinedName) {
- // Allow __path__.
- if self.path.ends_with("__init__.py") && id == "__path__" {
- return;
- }
-
- // Allow "__module__" and "__qualname__" in class scopes.
- if (id == "__module__" || id == "__qualname__")
- && matches!(self.ctx.scope().kind, ScopeKind::Class(..))
- {
- return;
- }
-
- // Avoid flagging if NameError is handled.
- if self
- .ctx
- .handled_exceptions
- .iter()
- .any(|handler_names| handler_names.contains(Exceptions::NAME_ERROR))
- {
- return;
+ self.diagnostics.push(Diagnostic::new(
+ pyflakes::rules::UndefinedName {
+ name: id.to_string(),
+ },
+ expr.range(),
+ ));
+ }
}
-
- self.diagnostics.push(Diagnostic::new(
- pyflakes::rules::UndefinedName { name: id.clone() },
- expr.range(),
- ));
}
}
fn handle_node_store(&mut self, id: &'a str, expr: &Expr) {
- let parent = self.ctx.current_stmt().0;
+ let parent = self.semantic_model.stmt();
- if self.settings.rules.enabled(Rule::UndefinedLocal) {
+ if self.enabled(Rule::UndefinedLocal) {
pyflakes::rules::undefined_local(self, id);
}
- if self
- .settings
- .rules
- .enabled(Rule::NonLowercaseVariableInFunction)
- {
- if matches!(self.ctx.scope().kind, ScopeKind::Function(..)) {
+ if self.enabled(Rule::NonLowercaseVariableInFunction) {
+ if self.semantic_model.scope().kind.is_any_function() {
// Ignore globals.
if !self
- .ctx
+ .semantic_model
.scope()
.get(id)
- .map_or(false, |index| self.ctx.bindings[*index].kind.is_global())
+ .map_or(false, |binding_id| {
+ self.semantic_model.bindings[binding_id].kind.is_global()
+ })
{
pep8_naming::rules::non_lowercase_variable_in_function(self, expr, parent, id);
}
}
}
- if self
- .settings
- .rules
- .enabled(Rule::MixedCaseVariableInClassScope)
- {
- if let ScopeKind::Class(class) = &self.ctx.scope().kind {
+ if self.enabled(Rule::MixedCaseVariableInClassScope) {
+ if let ScopeKind::Class(ast::StmtClassDef { bases, .. }) =
+ &self.semantic_model.scope().kind
+ {
pep8_naming::rules::mixed_case_variable_in_class_scope(
- self,
- expr,
- parent,
- id,
- class.bases,
+ self, expr, parent, id, bases,
);
}
}
- if self
- .settings
- .rules
- .enabled(Rule::MixedCaseVariableInGlobalScope)
- {
- if matches!(self.ctx.scope().kind, ScopeKind::Module) {
+ if self.enabled(Rule::MixedCaseVariableInGlobalScope) {
+ if matches!(self.semantic_model.scope().kind, ScopeKind::Module) {
pep8_naming::rules::mixed_case_variable_in_global_scope(self, expr, parent, id);
}
}
- if matches!(parent.node, StmtKind::AnnAssign { value: None, .. }) {
+ if matches!(
+ parent,
+ Stmt::AnnAssign(ast::StmtAnnAssign { value: None, .. })
+ ) {
self.add_binding(
id,
- Binding {
- kind: BindingKind::Annotation,
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: expr.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
+ expr.range(),
+ BindingKind::Annotation,
+ BindingFlags::empty(),
);
return;
}
- if matches!(
- parent.node,
- StmtKind::For { .. } | StmtKind::AsyncFor { .. }
- ) {
+ if matches!(parent, Stmt::For(_) | Stmt::AsyncFor(_)) {
self.add_binding(
id,
- Binding {
- kind: BindingKind::LoopVar,
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: expr.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
+ expr.range(),
+ BindingKind::LoopVar,
+ BindingFlags::empty(),
);
return;
}
@@ -4607,199 +4616,119 @@ impl<'a> Checker<'a> {
if helpers::is_unpacking_assignment(parent, expr) {
self.add_binding(
id,
- Binding {
- kind: BindingKind::Binding,
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: expr.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
+ expr.range(),
+ BindingKind::Binding,
+ BindingFlags::empty(),
);
return;
}
- let scope = self.ctx.scope();
+ let scope = self.semantic_model.scope();
- if id == "__all__"
- && scope.kind.is_module()
- && matches!(
- parent.node,
- StmtKind::Assign { .. } | StmtKind::AugAssign { .. } | StmtKind::AnnAssign { .. }
- )
- {
- if match &parent.node {
- StmtKind::Assign { targets, .. } => {
- if let Some(ExprKind::Name { id, .. }) =
- targets.first().map(|target| &target.node)
- {
+ if scope.kind.is_module()
+ && match parent {
+ Stmt::Assign(ast::StmtAssign { targets, .. }) => {
+ if let Some(Expr::Name(ast::ExprName { id, .. })) = targets.first() {
id == "__all__"
} else {
false
}
}
- StmtKind::AugAssign { target, .. } => {
- if let ExprKind::Name { id, .. } = &target.node {
+ Stmt::AugAssign(ast::StmtAugAssign { target, .. }) => {
+ if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
id == "__all__"
} else {
false
}
}
- StmtKind::AnnAssign { target, .. } => {
- if let ExprKind::Name { id, .. } = &target.node {
+ Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
+ if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
id == "__all__"
} else {
false
}
}
_ => false,
- } {
- let (all_names, all_names_flags) = {
- let (mut names, flags) =
- extract_all_names(parent, |name| self.ctx.is_builtin(name));
-
- // Grab the existing bound __all__ values.
- if let StmtKind::AugAssign { .. } = &parent.node {
- if let Some(index) = scope.get("__all__") {
- if let BindingKind::Export(Export { names: existing }) =
- &self.ctx.bindings[*index].kind
- {
- names.extend_from_slice(existing);
- }
- }
- }
-
- (names, flags)
- };
+ }
+ {
+ let (names, flags) =
+ extract_all_names(parent, |name| self.semantic_model.is_builtin(name));
- if self.settings.rules.enabled(Rule::InvalidAllFormat) {
- if matches!(all_names_flags, AllNamesFlags::INVALID_FORMAT) {
- self.diagnostics
- .push(pylint::rules::invalid_all_format(expr));
- }
+ if self.enabled(Rule::InvalidAllFormat) {
+ if matches!(flags, AllNamesFlags::INVALID_FORMAT) {
+ self.diagnostics
+ .push(pylint::rules::invalid_all_format(expr));
}
+ }
- if self.settings.rules.enabled(Rule::InvalidAllObject) {
- if matches!(all_names_flags, AllNamesFlags::INVALID_OBJECT) {
- self.diagnostics
- .push(pylint::rules::invalid_all_object(expr));
- }
+ if self.enabled(Rule::InvalidAllObject) {
+ if matches!(flags, AllNamesFlags::INVALID_OBJECT) {
+ self.diagnostics
+ .push(pylint::rules::invalid_all_object(expr));
}
-
- self.add_binding(
- id,
- Binding {
- kind: BindingKind::Export(Export { names: all_names }),
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: expr.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
- );
- return;
}
+
+ self.add_binding(
+ id,
+ expr.range(),
+ BindingKind::Export(Export { names }),
+ BindingFlags::empty(),
+ );
+ return;
}
if self
- .ctx
+ .semantic_model
.expr_ancestors()
- .any(|expr| matches!(expr.node, ExprKind::NamedExpr { .. }))
+ .any(|expr| matches!(expr, Expr::NamedExpr(_)))
{
self.add_binding(
id,
- Binding {
- kind: BindingKind::NamedExprAssignment,
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: expr.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
+ expr.range(),
+ BindingKind::NamedExprAssignment,
+ BindingFlags::empty(),
);
return;
}
self.add_binding(
id,
- Binding {
- kind: BindingKind::Assignment,
- runtime_usage: None,
- synthetic_usage: None,
- typing_usage: None,
- range: expr.range(),
- source: Some(*self.ctx.current_stmt()),
- context: self.ctx.execution_context(),
- exceptions: self.ctx.exceptions(),
- },
+ expr.range(),
+ BindingKind::Assignment,
+ BindingFlags::empty(),
);
}
fn handle_node_delete(&mut self, expr: &'a Expr) {
- let ExprKind::Name { id, .. } = &expr.node else {
+ let Expr::Name(ast::ExprName { id, .. }) = expr else {
return;
};
- if helpers::on_conditional_branch(&mut self.ctx.parents.iter().rev().map(Into::into)) {
+ if helpers::on_conditional_branch(&mut self.semantic_model.parents()) {
return;
}
- let scope = self.ctx.scope_mut();
- if scope.remove(id.as_str()).is_some() {
- return;
- }
- if !self.settings.rules.enabled(Rule::UndefinedName) {
- return;
+ let scope = self.semantic_model.scope_mut();
+ if scope.delete(id.as_str()).is_none() {
+ if self.enabled(Rule::UndefinedName) {
+ self.diagnostics.push(Diagnostic::new(
+ pyflakes::rules::UndefinedName {
+ name: id.to_string(),
+ },
+ expr.range(),
+ ));
+ }
}
-
- self.diagnostics.push(Diagnostic::new(
- pyflakes::rules::UndefinedName {
- name: id.to_string(),
- },
- expr.range(),
- ));
}
- fn visit_docstring(&mut self, python_ast: &'a Suite) -> bool {
- if self.settings.rules.enabled(Rule::FStringDocstring) {
- flake8_bugbear::rules::f_string_docstring(self, python_ast);
- }
- let docstring = docstrings::extraction::docstring_from(python_ast);
- self.deferred.definitions.push((
- Definition {
- kind: if self.path.ends_with("__init__.py") {
- DefinitionKind::Package
- } else {
- DefinitionKind::Module
- },
- docstring,
- },
- self.ctx.visible_scope.visibility,
- (self.ctx.scope_id, self.ctx.parents.clone()),
- ));
- docstring.is_some()
- }
+ fn check_deferred_future_type_definitions(&mut self) {
+ while !self.deferred.future_type_definitions.is_empty() {
+ let type_definitions = std::mem::take(&mut self.deferred.future_type_definitions);
+ for (expr, snapshot) in type_definitions {
+ self.semantic_model.restore(snapshot);
- fn check_deferred_type_definitions(&mut self) {
- while !self.deferred.type_definitions.is_empty() {
- let type_definitions = std::mem::take(&mut self.deferred.type_definitions);
- for (expr, (in_annotation, in_type_checking_block), (scope_id, parents)) in
- type_definitions
- {
- self.ctx.scope_id = scope_id;
- self.ctx.parents = parents;
- self.ctx.in_annotation = in_annotation;
- self.ctx.in_type_checking_block = in_type_checking_block;
- self.ctx.in_type_definition = true;
- self.ctx.in_deferred_type_definition = true;
+ self.semantic_model.flags |= SemanticModelFlags::TYPE_DEFINITION
+ | SemanticModelFlags::FUTURE_TYPE_DEFINITION;
self.visit_expr(expr);
- self.ctx.in_deferred_type_definition = false;
- self.ctx.in_type_definition = false;
}
}
}
@@ -4807,38 +4736,37 @@ impl<'a> Checker<'a> {
fn check_deferred_string_type_definitions(&mut self, allocator: &'a typed_arena::Arena<Expr>) {
while !self.deferred.string_type_definitions.is_empty() {
let type_definitions = std::mem::take(&mut self.deferred.string_type_definitions);
- for (range, value, (in_annotation, in_type_checking_block), (scope_id, parents)) in
- type_definitions
- {
+ for (range, value, snapshot) in type_definitions {
if let Ok((expr, kind)) = parse_type_annotation(value, range, self.locator) {
- if in_annotation && self.ctx.annotations_future_enabled {
- if self.settings.rules.enabled(Rule::QuotedAnnotation) {
+ let expr = allocator.alloc(expr);
+
+ self.semantic_model.restore(snapshot);
+
+ if self.semantic_model.in_annotation()
+ && self.semantic_model.future_annotations()
+ {
+ if self.enabled(Rule::QuotedAnnotation) {
pyupgrade::rules::quoted_annotation(self, value, range);
}
}
if self.is_stub {
- if self.settings.rules.enabled(Rule::QuotedAnnotationInStub) {
+ if self.enabled(Rule::QuotedAnnotationInStub) {
flake8_pyi::rules::quoted_annotation_in_stub(self, value, range);
}
}
- let expr = allocator.alloc(expr);
+ let type_definition_flag = match kind {
+ AnnotationKind::Simple => SemanticModelFlags::SIMPLE_STRING_TYPE_DEFINITION,
+ AnnotationKind::Complex => {
+ SemanticModelFlags::COMPLEX_STRING_TYPE_DEFINITION
+ }
+ };
- self.ctx.scope_id = scope_id;
- self.ctx.parents = parents;
- self.ctx.in_annotation = in_annotation;
- self.ctx.in_type_checking_block = in_type_checking_block;
- self.ctx.in_type_definition = true;
- self.ctx.in_deferred_string_type_definition = Some(kind);
+ self.semantic_model.flags |=
+ SemanticModelFlags::TYPE_DEFINITION | type_definition_flag;
self.visit_expr(expr);
- self.ctx.in_deferred_string_type_definition = None;
- self.ctx.in_type_definition = false;
} else {
- if self
- .settings
- .rules
- .enabled(Rule::ForwardAnnotationSyntaxError)
- {
+ if self.enabled(Rule::ForwardAnnotationSyntaxError) {
self.diagnostics.push(Diagnostic::new(
pyflakes::rules::ForwardAnnotationSyntaxError {
body: value.to_string(),
@@ -4854,27 +4782,21 @@ impl<'a> Checker<'a> {
fn check_deferred_functions(&mut self) {
while !self.deferred.functions.is_empty() {
let deferred_functions = std::mem::take(&mut self.deferred.functions);
- for (stmt, (scope_id, parents), visibility) in deferred_functions {
- let parents_snapshot = parents.len();
- self.ctx.scope_id = scope_id;
- self.ctx.parents = parents;
- self.ctx.visible_scope = visibility;
-
- match &stmt.node {
- StmtKind::FunctionDef { body, args, .. }
- | StmtKind::AsyncFunctionDef { body, args, .. } => {
+ for snapshot in deferred_functions {
+ self.semantic_model.restore(snapshot);
+
+ match &self.semantic_model.stmt() {
+ Stmt::FunctionDef(ast::StmtFunctionDef { body, args, .. })
+ | Stmt::AsyncFunctionDef(ast::StmtAsyncFunctionDef { body, args, .. }) => {
self.visit_arguments(args);
self.visit_body(body);
}
_ => {
- unreachable!("Expected StmtKind::FunctionDef | StmtKind::AsyncFunctionDef")
+ unreachable!("Expected Stmt::FunctionDef | Stmt::AsyncFunctionDef")
}
}
- let mut parents = std::mem::take(&mut self.ctx.parents);
- parents.truncate(parents_snapshot);
-
- self.deferred.assignments.push((scope_id, parents));
+ self.deferred.assignments.push(snapshot);
}
}
}
@@ -4882,22 +4804,22 @@ impl<'a> Checker<'a> {
fn check_deferred_lambdas(&mut self) {
while !self.deferred.lambdas.is_empty() {
let lambdas = std::mem::take(&mut self.deferred.lambdas);
- for (expr, (scope_id, parents)) in lambdas {
- let parents_snapshot = parents.len();
+ for (expr, snapshot) in lambdas {
+ self.semantic_model.restore(snapshot);
- self.ctx.scope_id = scope_id;
- self.ctx.parents = parents;
-
- if let ExprKind::Lambda { args, body } = &expr.node {
+ if let Expr::Lambda(ast::ExprLambda {
+ args,
+ body,
+ range: _,
+ }) = expr
+ {
self.visit_arguments(args);
self.visit_expr(body);
} else {
- unreachable!("Expected ExprKind::Lambda");
+ unreachable!("Expected Expr::Lambda");
}
- let mut parents = std::mem::take(&mut self.ctx.parents);
- parents.truncate(parents_snapshot);
- self.deferred.assignments.push((scope_id, parents));
+ self.deferred.assignments.push(snapshot);
}
}
}
@@ -4905,32 +4827,34 @@ impl<'a> Checker<'a> {
fn check_deferred_assignments(&mut self) {
while !self.deferred.assignments.is_empty() {
let assignments = std::mem::take(&mut self.deferred.assignments);
- for (scope_id, ..) in assignments {
+ for snapshot in assignments {
+ self.semantic_model.restore(snapshot);
+
// pyflakes
- if self.settings.rules.enabled(Rule::UnusedVariable) {
- pyflakes::rules::unused_variable(self, scope_id);
+ if self.enabled(Rule::UnusedVariable) {
+ pyflakes::rules::unused_variable(self, self.semantic_model.scope_id);
}
- if self.settings.rules.enabled(Rule::UnusedAnnotation) {
- pyflakes::rules::unused_annotation(self, scope_id);
+ if self.enabled(Rule::UnusedAnnotation) {
+ pyflakes::rules::unused_annotation(self, self.semantic_model.scope_id);
}
if !self.is_stub {
// flake8-unused-arguments
- if self.settings.rules.any_enabled(&[
+ if self.any_enabled(&[
Rule::UnusedFunctionArgument,
Rule::UnusedMethodArgument,
Rule::UnusedClassMethodArgument,
Rule::UnusedStaticMethodArgument,
Rule::UnusedLambdaArgument,
]) {
- let scope = &self.ctx.scopes[scope_id];
- let parent = &self.ctx.scopes[scope.parent.unwrap()];
+ let scope = &self.semantic_model.scopes[self.semantic_model.scope_id];
+ let parent = &self.semantic_model.scopes[scope.parent.unwrap()];
self.diagnostics
.extend(flake8_unused_arguments::rules::unused_arguments(
self,
parent,
scope,
- &self.ctx.bindings,
+ &self.semantic_model.bindings,
));
}
}
@@ -4942,20 +4866,18 @@ impl<'a> Checker<'a> {
while !self.deferred.for_loops.is_empty() {
let for_loops = std::mem::take(&mut self.deferred.for_loops);
- for (stmt, (scope_id, parents)) in for_loops {
- self.ctx.scope_id = scope_id;
- self.ctx.parents = parents;
+ for snapshot in for_loops {
+ self.semantic_model.restore(snapshot);
- if let StmtKind::For { target, body, .. }
- | StmtKind::AsyncFor { target, body, .. } = &stmt.node
+ if let Stmt::For(ast::StmtFor { target, body, .. })
+ | Stmt::AsyncFor(ast::StmtAsyncFor { target, body, .. }) =
+ &self.semantic_model.stmt()
{
- if self.settings.rules.enabled(Rule::UnusedLoopControlVariable) {
- flake8_bugbear::rules::unused_loop_control_variable(
- self, stmt, target, body,
- );
+ if self.enabled(Rule::UnusedLoopControlVariable) {
+ flake8_bugbear::rules::unused_loop_control_variable(self, target, body);
}
} else {
- unreachable!("Expected ExprKind::For | ExprKind::AsyncFor");
+ unreachable!("Expected Expr::For | Expr::AsyncFor");
}
}
}
@@ -4963,7 +4885,7 @@ impl<'a> Checker<'a> {
fn check_dead_scopes(&mut self) {
let enforce_typing_imports = !self.is_stub
- && self.settings.rules.any_enabled(&[
+ && self.any_enabled(&[
Rule::GlobalVariableNotAssigned,
Rule::RuntimeImportInTypeCheckingBlock,
Rule::TypingOnlyFirstPartyImport,
@@ -4972,7 +4894,7 @@ impl<'a> Checker<'a> {
]);
if !(enforce_typing_imports
- || self.settings.rules.any_enabled(&[
+ || self.any_enabled(&[
Rule::UnusedImport,
Rule::UndefinedLocalWithImportStarUsage,
Rule::RedefinedWhileUnused,
@@ -4983,48 +4905,31 @@ impl<'a> Checker<'a> {
}
// Mark anything referenced in `__all__` as used.
- let all_bindings: Option<(Vec<BindingId>, TextRange)> = {
- let global_scope = self.ctx.global_scope();
- let all_names: Option<(&Vec<&str>, TextRange)> = global_scope
- .get("__all__")
- .map(|index| &self.ctx.bindings[*index])
- .and_then(|binding| match &binding.kind {
- BindingKind::Export(Export { names }) => Some((names, binding.range)),
+ let exports: Vec<(&str, TextRange)> = {
+ let global_scope = self.semantic_model.global_scope();
+ global_scope
+ .bindings_for_name("__all__")
+ .map(|binding_id| &self.semantic_model.bindings[binding_id])
+ .filter_map(|binding| match &binding.kind {
+ BindingKind::Export(Export { names }) => {
+ Some(names.iter().map(|name| (*name, binding.range)))
+ }
_ => None,
- });
-
- all_names.map(|(names, range)| {
- (
- names
- .iter()
- .filter_map(|name| global_scope.get(name).copied())
- .collect(),
- range,
- )
- })
+ })
+ .flatten()
+ .collect()
};
- if let Some((bindings, range)) = all_bindings {
- for index in bindings {
- self.ctx.bindings[index].mark_used(
- ScopeId::global(),
- range,
+ for (name, range) in &exports {
+ if let Some(binding_id) = self.semantic_model.global_scope().get(name) {
+ self.semantic_model.add_global_reference(
+ binding_id,
+ *range,
ExecutionContext::Runtime,
);
}
}
- // Extract `__all__` names from the global scope.
- let all_names: Option<(&[&str], TextRange)> = self
- .ctx
- .global_scope()
- .get("__all__")
- .map(|index| &self.ctx.bindings[*index])
- .and_then(|binding| match &binding.kind {
- BindingKind::Export(Export { names }) => Some((names.as_slice(), binding.range)),
- _ => None,
- });
-
// Identify any valid runtime imports. If a module is imported at runtime, and
// used at runtime, then by default, we avoid flagging any other
// imports from that model as typing-only.
@@ -5032,15 +4937,18 @@ impl<'a> Checker<'a> {
if self.settings.flake8_type_checking.strict {
vec![]
} else {
- self.ctx
+ self.semantic_model
.scopes
.iter()
.map(|scope| {
scope
.binding_ids()
- .map(|index| &self.ctx.bindings[*index])
+ .map(|binding_id| &self.semantic_model.bindings[binding_id])
.filter(|binding| {
- flake8_type_checking::helpers::is_valid_runtime_import(binding)
+ flake8_type_checking::helpers::is_valid_runtime_import(
+ &self.semantic_model,
+ binding,
+ )
})
.collect()
})
@@ -5051,46 +4959,40 @@ impl<'a> Checker<'a> {
};
let mut diagnostics: Vec<Diagnostic> = vec![];
- for scope_id in self.ctx.dead_scopes.iter().rev() {
- let scope = &self.ctx.scopes[*scope_id];
+ for scope_id in self.semantic_model.dead_scopes.iter().rev() {
+ let scope = &self.semantic_model.scopes[*scope_id];
if scope.kind.is_module() {
// F822
- if self.settings.rules.enabled(Rule::UndefinedExport) {
+ if self.enabled(Rule::UndefinedExport) {
if !self.path.ends_with("__init__.py") {
- if let Some((names, range)) = all_names {
+ for (name, range) in &exports {
diagnostics
- .extend(pyflakes::rules::undefined_export(names, range, scope));
+ .extend(pyflakes::rules::undefined_export(name, *range, scope));
}
}
}
// F405
- if self
- .settings
- .rules
- .enabled(Rule::UndefinedLocalWithImportStarUsage)
- {
- if let Some((names, range)) = &all_names {
- let sources: Vec<String> = scope
- .star_imports()
- .map(|StarImportation { level, module }| {
- helpers::format_import_from(*level, *module)
- })
- .sorted()
- .dedup()
- .collect();
- if !sources.is_empty() {
- for &name in names.iter() {
- if !scope.defines(name) {
- diagnostics.push(Diagnostic::new(
- pyflakes::rules::UndefinedLocalWithImportStarUsage {
- name: name.to_string(),
- sources: sources.clone(),
- },
- *range,
- ));
- }
+ if self.enabled(Rule::UndefinedLocalWithImportStarUsage) {
+ let sources: Vec<String> = scope
+ .star_imports()
+ .map(|StarImportation { level, module }| {
+ helpers::format_import_from(*level, *module)
+ })
+ .sorted()
+ .dedup()
+ .collect();
+ if !sources.is_empty() {
+ for (name, range) in &exports {
+ if !scope.has(name) {
+ diagnostics.push(Diagnostic::new(
+ pyflakes::rules::UndefinedLocalWithImportStarUsage {
+ name: (*name).to_string(),
+ sources: sources.clone(),
+ },
+ *range,
+ ));
}
}
}
@@ -5098,12 +5000,13 @@ impl<'a> Checker<'a> {
}
// PLW0602
- if self.settings.rules.enabled(Rule::GlobalVariableNotAssigned) {
- for (name, index) in scope.bindings() {
- let binding = &self.ctx.bindings[*index];
+ if self.enabled(Rule::GlobalVariableNotAssigned) {
+ for (name, binding_id) in scope.bindings() {
+ let binding = &self.semantic_model.bindings[binding_id];
if binding.kind.is_global() {
- if let Some(stmt) = &binding.source {
- if matches!(stmt.node, StmtKind::Global { .. }) {
+ if let Some(source) = binding.source {
+ let stmt = &self.semantic_model.stmts[source];
+ if stmt.is_global_stmt() {
diagnostics.push(Diagnostic::new(
pylint::rules::GlobalVariableNotAssigned {
name: (*name).to_string(),
@@ -5117,61 +5020,40 @@ impl<'a> Checker<'a> {
}
// Imports in classes are public members.
- if matches!(scope.kind, ScopeKind::Class(..)) {
+ if scope.kind.is_class() {
continue;
}
// Look for any bindings that were redefined in another scope, and remain
- // unused. Note that we only store references in `redefinitions` if
+ // unused. Note that we only store references in `shadowed_bindings` if
// the bindings are in different scopes.
- if self.settings.rules.enabled(Rule::RedefinedWhileUnused) {
- for (name, index) in scope.bindings() {
- let binding = &self.ctx.bindings[*index];
-
- if matches!(
- binding.kind,
- BindingKind::Importation(..)
- | BindingKind::FromImportation(..)
- | BindingKind::SubmoduleImportation(..)
- | BindingKind::FutureImportation
- ) {
- if binding.used() {
+ if self.enabled(Rule::RedefinedWhileUnused) {
+ for (name, binding_id) in scope.bindings() {
+ if let Some(shadowed) = self.semantic_model.shadowed_binding(binding_id) {
+ if shadowed.is_used() {
continue;
}
- if let Some(indices) = self.ctx.shadowed_bindings.get(index) {
- for index in indices {
- let rebound = &self.ctx.bindings[*index];
- #[allow(deprecated)]
- let line = self.locator.compute_line_index(binding.range.start());
+ let binding = &self.semantic_model.bindings[binding_id];
- let mut diagnostic = Diagnostic::new(
- pyflakes::rules::RedefinedWhileUnused {
- name: (*name).to_string(),
- line,
- },
- matches!(
- rebound.kind,
- BindingKind::ClassDefinition
- | BindingKind::FunctionDefinition
- )
- .then(|| {
- rebound.source.as_ref().map_or(rebound.range, |source| {
- helpers::identifier_range(source, self.locator)
- })
- })
- .unwrap_or(rebound.range),
- );
- if let Some(parent) = &rebound.source {
- if matches!(parent.node, StmtKind::ImportFrom { .. })
- && parent.range().contains_range(rebound.range)
- {
- diagnostic.set_parent(parent.start());
- }
- };
- diagnostics.push(diagnostic);
- }
+ #[allow(deprecated)]
+ let line = self.locator.compute_line_index(
+ shadowed
+ .trimmed_range(&self.semantic_model, self.locator)
+ .start(),
+ );
+
+ let mut diagnostic = Diagnostic::new(
+ pyflakes::rules::RedefinedWhileUnused {
+ name: (*name).to_string(),
+ line,
+ },
+ binding.trimmed_range(&self.semantic_model, self.locator),
+ );
+ if let Some(range) = binding.parent_range(&self.semantic_model) {
+ diagnostic.set_parent(range.start());
}
+ diagnostics.push(diagnostic);
}
}
}
@@ -5180,198 +5062,42 @@ impl<'a> Checker<'a> {
let runtime_imports: Vec<&Binding> = if self.settings.flake8_type_checking.strict {
vec![]
} else {
- self.ctx
+ self.semantic_model
.scopes
.ancestor_ids(*scope_id)
- .flat_map(|scope_id| runtime_imports[usize::from(scope_id)].iter())
+ .flat_map(|scope_id| runtime_imports[scope_id.as_usize()].iter())
.copied()
.collect()
};
- for index in scope.binding_ids() {
- let binding = &self.ctx.bindings[*index];
- if let Some(diagnostic) =
- flake8_type_checking::rules::runtime_import_in_type_checking_block(binding)
- {
- if self.settings.rules.enabled(diagnostic.kind.rule()) {
- diagnostics.push(diagnostic);
- }
- }
- if let Some(diagnostic) =
- flake8_type_checking::rules::typing_only_runtime_import(
- binding,
- &runtime_imports,
- self.package,
- self.settings,
- )
- {
- if self.settings.rules.enabled(diagnostic.kind.rule()) {
- diagnostics.push(diagnostic);
- }
- }
- }
- }
-
- if self.settings.rules.enabled(Rule::UnusedImport) {
- // Collect all unused imports by location. (Multiple unused imports at the same
- // location indicates an `import from`.)
- type UnusedImport<'a> = (&'a str, &'a TextRange);
- type BindingContext<'a, 'b> = (
- &'a RefEquality<'b, Stmt>,
- Option<&'a RefEquality<'b, Stmt>>,
- Exceptions,
+ flake8_type_checking::rules::runtime_import_in_type_checking_block(
+ self,
+ scope,
+ &mut diagnostics,
);
- let mut unused: FxHashMap<BindingContext, Vec<UnusedImport>> = FxHashMap::default();
- let mut ignored: FxHashMap<BindingContext, Vec<UnusedImport>> =
- FxHashMap::default();
-
- for index in scope.binding_ids() {
- let binding = &self.ctx.bindings[*index];
-
- let full_name = match &binding.kind {
- BindingKind::Importation(Importation { full_name, .. }) => full_name,
- BindingKind::FromImportation(FromImportation { full_name, .. }) => {
- full_name.as_str()
- }
- BindingKind::SubmoduleImportation(SubmoduleImportation {
- full_name,
- ..
- }) => full_name,
- _ => continue,
- };
-
- if binding.used() {
- continue;
- }
-
- let defined_by = binding.source.as_ref().unwrap();
- let defined_in = self.ctx.child_to_parent.get(defined_by);
- let exceptions = binding.exceptions;
- let child: &Stmt = defined_by.into();
-
- let diagnostic_offset = binding.range.start();
- let parent_offset = if matches!(child.node, StmtKind::ImportFrom { .. }) {
- Some(child.start())
- } else {
- None
- };
-
- if self.rule_is_ignored(Rule::UnusedImport, diagnostic_offset)
- || parent_offset.map_or(false, |parent_offset| {
- self.rule_is_ignored(Rule::UnusedImport, parent_offset)
- })
- {
- ignored
- .entry((defined_by, defined_in, exceptions))
- .or_default()
- .push((full_name, &binding.range));
- } else {
- unused
- .entry((defined_by, defined_in, exceptions))
- .or_default()
- .push((full_name, &binding.range));
- }
- }
-
- let in_init =
- self.settings.ignore_init_module_imports && self.path.ends_with("__init__.py");
- for ((defined_by, defined_in, exceptions), unused_imports) in unused
- .into_iter()
- .sorted_by_key(|((defined_by, ..), ..)| defined_by.start())
- {
- let child: &Stmt = defined_by.into();
- let parent: Option<&Stmt> = defined_in.map(Into::into);
- let multiple = unused_imports.len() > 1;
- let in_except_handler = exceptions
- .intersects(Exceptions::MODULE_NOT_FOUND_ERROR | Exceptions::IMPORT_ERROR);
-
- let fix = if !in_init && !in_except_handler && self.patch(Rule::UnusedImport) {
- let deleted: Vec<&Stmt> = self.deletions.iter().map(Into::into).collect();
- match autofix::actions::remove_unused_imports(
- unused_imports.iter().map(|(full_name, _)| *full_name),
- child,
- parent,
- &deleted,
- self.locator,
- self.indexer,
- self.stylist,
- ) {
- Ok(fix) => {
- if fix.is_deletion() || fix.content() == Some("pass") {
- self.deletions.insert(*defined_by);
- }
- Some(fix)
- }
- Err(e) => {
- error!("Failed to remove unused imports: {e}");
- None
- }
- }
- } else {
- None
- };
+ flake8_type_checking::rules::typing_only_runtime_import(
+ self,
+ scope,
+ &runtime_imports,
+ &mut diagnostics,
+ );
+ }
- for (full_name, range) in unused_imports {
- let mut diagnostic = Diagnostic::new(
- pyflakes::rules::UnusedImport {
- name: full_name.to_string(),
- context: if in_except_handler {
- Some(pyflakes::rules::UnusedImportContext::ExceptHandler)
- } else if in_init {
- Some(pyflakes::rules::UnusedImportContext::Init)
- } else {
- None
- },
- multiple,
- },
- *range,
- );
- if matches!(child.node, StmtKind::ImportFrom { .. }) {
- diagnostic.set_parent(child.start());
- }
- if let Some(fix) = &fix {
- diagnostic.set_fix(fix.clone());
- }
- diagnostics.push(diagnostic);
- }
- }
- for ((defined_by, .., exceptions), unused_imports) in ignored
- .into_iter()
- .sorted_by_key(|((defined_by, ..), ..)| defined_by.start())
- {
- let child: &Stmt = defined_by.into();
- let multiple = unused_imports.len() > 1;
- let in_except_handler = exceptions
- .intersects(Exceptions::MODULE_NOT_FOUND_ERROR | Exceptions::IMPORT_ERROR);
- for (full_name, range) in unused_imports {
- let mut diagnostic = Diagnostic::new(
- pyflakes::rules::UnusedImport {
- name: full_name.to_string(),
- context: if in_except_handler {
- Some(pyflakes::rules::UnusedImportContext::ExceptHandler)
- } else if in_init {
- Some(pyflakes::rules::UnusedImportContext::Init)
- } else {
- None
- },
- multiple,
- },
- *range,
- );
- if matches!(child.node, StmtKind::ImportFrom { .. }) {
- diagnostic.set_parent(child.start());
- }
- diagnostics.push(diagnostic);
- }
- }
+ if self.enabled(Rule::UnusedImport) {
+ pyflakes::rules::unused_import(self, scope, &mut diagnostics);
}
}
self.diagnostics.extend(diagnostics);
}
+ /// Visit all the [`Definition`] nodes in the AST.
+ ///
+ /// This phase is expected to run after the AST has been traversed in its entirety; as such,
+ /// it is expected that all [`Definition`] nodes have been visited by that time, and that this
+ /// method will not recurse into any other nodes.
fn check_definitions(&mut self) {
- let enforce_annotations = self.settings.rules.any_enabled(&[
+ let enforce_annotations = self.any_enabled(&[
Rule::MissingTypeFunctionArgument,
Rule::MissingTypeArgs,
Rule::MissingTypeKwargs,
@@ -5384,7 +5110,9 @@ impl<'a> Checker<'a> {
Rule::MissingReturnTypeClassMethod,
Rule::AnyType,
]);
- let enforce_docstrings = self.settings.rules.any_enabled(&[
+ let enforce_stubs = self.is_stub
+ && self.any_enabled(&[Rule::DocstringInStub, Rule::IterMethodReturnIterable]);
+ let enforce_docstrings = self.any_enabled(&[
Rule::UndocumentedPublicModule,
Rule::UndocumentedPublicClass,
Rule::UndocumentedPublicMethod,
@@ -5433,252 +5161,246 @@ impl<'a> Checker<'a> {
Rule::EmptyDocstring,
]);
+ if !enforce_annotations && !enforce_docstrings && !enforce_stubs {
+ return;
+ }
+
+ // Compute visibility of all definitions.
+ let global_scope = self.semantic_model.global_scope();
+ let exports: Option<&[&str]> = global_scope
+ .get("__all__")
+ .map(|binding_id| &self.semantic_model.bindings[binding_id])
+ .and_then(|binding| match &binding.kind {
+ BindingKind::Export(Export { names }) => Some(names.as_slice()),
+ _ => None,
+ });
+ let definitions = std::mem::take(&mut self.semantic_model.definitions);
+
let mut overloaded_name: Option<String> = None;
- while !self.deferred.definitions.is_empty() {
- let definitions = std::mem::take(&mut self.deferred.definitions);
- for (definition, visibility, (scope_id, parents)) in definitions {
- self.ctx.scope_id = scope_id;
- self.ctx.parents = parents;
-
- // flake8-annotations
- if enforce_annotations {
- // TODO(charlie): This should be even stricter, in that an overload
- // implementation should come immediately after the overloaded
- // interfaces, without any AST nodes in between. Right now, we
- // only error when traversing definition boundaries (functions,
- // classes, etc.).
- if !overloaded_name.map_or(false, |overloaded_name| {
- flake8_annotations::helpers::is_overload_impl(
+ for ContextualizedDefinition {
+ definition,
+ visibility,
+ } in definitions.resolve(exports).iter()
+ {
+ let docstring = docstrings::extraction::extract_docstring(definition);
+
+ // flake8-annotations
+ if enforce_annotations {
+ // TODO(charlie): This should be even stricter, in that an overload
+ // implementation should come immediately after the overloaded
+ // interfaces, without any AST nodes in between. Right now, we
+ // only error when traversing definition boundaries (functions,
+ // classes, etc.).
+ if !overloaded_name.map_or(false, |overloaded_name| {
+ flake8_annotations::helpers::is_overload_impl(
+ &self.semantic_model,
+ definition,
+ &overloaded_name,
+ )
+ }) {
+ self.diagnostics
+ .extend(flake8_annotations::rules::definition(
self,
- &definition,
- &overloaded_name,
- )
- }) {
- self.diagnostics
- .extend(flake8_annotations::rules::definition(
- self,
- &definition,
- visibility,
- ));
- }
- overloaded_name =
- flake8_annotations::helpers::overloaded_name(self, &definition);
+ definition,
+ *visibility,
+ ));
}
+ overloaded_name =
+ flake8_annotations::helpers::overloaded_name(&self.semantic_model, definition);
+ }
+ // flake8-pyi
+ if enforce_stubs {
if self.is_stub {
- if self.settings.rules.enabled(Rule::DocstringInStub) {
- flake8_pyi::rules::docstring_in_stubs(self, definition.docstring);
+ if self.enabled(Rule::DocstringInStub) {
+ flake8_pyi::rules::docstring_in_stubs(self, docstring);
}
- }
-
- // pydocstyle
- if enforce_docstrings {
- if pydocstyle::helpers::should_ignore_definition(
- self,
- &definition,
- &self.settings.pydocstyle.ignore_decorators,
- ) {
- continue;
+ if self.enabled(Rule::IterMethodReturnIterable) {
+ flake8_pyi::rules::iter_method_return_iterable(self, definition);
}
+ }
+ }
- if definition.docstring.is_none() {
- pydocstyle::rules::not_missing(self, &definition, visibility);
- continue;
- }
+ // pydocstyle
+ if enforce_docstrings {
+ if pydocstyle::helpers::should_ignore_definition(
+ &self.semantic_model,
+ definition,
+ &self.settings.pydocstyle.ignore_decorators,
+ ) {
+ continue;
+ }
- // Extract a `Docstring` from a `Definition`.
- let expr = definition.docstring.unwrap();
- let contents = self.locator.slice(expr.range());
+ // Extract a `Docstring` from a `Definition`.
+ let Some(expr) = docstring else {
+ pydocstyle::rules::not_missing(self, definition, *visibility);
+ continue;
+ };
- let indentation = self.locator.slice(TextRange::new(
- self.locator.line_start(expr.start()),
- expr.start(),
- ));
+ let contents = self.locator.slice(expr.range());
- if pydocstyle::helpers::should_ignore_docstring(contents) {
- #[allow(deprecated)]
- let location = self.locator.compute_source_location(expr.start());
- warn_user!(
- "Docstring at {}:{}:{} contains implicit string concatenation; ignoring...",
- relativize_path(self.path),
- location.row,
- location.column
- );
- continue;
- }
+ let indentation = self.locator.slice(TextRange::new(
+ self.locator.line_start(expr.start()),
+ expr.start(),
+ ));
- // SAFETY: Safe for docstrings that pass `should_ignore_docstring`.
- let body_range = str::raw_contents_range(contents).unwrap();
- let docstring = Docstring {
- kind: definition.kind,
- expr,
- contents,
- indentation,
- body_range,
- };
+ if pydocstyle::helpers::should_ignore_docstring(contents) {
+ #[allow(deprecated)]
+ let location = self.locator.compute_source_location(expr.start());
+ warn_user!(
+ "Docstring at {}:{}:{} contains implicit string concatenation; ignoring...",
+ relativize_path(self.path),
+ location.row,
+ location.column
+ );
+ continue;
+ }
- if !pydocstyle::rules::not_empty(self, &docstring) {
- continue;
- }
+ // SAFETY: Safe for docstrings that pass `should_ignore_docstring`.
+ let body_range = str::raw_contents_range(contents).unwrap();
+ let docstring = Docstring {
+ definition,
+ expr,
+ contents,
+ body_range,
+ indentation,
+ };
- if self.settings.rules.enabled(Rule::FitsOnOneLine) {
- pydocstyle::rules::one_liner(self, &docstring);
- }
- if self.settings.rules.any_enabled(&[
- Rule::NoBlankLineBeforeFunction,
- Rule::NoBlankLineAfterFunction,
- ]) {
- pydocstyle::rules::blank_before_after_function(self, &docstring);
- }
- if self.settings.rules.any_enabled(&[
- Rule::OneBlankLineBeforeClass,
- Rule::OneBlankLineAfterClass,
- Rule::BlankLineBeforeClass,
- ]) {
- pydocstyle::rules::blank_before_after_class(self, &docstring);
- }
- if self.settings.rules.enabled(Rule::BlankLineAfterSummary) {
- pydocstyle::rules::blank_after_summary(self, &docstring);
- }
- if self.settings.rules.any_enabled(&[
- Rule::IndentWithSpaces,
- Rule::UnderIndentation,
- Rule::OverIndentation,
- ]) {
- pydocstyle::rules::indent(self, &docstring);
- }
- if self.settings.rules.enabled(Rule::NewLineAfterLastParagraph) {
- pydocstyle::rules::newline_after_last_paragraph(self, &docstring);
- }
- if self.settings.rules.enabled(Rule::SurroundingWhitespace) {
- pydocstyle::rules::no_surrounding_whitespace(self, &docstring);
- }
- if self.settings.rules.any_enabled(&[
- Rule::MultiLineSummaryFirstLine,
- Rule::MultiLineSummarySecondLine,
- ]) {
- pydocstyle::rules::multi_line_summary_start(self, &docstring);
- }
- if self.settings.rules.enabled(Rule::TripleSingleQuotes) {
- pydocstyle::rules::triple_quotes(self, &docstring);
- }
- if self.settings.rules.enabled(Rule::EscapeSequenceInDocstring) {
- pydocstyle::rules::backslashes(self, &docstring);
- }
- if self.settings.rules.enabled(Rule::EndsInPeriod) {
- pydocstyle::rules::ends_with_period(self, &docstring);
- }
- if self.settings.rules.enabled(Rule::NonImperativeMood) {
- pydocstyle::rules::non_imperative_mood(
- self,
- &docstring,
- &self.settings.pydocstyle.property_decorators,
- );
- }
- if self.settings.rules.enabled(Rule::NoSignature) {
- pydocstyle::rules::no_signature(self, &docstring);
- }
- if self.settings.rules.enabled(Rule::FirstLineCapitalized) {
- pydocstyle::rules::capitalized(self, &docstring);
- }
- if self.settings.rules.enabled(Rule::DocstringStartsWithThis) {
- pydocstyle::rules::starts_with_this(self, &docstring);
- }
- if self.settings.rules.enabled(Rule::EndsInPunctuation) {
- pydocstyle::rules::ends_with_punctuation(self, &docstring);
- }
- if self.settings.rules.enabled(Rule::OverloadWithDocstring) {
- pydocstyle::rules::if_needed(self, &docstring);
- }
- if self.settings.rules.any_enabled(&[
- Rule::MultiLineSummaryFirstLine,
- Rule::SectionNotOverIndented,
- Rule::SectionUnderlineNotOverIndented,
- Rule::CapitalizeSectionName,
- Rule::NewLineAfterSectionName,
- Rule::DashedUnderlineAfterSection,
- Rule::SectionUnderlineAfterName,
- Rule::SectionUnderlineMatchesSectionLength,
- Rule::NoBlankLineAfterSection,
- Rule::NoBlankLineBeforeSection,
- Rule::BlankLinesBetweenHeaderAndContent,
- Rule::BlankLineAfterLastSection,
- Rule::EmptyDocstringSection,
- Rule::SectionNameEndsInColon,
- Rule::UndocumentedParam,
- ]) {
- pydocstyle::rules::sections(
- self,
- &docstring,
- self.settings.pydocstyle.convention.as_ref(),
- );
- }
+ if !pydocstyle::rules::not_empty(self, &docstring) {
+ continue;
}
- }
- }
- }
- fn check_builtin_shadowing<T>(&mut self, name: &str, located: &Located<T>, is_attribute: bool) {
- if is_attribute && matches!(self.ctx.scope().kind, ScopeKind::Class(_)) {
- if self.settings.rules.enabled(Rule::BuiltinAttributeShadowing) {
- if let Some(diagnostic) = flake8_builtins::rules::builtin_shadowing(
- name,
- located,
- flake8_builtins::types::ShadowingType::Attribute,
- &self.settings.flake8_builtins.builtins_ignorelist,
- ) {
- self.diagnostics.push(diagnostic);
+ if self.enabled(Rule::FitsOnOneLine) {
+ pydocstyle::rules::one_liner(self, &docstring);
}
- }
- } else {
- if self.settings.rules.enabled(Rule::BuiltinVariableShadowing) {
- if let Some(diagnostic) = flake8_builtins::rules::builtin_shadowing(
- name,
- located,
- flake8_builtins::types::ShadowingType::Variable,
- &self.settings.flake8_builtins.builtins_ignorelist,
- ) {
- self.diagnostics.push(diagnostic);
+ if self.any_enabled(&[
+ Rule::NoBlankLineBeforeFunction,
+ Rule::NoBlankLineAfterFunction,
+ ]) {
+ pydocstyle::rules::blank_before_after_function(self, &docstring);
+ }
+ if self.any_enabled(&[
+ Rule::OneBlankLineBeforeClass,
+ Rule::OneBlankLineAfterClass,
+ Rule::BlankLineBeforeClass,
+ ]) {
+ pydocstyle::rules::blank_before_after_class(self, &docstring);
+ }
+ if self.enabled(Rule::BlankLineAfterSummary) {
+ pydocstyle::rules::blank_after_summary(self, &docstring);
+ }
+ if self.any_enabled(&[
+ Rule::IndentWithSpaces,
+ Rule::UnderIndentation,
+ Rule::OverIndentation,
+ ]) {
+ pydocstyle::rules::indent(self, &docstring);
+ }
+ if self.enabled(Rule::NewLineAfterLastParagraph) {
+ pydocstyle::rules::newline_after_last_paragraph(self, &docstring);
+ }
+ if self.enabled(Rule::SurroundingWhitespace) {
+ pydocstyle::rules::no_surrounding_whitespace(self, &docstring);
+ }
+ if self.any_enabled(&[
+ Rule::MultiLineSummaryFirstLine,
+ Rule::MultiLineSummarySecondLine,
+ ]) {
+ pydocstyle::rules::multi_line_summary_start(self, &docstring);
+ }
+ if self.enabled(Rule::TripleSingleQuotes) {
+ pydocstyle::rules::triple_quotes(self, &docstring);
+ }
+ if self.enabled(Rule::EscapeSequenceInDocstring) {
+ pydocstyle::rules::backslashes(self, &docstring);
+ }
+ if self.enabled(Rule::EndsInPeriod) {
+ pydocstyle::rules::ends_with_period(self, &docstring);
+ }
+ if self.enabled(Rule::NonImperativeMood) {
+ pydocstyle::rules::non_imperative_mood(
+ self,
+ &docstring,
+ &self.settings.pydocstyle.property_decorators,
+ );
+ }
+ if self.enabled(Rule::NoSignature) {
+ pydocstyle::rules::no_signature(self, &docstring);
+ }
+ if self.enabled(Rule::FirstLineCapitalized) {
+ pydocstyle::rules::capitalized(self, &docstring);
+ }
+ if self.enabled(Rule::DocstringStartsWithThis) {
+ pydocstyle::rules::starts_with_this(self, &docstring);
+ }
+ if self.enabled(Rule::EndsInPunctuation) {
+ pydocstyle::rules::ends_with_punctuation(self, &docstring);
+ }
+ if self.enabled(Rule::OverloadWithDocstring) {
+ pydocstyle::rules::if_needed(self, &docstring);
+ }
+ if self.any_enabled(&[
+ Rule::MultiLineSummaryFirstLine,
+ Rule::SectionNotOverIndented,
+ Rule::SectionUnderlineNotOverIndented,
+ Rule::CapitalizeSectionName,
+ Rule::NewLineAfterSectionName,
+ Rule::DashedUnderlineAfterSection,
+ Rule::SectionUnderlineAfterName,
+ Rule::SectionUnderlineMatchesSectionLength,
+ Rule::NoBlankLineAfterSection,
+ Rule::NoBlankLineBeforeSection,
+ Rule::BlankLinesBetweenHeaderAndContent,
+ Rule::BlankLineAfterLastSection,
+ Rule::EmptyDocstringSection,
+ Rule::SectionNameEndsInColon,
+ Rule::UndocumentedParam,
+ ]) {
+ pydocstyle::rules::sections(
+ self,
+ &docstring,
+ self.settings.pydocstyle.convention.as_ref(),
+ );
}
- }
- }
- }
-
- fn check_builtin_arg_shadowing(&mut self, name: &str, arg: &Arg) {
- if self.settings.rules.enabled(Rule::BuiltinArgumentShadowing) {
- if let Some(diagnostic) = flake8_builtins::rules::builtin_shadowing(
- name,
- arg,
- flake8_builtins::types::ShadowingType::Argument,
- &self.settings.flake8_builtins.builtins_ignorelist,
- ) {
- self.diagnostics.push(diagnostic);
}
}
}
}
#[allow(clippy::too_many_arguments)]
-pub fn check_ast(
+pub(crate) fn check_ast(
python_ast: &Suite,
locator: &Locator,
stylist: &Stylist,
indexer: &Indexer,
noqa_line_for: &NoqaMapping,
settings: &Settings,
- autofix: flags::Autofix,
noqa: flags::Noqa,
path: &Path,
package: Option<&Path>,
) -> Vec<Diagnostic> {
+ let module_path = package.and_then(|package| to_module_path(package, path));
+ let module = Module {
+ kind: if path.ends_with("__init__.py") {
+ ModuleKind::Package
+ } else {
+ ModuleKind::Module
+ },
+ source: if let Some(module_path) = module_path.as_ref() {
+ ModuleSource::Path(module_path)
+ } else {
+ ModuleSource::File(path)
+ },
+ python_ast,
+ };
+
let mut checker = Checker::new(
settings,
noqa_line_for,
- autofix,
noqa,
path,
package,
- package.and_then(|package| to_module_path(package, path)),
+ module,
locator,
stylist,
indexer,
@@ -5687,18 +5409,19 @@ pub fn check_ast(
checker.bind_builtins();
// Check for module docstring.
- let python_ast = if checker.visit_docstring(python_ast) {
+ let python_ast = if checker.visit_module(python_ast) {
&python_ast[1..]
} else {
python_ast
};
+
// Iterate over the AST.
checker.visit_body(python_ast);
// Check any deferred statements.
checker.check_deferred_functions();
checker.check_deferred_lambdas();
- checker.check_deferred_type_definitions();
+ checker.check_deferred_future_type_definitions();
let allocator = typed_arena::Arena::new();
checker.check_deferred_string_type_definitions(&allocator);
checker.check_deferred_assignments();
@@ -5708,8 +5431,8 @@ pub fn check_ast(
checker.check_definitions();
// Reset the scope to module-level, and check all consumed scopes.
- checker.ctx.scope_id = ScopeId::global();
- checker.ctx.dead_scopes.push(ScopeId::global());
+ checker.semantic_model.scope_id = ScopeId::global();
+ checker.semantic_model.dead_scopes.push(ScopeId::global());
checker.check_dead_scopes();
checker.diagnostics
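
The checker hunks above converge on one pattern: instead of hand-copying `scope_id`, `parents`, and per-visit booleans into every deferred queue, each deferred item now carries a semantic-model snapshot that is restored before the item is revisited, and each queue is drained with `std::mem::take` so work deferred during processing is picked up on the next pass. A minimal, self-contained sketch of that pattern (the `Analyzer` and `State` names are illustrative stand-ins, not the ruff-internal API):

// Illustrative sketch only: a reduced model of the snapshot/restore + drain
// pattern; `Analyzer` and `State` are invented names, not ruff types.
#[derive(Clone, Default)]
struct State {
    scope_id: usize,
    in_type_definition: bool,
}

#[derive(Default)]
struct Analyzer {
    state: State,
    // Each deferred item carries the state captured when it was deferred.
    deferred_lambdas: Vec<(String, State)>,
}

impl Analyzer {
    fn snapshot(&self) -> State {
        self.state.clone()
    }

    fn restore(&mut self, snapshot: State) {
        self.state = snapshot;
    }

    fn check_deferred_lambdas(&mut self) {
        // `std::mem::take` empties the queue but keeps it usable, so anything
        // deferred while visiting is processed by the next outer iteration.
        while !self.deferred_lambdas.is_empty() {
            let lambdas = std::mem::take(&mut self.deferred_lambdas);
            for (lambda, snapshot) in lambdas {
                self.restore(snapshot);
                println!(
                    "visiting {lambda} in scope {} (type definition: {})",
                    self.state.scope_id, self.state.in_type_definition
                );
            }
        }
    }
}

fn main() {
    let mut analyzer = Analyzer::default();
    analyzer.state.scope_id = 3;
    let snapshot = analyzer.snapshot();
    analyzer.deferred_lambdas.push(("lambda x: x".to_string(), snapshot));
    analyzer.state.scope_id = 0; // traversal moves on...
    analyzer.check_deferred_lambdas(); // ...but the lambda is revisited in scope 3.
}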
diff --git a/crates/ruff/src/checkers/filesystem.rs b/crates/ruff/src/checkers/filesystem.rs
index 0238b6b8946f3..83df0ba87b8f7 100644
--- a/crates/ruff/src/checkers/filesystem.rs
+++ b/crates/ruff/src/checkers/filesystem.rs
@@ -7,7 +7,7 @@ use crate::rules::flake8_no_pep420::rules::implicit_namespace_package;
use crate::rules::pep8_naming::rules::invalid_module_name;
use crate::settings::Settings;
-pub fn check_file_path(
+pub(crate) fn check_file_path(
path: &Path,
package: Option<&Path>,
settings: &Settings,
diff --git a/crates/ruff/src/checkers/imports.rs b/crates/ruff/src/checkers/imports.rs
index ad0cba81f2993..a985c0daafdec 100644
--- a/crates/ruff/src/checkers/imports.rs
+++ b/crates/ruff/src/checkers/imports.rs
@@ -2,20 +2,20 @@
use std::borrow::Cow;
use std::path::Path;
-use rustpython_parser::ast::{StmtKind, Suite};
+use rustpython_parser::ast::{self, Ranged, Stmt, Suite};
use ruff_diagnostics::Diagnostic;
use ruff_python_ast::helpers::to_module_path;
use ruff_python_ast::imports::{ImportMap, ModuleImport};
use ruff_python_ast::source_code::{Indexer, Locator, Stylist};
-use ruff_python_ast::visitor::Visitor;
+use ruff_python_ast::statement_visitor::StatementVisitor;
use ruff_python_stdlib::path::is_python_stub_file;
use crate::directives::IsortDirectives;
use crate::registry::Rule;
use crate::rules::isort;
-use crate::rules::isort::track::{Block, ImportTracker};
-use crate::settings::{flags, Settings};
+use crate::rules::isort::block::{Block, BlockBuilder};
+use crate::settings::Settings;
fn extract_import_map(path: &Path, package: Option<&Path>, blocks: &[&Block]) -> Option<ImportMap> {
let Some(package) = package else {
@@ -28,21 +28,23 @@ fn extract_import_map(path: &Path, package: Option<&Path>, blocks: &[&Block]) ->
let num_imports = blocks.iter().map(|block| block.imports.len()).sum();
let mut module_imports = Vec::with_capacity(num_imports);
for stmt in blocks.iter().flat_map(|block| &block.imports) {
- match &stmt.node {
- StmtKind::Import { names } => {
+ match stmt {
+ Stmt::Import(ast::StmtImport { names, range: _ }) => {
module_imports.extend(
names
.iter()
- .map(|name| ModuleImport::new(name.node.name.clone(), stmt.range())),
+ .map(|name| ModuleImport::new(name.name.to_string(), stmt.range())),
);
}
- StmtKind::ImportFrom {
+ Stmt::ImportFrom(ast::StmtImportFrom {
module,
names,
level,
- } => {
- let level = level.unwrap_or(0);
+ range: _,
+ }) => {
+ let level = level.map_or(0, |level| level.to_usize());
let module = if let Some(module) = module {
+ let module: &String = module.as_ref();
if level == 0 {
Cow::Borrowed(module)
} else {
@@ -59,10 +61,10 @@ fn extract_import_map(path: &Path, package: Option<&Path>, blocks: &[&Block]) ->
Cow::Owned(module_path[..module_path.len() - level].join("."))
};
module_imports.extend(names.iter().map(|name| {
- ModuleImport::new(format!("{}.{}", module, name.node.name), name.range())
+ ModuleImport::new(format!("{}.{}", module, name.name), name.range())
}));
}
- _ => panic!("Expected StmtKind::Import | StmtKind::ImportFrom"),
+ _ => panic!("Expected Stmt::Import | Stmt::ImportFrom"),
}
}
@@ -72,22 +74,21 @@ fn extract_import_map(path: &Path, package: Option<&Path>, blocks: &[&Block]) ->
}
#[allow(clippy::too_many_arguments)]
-pub fn check_imports(
+pub(crate) fn check_imports(
python_ast: &Suite,
locator: &Locator,
indexer: &Indexer,
directives: &IsortDirectives,
settings: &Settings,
stylist: &Stylist,
- autofix: flags::Autofix,
path: &Path,
package: Option<&Path>,
) -> (Vec<Diagnostic>, Option<ImportMap>) {
let is_stub = is_python_stub_file(path);
- // Extract all imports from the AST.
+ // Extract all import blocks from the AST.
let tracker = {
- let mut tracker = ImportTracker::new(locator, directives, is_stub);
+ let mut tracker = BlockBuilder::new(locator, directives, is_stub);
tracker.visit_body(python_ast);
tracker
};
@@ -99,7 +100,7 @@ pub fn check_imports(
for block in &blocks {
if !block.imports.is_empty() {
if let Some(diagnostic) = isort::rules::organize_imports(
- block, locator, stylist, indexer, settings, autofix, package,
+ block, locator, stylist, indexer, settings, package,
) {
diagnostics.push(diagnostic);
}
@@ -108,7 +109,7 @@ pub fn check_imports(
}
if settings.rules.enabled(Rule::MissingRequiredImport) {
diagnostics.extend(isort::rules::add_required_imports(
- &blocks, python_ast, locator, stylist, settings, autofix, is_stub,
+ python_ast, locator, stylist, settings, is_stub,
));
}
diff --git a/crates/ruff/src/checkers/logical_lines.rs b/crates/ruff/src/checkers/logical_lines.rs
index d51ea0937d272..cbf9263c70a65 100644
--- a/crates/ruff/src/checkers/logical_lines.rs
+++ b/crates/ruff/src/checkers/logical_lines.rs
@@ -1,7 +1,7 @@
use ruff_text_size::TextRange;
use rustpython_parser::lexer::LexResult;
-use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix};
+use ruff_diagnostics::{Diagnostic, DiagnosticKind};
use ruff_python_ast::source_code::{Locator, Stylist};
use ruff_python_ast::token_kind::TokenKind;
@@ -12,7 +12,7 @@ use crate::rules::pycodestyle::rules::logical_lines::{
whitespace_around_named_parameter_equals, whitespace_before_comment,
whitespace_before_parameters, LogicalLines, TokenFlags,
};
-use crate::settings::{flags, Settings};
+use crate::settings::Settings;
/// Return the amount of indentation, expanding tabs to the next multiple of 8.
fn expand_indent(line: &str) -> usize {
@@ -30,28 +30,24 @@ fn expand_indent(line: &str) -> usize {
indent
}
-pub fn check_logical_lines(
+pub(crate) fn check_logical_lines(
tokens: &[LexResult],
locator: &Locator,
stylist: &Stylist,
settings: &Settings,
- autofix: flags::Autofix,
) -> Vec<Diagnostic> {
let mut context = LogicalLinesContext::new(settings);
- #[cfg(feature = "logical_lines")]
- let should_fix_missing_whitespace =
- autofix.into() && settings.rules.should_fix(Rule::MissingWhitespace);
-
- #[cfg(not(feature = "logical_lines"))]
- let should_fix_missing_whitespace = false;
-
- #[cfg(feature = "logical_lines")]
+ let should_fix_missing_whitespace = settings.rules.should_fix(Rule::MissingWhitespace);
let should_fix_whitespace_before_parameters =
- autofix.into() && settings.rules.should_fix(Rule::WhitespaceBeforeParameters);
-
- #[cfg(not(feature = "logical_lines"))]
- let should_fix_whitespace_before_parameters = false;
+ settings.rules.should_fix(Rule::WhitespaceBeforeParameters);
+ let should_fix_whitespace_after_open_bracket =
+ settings.rules.should_fix(Rule::WhitespaceAfterOpenBracket);
+ let should_fix_whitespace_before_close_bracket = settings
+ .rules
+ .should_fix(Rule::WhitespaceBeforeCloseBracket);
+ let should_fix_whitespace_before_punctuation =
+ settings.rules.should_fix(Rule::WhitespaceBeforePunctuation);
let mut prev_line = None;
let mut prev_indent_level = None;
@@ -67,17 +63,24 @@ pub fn check_logical_lines(
if line
.flags()
- .contains(TokenFlags::OPERATOR | TokenFlags::PUNCTUATION)
+ .intersects(TokenFlags::OPERATOR | TokenFlags::BRACKET | TokenFlags::PUNCTUATION)
{
- extraneous_whitespace(&line, &mut context);
+ extraneous_whitespace(
+ &line,
+ &mut context,
+ should_fix_whitespace_after_open_bracket,
+ should_fix_whitespace_before_close_bracket,
+ should_fix_whitespace_before_punctuation,
+ );
}
+
if line.flags().contains(TokenFlags::KEYWORD) {
whitespace_around_keywords(&line, &mut context);
missing_whitespace_after_keyword(&line, &mut context);
}
if line.flags().contains(TokenFlags::COMMENT) {
- whitespace_before_comment(&line, locator, prev_line.is_none(), &mut context);
+ whitespace_before_comment(&line, locator, &mut context);
}
if line.flags().contains(TokenFlags::BRACKET) {
@@ -138,115 +141,21 @@ impl<'a> LogicalLinesContext<'a> {
}
}
- pub fn push<K: Into<DiagnosticKind>>(&mut self, kind: K, range: TextRange) {
+ pub(crate) fn push<K: Into<DiagnosticKind>>(&mut self, kind: K, range: TextRange) {
let kind = kind.into();
if self.settings.rules.enabled(kind.rule()) {
self.diagnostics.push(Diagnostic {
kind,
range,
- fix: Fix::empty(),
+ fix: None,
parent: None,
});
}
}
- pub fn push_diagnostic(&mut self, diagnostic: Diagnostic) {
+ pub(crate) fn push_diagnostic(&mut self, diagnostic: Diagnostic) {
if self.settings.rules.enabled(diagnostic.kind.rule()) {
self.diagnostics.push(diagnostic);
}
}
}
-
-#[cfg(test)]
-mod tests {
- use rustpython_parser::lexer::LexResult;
- use rustpython_parser::{lexer, Mode};
-
- use crate::rules::pycodestyle::rules::logical_lines::LogicalLines;
- use ruff_python_ast::source_code::Locator;
-
- #[test]
- fn split_logical_lines() {
- let contents = r#"
-x = 1
-y = 2
-z = x + 1"#;
- let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
- let locator = Locator::new(contents);
- let actual: Vec<String> = LogicalLines::from_tokens(&lxr, &locator)
- .into_iter()
- .map(|line| line.text_trimmed().to_string())
- .collect();
- let expected = vec![
- "x = 1".to_string(),
- "y = 2".to_string(),
- "z = x + 1".to_string(),
- ];
- assert_eq!(actual, expected);
-
- let contents = r#"
-x = [
- 1,
- 2,
- 3,
-]
-y = 2
-z = x + 1"#;
- let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
- let locator = Locator::new(contents);
- let actual: Vec<String> = LogicalLines::from_tokens(&lxr, &locator)
- .into_iter()
- .map(|line| line.text_trimmed().to_string())
- .collect();
- let expected = vec![
- "x = [\n 1,\n 2,\n 3,\n]".to_string(),
- "y = 2".to_string(),
- "z = x + 1".to_string(),
- ];
- assert_eq!(actual, expected);
-
- let contents = "x = 'abc'";
- let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
- let locator = Locator::new(contents);
- let actual: Vec<String> = LogicalLines::from_tokens(&lxr, &locator)
- .into_iter()
- .map(|line| line.text_trimmed().to_string())
- .collect();
- let expected = vec!["x = 'abc'".to_string()];
- assert_eq!(actual, expected);
-
- let contents = r#"
-def f():
- x = 1
-f()"#;
- let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
- let locator = Locator::new(contents);
- let actual: Vec<String> = LogicalLines::from_tokens(&lxr, &locator)
- .into_iter()
- .map(|line| line.text_trimmed().to_string())
- .collect();
- let expected = vec!["def f():", "x = 1", "f()"];
- assert_eq!(actual, expected);
-
- let contents = r#"
-def f():
- """Docstring goes here."""
- # Comment goes here.
- x = 1
-f()"#;
- let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
- let locator = Locator::new(contents);
- let actual: Vec<String> = LogicalLines::from_tokens(&lxr, &locator)
- .into_iter()
- .map(|line| line.text_trimmed().to_string())
- .collect();
- let expected = vec![
- "def f():",
- "\"\"\"Docstring goes here.\"\"\"",
- "",
- "x = 1",
- "f()",
- ];
- assert_eq!(actual, expected);
- }
-}
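
One behavioral detail in the logical-lines hunk above: the token-flag check moves from `contains` to `intersects`. For a bitflags-style type, `contains` requires every bit of the argument to be set, while `intersects` requires at least one, so the new check also fires for a line that has only an operator, only a bracket, or only punctuation. A minimal sketch of the distinction (assumes the `bitflags` crate; `Flags` is a stand-in for `TokenFlags`):

// Illustrative sketch of `contains` ("all of") vs. `intersects` ("any of");
// assumes the `bitflags` crate is available as a dependency.
use bitflags::bitflags;

bitflags! {
    struct Flags: u8 {
        const OPERATOR = 0b001;
        const BRACKET = 0b010;
        const PUNCTUATION = 0b100;
    }
}

fn main() {
    let line = Flags::OPERATOR;
    // `contains` demands every bit in the argument: false here, because the
    // line has no punctuation token.
    assert!(!line.contains(Flags::OPERATOR | Flags::PUNCTUATION));
    // `intersects` demands any overlapping bit: true, because the line has an
    // operator token.
    assert!(line.intersects(Flags::OPERATOR | Flags::BRACKET | Flags::PUNCTUATION));
    println!("contains = all of, intersects = any of");
}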
diff --git a/crates/ruff/src/checkers/mod.rs b/crates/ruff/src/checkers/mod.rs
index 13c937dd60e2f..5178e7e6647ec 100644
--- a/crates/ruff/src/checkers/mod.rs
+++ b/crates/ruff/src/checkers/mod.rs
@@ -1,8 +1,7 @@
-pub mod ast;
-pub mod filesystem;
-pub mod imports;
-#[cfg(feature = "logical_lines")]
+pub(crate) mod ast;
+pub(crate) mod filesystem;
+pub(crate) mod imports;
pub(crate) mod logical_lines;
-pub mod noqa;
-pub mod physical_lines;
-pub mod tokens;
+pub(crate) mod noqa;
+pub(crate) mod physical_lines;
+pub(crate) mod tokens;
diff --git a/crates/ruff/src/checkers/noqa.rs b/crates/ruff/src/checkers/noqa.rs
index 25ad27a9992cd..52d47182b5343 100644
--- a/crates/ruff/src/checkers/noqa.rs
+++ b/crates/ruff/src/checkers/noqa.rs
@@ -3,7 +3,7 @@
use itertools::Itertools;
use ruff_text_size::{TextLen, TextRange, TextSize};
-use ruff_diagnostics::{Diagnostic, Edit};
+use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_python_ast::source_code::Locator;
use crate::noqa;
@@ -11,18 +11,16 @@ use crate::noqa::{Directive, FileExemption, NoqaDirectives, NoqaMapping};
use crate::registry::{AsRule, Rule};
use crate::rule_redirects::get_redirect_target;
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA};
-use crate::settings::{flags, Settings};
+use crate::settings::Settings;
-pub fn check_noqa(
+pub(crate) fn check_noqa(
diagnostics: &mut Vec<Diagnostic>,
locator: &Locator,
comment_ranges: &[TextRange],
noqa_line_for: &NoqaMapping,
+ analyze_directives: bool,
settings: &Settings,
- autofix: flags::Autofix,
) -> Vec<usize> {
- let enforce_noqa = settings.rules.enabled(Rule::UnusedNOQA);
-
// Identify any codes that are globally exempted (within the current file).
let exemption = noqa::file_exemption(locator.contents(), comment_ranges);
@@ -94,15 +92,16 @@ pub fn check_noqa(
}
// Enforce that the noqa directive was actually used (RUF100).
- if enforce_noqa {
+ if analyze_directives && settings.rules.enabled(Rule::UnusedNOQA) {
for line in noqa_directives.lines() {
match &line.directive {
Directive::All(leading_spaces, noqa_range, trailing_spaces) => {
if line.matches.is_empty() {
let mut diagnostic =
Diagnostic::new(UnusedNOQA { codes: None }, *noqa_range);
- if autofix.into() && settings.rules.should_fix(diagnostic.kind.rule()) {
- diagnostic.set_fix(delete_noqa(
+ if settings.rules.should_fix(diagnostic.kind.rule()) {
+ #[allow(deprecated)]
+ diagnostic.set_fix_from_edit(delete_noqa(
*leading_spaces,
*noqa_range,
*trailing_spaces,
@@ -169,19 +168,21 @@ pub fn check_noqa(
},
*range,
);
- if autofix.into() && settings.rules.should_fix(diagnostic.kind.rule()) {
+ if settings.rules.should_fix(diagnostic.kind.rule()) {
if valid_codes.is_empty() {
- diagnostic.set_fix(delete_noqa(
+ #[allow(deprecated)]
+ diagnostic.set_fix_from_edit(delete_noqa(
*leading_spaces,
*range,
*trailing_spaces,
locator,
));
} else {
- diagnostic.set_fix(Edit::range_replacement(
+ #[allow(deprecated)]
+ diagnostic.set_fix(Fix::unspecified(Edit::range_replacement(
format!("# noqa: {}", valid_codes.join(", ")),
*range,
- ));
+ )));
}
}
diagnostics.push(diagnostic);
diff --git a/crates/ruff/src/checkers/physical_lines.rs b/crates/ruff/src/checkers/physical_lines.rs
index b4437cad5208f..124821f221c59 100644
--- a/crates/ruff/src/checkers/physical_lines.rs
+++ b/crates/ruff/src/checkers/physical_lines.rs
@@ -4,7 +4,7 @@ use ruff_text_size::TextSize;
use std::path::Path;
use ruff_diagnostics::Diagnostic;
-use ruff_python_ast::newlines::StrExt;
+use ruff_newlines::StrExt;
use ruff_python_ast::source_code::{Indexer, Locator, Stylist};
use crate::registry::Rule;
@@ -19,16 +19,15 @@ use crate::rules::pycodestyle::rules::{
use crate::rules::pygrep_hooks::rules::{blanket_noqa, blanket_type_ignore};
use crate::rules::pylint;
use crate::rules::pyupgrade::rules::unnecessary_coding_comment;
-use crate::settings::{flags, Settings};
+use crate::settings::Settings;
-pub fn check_physical_lines(
+pub(crate) fn check_physical_lines(
path: &Path,
locator: &Locator,
stylist: &Stylist,
indexer: &Indexer,
doc_lines: &[TextSize],
settings: &Settings,
- autofix: flags::Autofix,
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];
let mut has_any_shebang = false;
@@ -51,14 +50,11 @@ pub fn check_physical_lines(
settings.rules.enabled(Rule::BlankLineWithWhitespace);
let enforce_tab_indentation = settings.rules.enabled(Rule::TabIndentation);
- let fix_unnecessary_coding_comment =
- autofix.into() && settings.rules.should_fix(Rule::UTF8EncodingDeclaration);
- let fix_shebang_whitespace =
- autofix.into() && settings.rules.should_fix(Rule::ShebangLeadingWhitespace);
+ let fix_unnecessary_coding_comment = settings.rules.should_fix(Rule::UTF8EncodingDeclaration);
+ let fix_shebang_whitespace = settings.rules.should_fix(Rule::ShebangLeadingWhitespace);
let mut commented_lines_iter = indexer.comment_ranges().iter().peekable();
let mut doc_lines_iter = doc_lines.iter().peekable();
- let string_lines = indexer.triple_quoted_string_ranges();
for (index, line) in locator.contents().universal_newlines().enumerate() {
while commented_lines_iter
@@ -121,7 +117,7 @@ pub fn check_physical_lines(
}
while doc_lines_iter
- .next_if(|doc_line_start| line.range().contains(**doc_line_start))
+ .next_if(|doc_line_start| line.range().contains_inclusive(**doc_line_start))
.is_some()
{
if enforce_doc_line_too_long {
@@ -148,13 +144,13 @@ pub fn check_physical_lines(
}
if enforce_trailing_whitespace || enforce_blank_line_contains_whitespace {
- if let Some(diagnostic) = trailing_whitespace(&line, settings, autofix) {
+ if let Some(diagnostic) = trailing_whitespace(&line, settings) {
diagnostics.push(diagnostic);
}
}
if enforce_tab_indentation {
- if let Some(diagnostic) = tab_indentation(&line, string_lines) {
+ if let Some(diagnostic) = tab_indentation(&line, indexer) {
diagnostics.push(diagnostic);
}
}
@@ -164,7 +160,7 @@ pub fn check_physical_lines(
if let Some(diagnostic) = no_newline_at_end_of_file(
locator,
stylist,
- autofix.into() && settings.rules.should_fix(Rule::MissingNewlineAtEndOfFile),
+ settings.rules.should_fix(Rule::MissingNewlineAtEndOfFile),
) {
diagnostics.push(diagnostic);
}
@@ -187,8 +183,9 @@ mod tests {
use ruff_python_ast::source_code::{Indexer, Locator, Stylist};
+ use crate::line_width::LineLength;
use crate::registry::Rule;
- use crate::settings::{flags, Settings};
+ use crate::settings::Settings;
use super::check_physical_lines;
@@ -200,7 +197,7 @@ mod tests {
let indexer = Indexer::from_tokens(&tokens, &locator);
let stylist = Stylist::from_tokens(&tokens, &locator);
- let check_with_max_line_length = |line_length: usize| {
+ let check_with_max_line_length = |line_length: LineLength| {
check_physical_lines(
Path::new("foo.py"),
&locator,
@@ -211,10 +208,10 @@ mod tests {
line_length,
..Settings::for_rule(Rule::LineTooLong)
},
- flags::Autofix::Enabled,
)
};
- assert_eq!(check_with_max_line_length(8), vec![]);
- assert_eq!(check_with_max_line_length(8), vec![]);
+ let line_length = LineLength::from(8);
+ assert_eq!(check_with_max_line_length(line_length), vec![]);
+ assert_eq!(check_with_max_line_length(line_length), vec![]);
}
}
diff --git a/crates/ruff/src/checkers/tokens.rs b/crates/ruff/src/checkers/tokens.rs
index 0b9f76e6199c0..4f71610a7cdbd 100644
--- a/crates/ruff/src/checkers/tokens.rs
+++ b/crates/ruff/src/checkers/tokens.rs
@@ -3,22 +3,23 @@
use rustpython_parser::lexer::LexResult;
use rustpython_parser::Tok;
+use crate::directives::TodoComment;
use crate::lex::docstring_detection::StateMachine;
use crate::registry::{AsRule, Rule};
use crate::rules::ruff::rules::Context;
use crate::rules::{
- eradicate, flake8_commas, flake8_implicit_str_concat, flake8_pyi, flake8_quotes, pycodestyle,
- pylint, pyupgrade, ruff,
+ eradicate, flake8_commas, flake8_fixme, flake8_implicit_str_concat, flake8_pyi, flake8_quotes,
+ flake8_todos, pycodestyle, pylint, pyupgrade, ruff,
};
-use crate::settings::{flags, Settings};
+use crate::settings::Settings;
use ruff_diagnostics::Diagnostic;
-use ruff_python_ast::source_code::Locator;
+use ruff_python_ast::source_code::{Indexer, Locator};
-pub fn check_tokens(
+pub(crate) fn check_tokens(
locator: &Locator,
+ indexer: &Indexer,
tokens: &[LexResult],
settings: &Settings,
- autofix: flags::Autofix,
is_stub: bool,
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];
@@ -61,6 +62,21 @@ pub fn check_tokens(
let enforce_extraneous_parenthesis = settings.rules.enabled(Rule::ExtraneousParentheses);
let enforce_type_comment_in_stub = settings.rules.enabled(Rule::TypeCommentInStub);
+ // Combine flake8_todos and flake8_fixme so that we can reuse detected [`TodoDirective`]s.
+ let enforce_todos = settings.rules.any_enabled(&[
+ Rule::InvalidTodoTag,
+ Rule::MissingTodoAuthor,
+ Rule::MissingTodoLink,
+ Rule::MissingTodoColon,
+ Rule::MissingTodoDescription,
+ Rule::InvalidTodoCapitalization,
+ Rule::MissingSpaceAfterTodoColon,
+ Rule::LineContainsFixme,
+ Rule::LineContainsXxx,
+ Rule::LineContainsTodo,
+ Rule::LineContainsHack,
+ ]);
+
// RUF001, RUF002, RUF003
if enforce_ambiguous_unicode_character {
let mut state_machine = StateMachine::default();
@@ -85,7 +101,6 @@ pub fn check_tokens(
Context::Comment
},
settings,
- autofix,
));
}
}
@@ -93,15 +108,9 @@ pub fn check_tokens(
// ERA001
if enforce_commented_out_code {
- for (tok, range) in tokens.iter().flatten() {
- if matches!(tok, Tok::Comment(_)) {
- if let Some(diagnostic) =
- eradicate::rules::commented_out_code(locator, *range, settings, autofix)
- {
- diagnostics.push(diagnostic);
- }
- }
- }
+ diagnostics.extend(eradicate::rules::commented_out_code(
+ indexer, locator, settings,
+ ));
}
// W605
@@ -111,7 +120,7 @@ pub fn check_tokens(
diagnostics.extend(pycodestyle::rules::invalid_escape_sequence(
locator,
*range,
- autofix.into() && settings.rules.should_fix(Rule::InvalidEscapeSequence),
+ settings.rules.should_fix(Rule::InvalidEscapeSequence),
));
}
}
@@ -121,7 +130,7 @@ pub fn check_tokens(
for (tok, range) in tokens.iter().flatten() {
if matches!(tok, Tok::String { .. }) {
diagnostics.extend(
- pylint::rules::invalid_string_characters(locator, *range, autofix.into())
+ pylint::rules::invalid_string_characters(locator, *range)
.into_iter()
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
);
@@ -132,7 +141,7 @@ pub fn check_tokens(
// E701, E702, E703
if enforce_compound_statements {
diagnostics.extend(
- pycodestyle::rules::compound_statements(tokens, settings, autofix)
+ pycodestyle::rules::compound_statements(tokens, settings)
.into_iter()
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
);
@@ -141,7 +150,7 @@ pub fn check_tokens(
// Q001, Q002, Q003
if enforce_quotes {
diagnostics.extend(
- flake8_quotes::rules::from_tokens(tokens, locator, settings, autofix)
+ flake8_quotes::rules::from_tokens(tokens, locator, settings)
.into_iter()
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
);
@@ -163,7 +172,7 @@ pub fn check_tokens(
// COM812, COM818, COM819
if enforce_trailing_comma {
diagnostics.extend(
- flake8_commas::rules::trailing_commas(tokens, locator, settings, autofix)
+ flake8_commas::rules::trailing_commas(tokens, locator, settings)
.into_iter()
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
);
@@ -172,14 +181,39 @@ pub fn check_tokens(
// UP034
if enforce_extraneous_parenthesis {
diagnostics.extend(
- pyupgrade::rules::extraneous_parentheses(tokens, locator, settings, autofix)
- .into_iter(),
+ pyupgrade::rules::extraneous_parentheses(tokens, locator, settings).into_iter(),
);
}
// PYI033
if enforce_type_comment_in_stub && is_stub {
- diagnostics.extend(flake8_pyi::rules::type_comment_in_stub(tokens));
+ diagnostics.extend(flake8_pyi::rules::type_comment_in_stub(indexer, locator));
+ }
+
+ // TD001, TD002, TD003, TD004, TD005, TD006, TD007
+ // T001, T002, T003, T004
+ if enforce_todos {
+ let todo_comments: Vec<TodoComment> = indexer
+ .comment_ranges()
+ .iter()
+ .enumerate()
+ .filter_map(|(i, comment_range)| {
+ let comment = locator.slice(*comment_range);
+ TodoComment::from_comment(comment, *comment_range, i)
+ })
+ .collect();
+
+ diagnostics.extend(
+ flake8_todos::rules::todos(&todo_comments, indexer, locator, settings)
+ .into_iter()
+ .filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
+ );
+
+ diagnostics.extend(
+ flake8_fixme::rules::todos(&todo_comments)
+ .into_iter()
+ .filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
+ );
}
diagnostics
diff --git a/crates/ruff/src/codes.rs b/crates/ruff/src/codes.rs
index cffdf12abe94e..641755699e5a8 100644
--- a/crates/ruff/src/codes.rs
+++ b/crates/ruff/src/codes.rs
@@ -1,6 +1,16 @@
-use crate::registry::{Linter, Rule};
+/// In this module we generate [`Rule`], an enum of all rules, and [`RuleCodePrefix`], an enum of
+/// all rule categories. A rule category is something like pyflakes or flake8-todos. Each rule
+/// category contains all of its rules and their common prefixes, i.e. everything you can specify
+/// in `--select`. For pylint this includes, e.g., C0414 and E0118, but also C and E01.
use std::fmt::Formatter;
+use strum_macros::{AsRefStr, EnumIter};
+
+use ruff_diagnostics::Violation;
+
+use crate::registry::{AsRule, Linter};
+use crate::rules;
+
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub struct NoqaCode(&'static str, &'static str);
@@ -11,7 +21,7 @@ impl std::fmt::Debug for NoqaCode {
}
impl std::fmt::Display for NoqaCode {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
write!(f, "{}{}", self.0, self.1)
}
}
@@ -25,715 +35,750 @@ impl PartialEq<&str> for NoqaCode {
}
}
+#[derive(Debug, Copy, Clone)]
+pub enum RuleGroup {
+ /// The rule has not been assigned to any specific group.
+ Unspecified,
+ /// The rule is still under development, and must be enabled explicitly.
+ Nursery,
+}
+
#[ruff_macros::map_codes]
-pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
+pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
#[allow(clippy::enum_glob_use)]
use Linter::*;
+ #[rustfmt::skip]
Some(match (linter, code) {
// pycodestyle errors
- (Pycodestyle, "E101") => Rule::MixedSpacesAndTabs,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E111") => Rule::IndentationWithInvalidMultiple,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E112") => Rule::NoIndentedBlock,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E113") => Rule::UnexpectedIndentation,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E114") => Rule::IndentationWithInvalidMultipleComment,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E115") => Rule::NoIndentedBlockComment,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E116") => Rule::UnexpectedIndentationComment,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E117") => Rule::OverIndented,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E201") => Rule::WhitespaceAfterOpenBracket,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E202") => Rule::WhitespaceBeforeCloseBracket,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E203") => Rule::WhitespaceBeforePunctuation,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E211") => Rule::WhitespaceBeforeParameters,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E221") => Rule::MultipleSpacesBeforeOperator,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E222") => Rule::MultipleSpacesAfterOperator,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E223") => Rule::TabBeforeOperator,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E224") => Rule::TabAfterOperator,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E225") => Rule::MissingWhitespaceAroundOperator,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E226") => Rule::MissingWhitespaceAroundArithmeticOperator,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E227") => Rule::MissingWhitespaceAroundBitwiseOrShiftOperator,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E228") => Rule::MissingWhitespaceAroundModuloOperator,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E231") => Rule::MissingWhitespace,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E251") => Rule::UnexpectedSpacesAroundKeywordParameterEquals,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E252") => Rule::MissingWhitespaceAroundParameterEquals,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E261") => Rule::TooFewSpacesBeforeInlineComment,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E262") => Rule::NoSpaceAfterInlineComment,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E265") => Rule::NoSpaceAfterBlockComment,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E266") => Rule::MultipleLeadingHashesForBlockComment,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E271") => Rule::MultipleSpacesAfterKeyword,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E272") => Rule::MultipleSpacesBeforeKeyword,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E273") => Rule::TabAfterKeyword,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E274") => Rule::TabBeforeKeyword,
- #[cfg(feature = "logical_lines")]
- (Pycodestyle, "E275") => Rule::MissingWhitespaceAfterKeyword,
- (Pycodestyle, "E401") => Rule::MultipleImportsOnOneLine,
- (Pycodestyle, "E402") => Rule::ModuleImportNotAtTopOfFile,
- (Pycodestyle, "E501") => Rule::LineTooLong,
- (Pycodestyle, "E701") => Rule::MultipleStatementsOnOneLineColon,
- (Pycodestyle, "E702") => Rule::MultipleStatementsOnOneLineSemicolon,
- (Pycodestyle, "E703") => Rule::UselessSemicolon,
- (Pycodestyle, "E711") => Rule::NoneComparison,
- (Pycodestyle, "E712") => Rule::TrueFalseComparison,
- (Pycodestyle, "E713") => Rule::NotInTest,
- (Pycodestyle, "E714") => Rule::NotIsTest,
- (Pycodestyle, "E721") => Rule::TypeComparison,
- (Pycodestyle, "E722") => Rule::BareExcept,
- (Pycodestyle, "E731") => Rule::LambdaAssignment,
- (Pycodestyle, "E741") => Rule::AmbiguousVariableName,
- (Pycodestyle, "E742") => Rule::AmbiguousClassName,
- (Pycodestyle, "E743") => Rule::AmbiguousFunctionName,
- (Pycodestyle, "E902") => Rule::IOError,
- (Pycodestyle, "E999") => Rule::SyntaxError,
+ (Pycodestyle, "E101") => (RuleGroup::Unspecified, rules::pycodestyle::rules::MixedSpacesAndTabs),
+ (Pycodestyle, "E111") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::IndentationWithInvalidMultiple),
+ (Pycodestyle, "E112") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::NoIndentedBlock),
+ (Pycodestyle, "E113") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::UnexpectedIndentation),
+ (Pycodestyle, "E114") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::IndentationWithInvalidMultipleComment),
+ (Pycodestyle, "E115") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::NoIndentedBlockComment),
+ (Pycodestyle, "E116") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::UnexpectedIndentationComment),
+ (Pycodestyle, "E117") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::OverIndented),
+ (Pycodestyle, "E201") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::WhitespaceAfterOpenBracket),
+ (Pycodestyle, "E202") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::WhitespaceBeforeCloseBracket),
+ (Pycodestyle, "E203") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::WhitespaceBeforePunctuation),
+ (Pycodestyle, "E211") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::WhitespaceBeforeParameters),
+ (Pycodestyle, "E221") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MultipleSpacesBeforeOperator),
+ (Pycodestyle, "E222") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MultipleSpacesAfterOperator),
+ (Pycodestyle, "E223") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::TabBeforeOperator),
+ (Pycodestyle, "E224") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::TabAfterOperator),
+ (Pycodestyle, "E225") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MissingWhitespaceAroundOperator),
+ (Pycodestyle, "E226") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MissingWhitespaceAroundArithmeticOperator),
+ (Pycodestyle, "E227") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MissingWhitespaceAroundBitwiseOrShiftOperator),
+ (Pycodestyle, "E228") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MissingWhitespaceAroundModuloOperator),
+ (Pycodestyle, "E231") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MissingWhitespace),
+ (Pycodestyle, "E251") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::UnexpectedSpacesAroundKeywordParameterEquals),
+ (Pycodestyle, "E252") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MissingWhitespaceAroundParameterEquals),
+ (Pycodestyle, "E261") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::TooFewSpacesBeforeInlineComment),
+ (Pycodestyle, "E262") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::NoSpaceAfterInlineComment),
+ (Pycodestyle, "E265") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::NoSpaceAfterBlockComment),
+ (Pycodestyle, "E266") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MultipleLeadingHashesForBlockComment),
+ (Pycodestyle, "E271") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MultipleSpacesAfterKeyword),
+ (Pycodestyle, "E272") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MultipleSpacesBeforeKeyword),
+ (Pycodestyle, "E273") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::TabAfterKeyword),
+ (Pycodestyle, "E274") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::TabBeforeKeyword),
+ (Pycodestyle, "E275") => (RuleGroup::Nursery, rules::pycodestyle::rules::logical_lines::MissingWhitespaceAfterKeyword),
+ (Pycodestyle, "E401") => (RuleGroup::Unspecified, rules::pycodestyle::rules::MultipleImportsOnOneLine),
+ (Pycodestyle, "E402") => (RuleGroup::Unspecified, rules::pycodestyle::rules::ModuleImportNotAtTopOfFile),
+ (Pycodestyle, "E501") => (RuleGroup::Unspecified, rules::pycodestyle::rules::LineTooLong),
+ (Pycodestyle, "E701") => (RuleGroup::Unspecified, rules::pycodestyle::rules::MultipleStatementsOnOneLineColon),
+ (Pycodestyle, "E702") => (RuleGroup::Unspecified, rules::pycodestyle::rules::MultipleStatementsOnOneLineSemicolon),
+ (Pycodestyle, "E703") => (RuleGroup::Unspecified, rules::pycodestyle::rules::UselessSemicolon),
+ (Pycodestyle, "E711") => (RuleGroup::Unspecified, rules::pycodestyle::rules::NoneComparison),
+ (Pycodestyle, "E712") => (RuleGroup::Unspecified, rules::pycodestyle::rules::TrueFalseComparison),
+ (Pycodestyle, "E713") => (RuleGroup::Unspecified, rules::pycodestyle::rules::NotInTest),
+ (Pycodestyle, "E714") => (RuleGroup::Unspecified, rules::pycodestyle::rules::NotIsTest),
+ (Pycodestyle, "E721") => (RuleGroup::Unspecified, rules::pycodestyle::rules::TypeComparison),
+ (Pycodestyle, "E722") => (RuleGroup::Unspecified, rules::pycodestyle::rules::BareExcept),
+ (Pycodestyle, "E731") => (RuleGroup::Unspecified, rules::pycodestyle::rules::LambdaAssignment),
+ (Pycodestyle, "E741") => (RuleGroup::Unspecified, rules::pycodestyle::rules::AmbiguousVariableName),
+ (Pycodestyle, "E742") => (RuleGroup::Unspecified, rules::pycodestyle::rules::AmbiguousClassName),
+ (Pycodestyle, "E743") => (RuleGroup::Unspecified, rules::pycodestyle::rules::AmbiguousFunctionName),
+ (Pycodestyle, "E902") => (RuleGroup::Unspecified, rules::pycodestyle::rules::IOError),
+ (Pycodestyle, "E999") => (RuleGroup::Unspecified, rules::pycodestyle::rules::SyntaxError),
// pycodestyle warnings
- (Pycodestyle, "W191") => Rule::TabIndentation,
- (Pycodestyle, "W291") => Rule::TrailingWhitespace,
- (Pycodestyle, "W292") => Rule::MissingNewlineAtEndOfFile,
- (Pycodestyle, "W293") => Rule::BlankLineWithWhitespace,
- (Pycodestyle, "W505") => Rule::DocLineTooLong,
- (Pycodestyle, "W605") => Rule::InvalidEscapeSequence,
+ (Pycodestyle, "W191") => (RuleGroup::Unspecified, rules::pycodestyle::rules::TabIndentation),
+ (Pycodestyle, "W291") => (RuleGroup::Unspecified, rules::pycodestyle::rules::TrailingWhitespace),
+ (Pycodestyle, "W292") => (RuleGroup::Unspecified, rules::pycodestyle::rules::MissingNewlineAtEndOfFile),
+ (Pycodestyle, "W293") => (RuleGroup::Unspecified, rules::pycodestyle::rules::BlankLineWithWhitespace),
+ (Pycodestyle, "W505") => (RuleGroup::Unspecified, rules::pycodestyle::rules::DocLineTooLong),
+ (Pycodestyle, "W605") => (RuleGroup::Unspecified, rules::pycodestyle::rules::InvalidEscapeSequence),
// pyflakes
- (Pyflakes, "401") => Rule::UnusedImport,
- (Pyflakes, "402") => Rule::ImportShadowedByLoopVar,
- (Pyflakes, "403") => Rule::UndefinedLocalWithImportStar,
- (Pyflakes, "404") => Rule::LateFutureImport,
- (Pyflakes, "405") => Rule::UndefinedLocalWithImportStarUsage,
- (Pyflakes, "406") => Rule::UndefinedLocalWithNestedImportStarUsage,
- (Pyflakes, "407") => Rule::FutureFeatureNotDefined,
- (Pyflakes, "501") => Rule::PercentFormatInvalidFormat,
- (Pyflakes, "502") => Rule::PercentFormatExpectedMapping,
- (Pyflakes, "503") => Rule::PercentFormatExpectedSequence,
- (Pyflakes, "504") => Rule::PercentFormatExtraNamedArguments,
- (Pyflakes, "505") => Rule::PercentFormatMissingArgument,
- (Pyflakes, "506") => Rule::PercentFormatMixedPositionalAndNamed,
- (Pyflakes, "507") => Rule::PercentFormatPositionalCountMismatch,
- (Pyflakes, "508") => Rule::PercentFormatStarRequiresSequence,
- (Pyflakes, "509") => Rule::PercentFormatUnsupportedFormatCharacter,
- (Pyflakes, "521") => Rule::StringDotFormatInvalidFormat,
- (Pyflakes, "522") => Rule::StringDotFormatExtraNamedArguments,
- (Pyflakes, "523") => Rule::StringDotFormatExtraPositionalArguments,
- (Pyflakes, "524") => Rule::StringDotFormatMissingArguments,
- (Pyflakes, "525") => Rule::StringDotFormatMixingAutomatic,
- (Pyflakes, "541") => Rule::FStringMissingPlaceholders,
- (Pyflakes, "601") => Rule::MultiValueRepeatedKeyLiteral,
- (Pyflakes, "602") => Rule::MultiValueRepeatedKeyVariable,
- (Pyflakes, "621") => Rule::ExpressionsInStarAssignment,
- (Pyflakes, "622") => Rule::MultipleStarredExpressions,
- (Pyflakes, "631") => Rule::AssertTuple,
- (Pyflakes, "632") => Rule::IsLiteral,
- (Pyflakes, "633") => Rule::InvalidPrintSyntax,
- (Pyflakes, "634") => Rule::IfTuple,
- (Pyflakes, "701") => Rule::BreakOutsideLoop,
- (Pyflakes, "702") => Rule::ContinueOutsideLoop,
- (Pyflakes, "704") => Rule::YieldOutsideFunction,
- (Pyflakes, "706") => Rule::ReturnOutsideFunction,
- (Pyflakes, "707") => Rule::DefaultExceptNotLast,
- (Pyflakes, "722") => Rule::ForwardAnnotationSyntaxError,
- (Pyflakes, "811") => Rule::RedefinedWhileUnused,
- (Pyflakes, "821") => Rule::UndefinedName,
- (Pyflakes, "822") => Rule::UndefinedExport,
- (Pyflakes, "823") => Rule::UndefinedLocal,
- (Pyflakes, "841") => Rule::UnusedVariable,
- (Pyflakes, "842") => Rule::UnusedAnnotation,
- (Pyflakes, "901") => Rule::RaiseNotImplemented,
+ (Pyflakes, "401") => (RuleGroup::Unspecified, rules::pyflakes::rules::UnusedImport),
+ (Pyflakes, "402") => (RuleGroup::Unspecified, rules::pyflakes::rules::ImportShadowedByLoopVar),
+ (Pyflakes, "403") => (RuleGroup::Unspecified, rules::pyflakes::rules::UndefinedLocalWithImportStar),
+ (Pyflakes, "404") => (RuleGroup::Unspecified, rules::pyflakes::rules::LateFutureImport),
+ (Pyflakes, "405") => (RuleGroup::Unspecified, rules::pyflakes::rules::UndefinedLocalWithImportStarUsage),
+ (Pyflakes, "406") => (RuleGroup::Unspecified, rules::pyflakes::rules::UndefinedLocalWithNestedImportStarUsage),
+ (Pyflakes, "407") => (RuleGroup::Unspecified, rules::pyflakes::rules::FutureFeatureNotDefined),
+ (Pyflakes, "501") => (RuleGroup::Unspecified, rules::pyflakes::rules::PercentFormatInvalidFormat),
+ (Pyflakes, "502") => (RuleGroup::Unspecified, rules::pyflakes::rules::PercentFormatExpectedMapping),
+ (Pyflakes, "503") => (RuleGroup::Unspecified, rules::pyflakes::rules::PercentFormatExpectedSequence),
+ (Pyflakes, "504") => (RuleGroup::Unspecified, rules::pyflakes::rules::PercentFormatExtraNamedArguments),
+ (Pyflakes, "505") => (RuleGroup::Unspecified, rules::pyflakes::rules::PercentFormatMissingArgument),
+ (Pyflakes, "506") => (RuleGroup::Unspecified, rules::pyflakes::rules::PercentFormatMixedPositionalAndNamed),
+ (Pyflakes, "507") => (RuleGroup::Unspecified, rules::pyflakes::rules::PercentFormatPositionalCountMismatch),
+ (Pyflakes, "508") => (RuleGroup::Unspecified, rules::pyflakes::rules::PercentFormatStarRequiresSequence),
+ (Pyflakes, "509") => (RuleGroup::Unspecified, rules::pyflakes::rules::PercentFormatUnsupportedFormatCharacter),
+ (Pyflakes, "521") => (RuleGroup::Unspecified, rules::pyflakes::rules::StringDotFormatInvalidFormat),
+ (Pyflakes, "522") => (RuleGroup::Unspecified, rules::pyflakes::rules::StringDotFormatExtraNamedArguments),
+ (Pyflakes, "523") => (RuleGroup::Unspecified, rules::pyflakes::rules::StringDotFormatExtraPositionalArguments),
+ (Pyflakes, "524") => (RuleGroup::Unspecified, rules::pyflakes::rules::StringDotFormatMissingArguments),
+ (Pyflakes, "525") => (RuleGroup::Unspecified, rules::pyflakes::rules::StringDotFormatMixingAutomatic),
+ (Pyflakes, "541") => (RuleGroup::Unspecified, rules::pyflakes::rules::FStringMissingPlaceholders),
+ (Pyflakes, "601") => (RuleGroup::Unspecified, rules::pyflakes::rules::MultiValueRepeatedKeyLiteral),
+ (Pyflakes, "602") => (RuleGroup::Unspecified, rules::pyflakes::rules::MultiValueRepeatedKeyVariable),
+ (Pyflakes, "621") => (RuleGroup::Unspecified, rules::pyflakes::rules::ExpressionsInStarAssignment),
+ (Pyflakes, "622") => (RuleGroup::Unspecified, rules::pyflakes::rules::MultipleStarredExpressions),
+ (Pyflakes, "631") => (RuleGroup::Unspecified, rules::pyflakes::rules::AssertTuple),
+ (Pyflakes, "632") => (RuleGroup::Unspecified, rules::pyflakes::rules::IsLiteral),
+ (Pyflakes, "633") => (RuleGroup::Unspecified, rules::pyflakes::rules::InvalidPrintSyntax),
+ (Pyflakes, "634") => (RuleGroup::Unspecified, rules::pyflakes::rules::IfTuple),
+ (Pyflakes, "701") => (RuleGroup::Unspecified, rules::pyflakes::rules::BreakOutsideLoop),
+ (Pyflakes, "702") => (RuleGroup::Unspecified, rules::pyflakes::rules::ContinueOutsideLoop),
+ (Pyflakes, "704") => (RuleGroup::Unspecified, rules::pyflakes::rules::YieldOutsideFunction),
+ (Pyflakes, "706") => (RuleGroup::Unspecified, rules::pyflakes::rules::ReturnOutsideFunction),
+ (Pyflakes, "707") => (RuleGroup::Unspecified, rules::pyflakes::rules::DefaultExceptNotLast),
+ (Pyflakes, "722") => (RuleGroup::Unspecified, rules::pyflakes::rules::ForwardAnnotationSyntaxError),
+ (Pyflakes, "811") => (RuleGroup::Unspecified, rules::pyflakes::rules::RedefinedWhileUnused),
+ (Pyflakes, "821") => (RuleGroup::Unspecified, rules::pyflakes::rules::UndefinedName),
+ (Pyflakes, "822") => (RuleGroup::Unspecified, rules::pyflakes::rules::UndefinedExport),
+ (Pyflakes, "823") => (RuleGroup::Unspecified, rules::pyflakes::rules::UndefinedLocal),
+ (Pyflakes, "841") => (RuleGroup::Unspecified, rules::pyflakes::rules::UnusedVariable),
+ (Pyflakes, "842") => (RuleGroup::Unspecified, rules::pyflakes::rules::UnusedAnnotation),
+ (Pyflakes, "901") => (RuleGroup::Unspecified, rules::pyflakes::rules::RaiseNotImplemented),
// pylint
- (Pylint, "C0414") => Rule::UselessImportAlias,
- (Pylint, "C1901") => Rule::CompareToEmptyString,
- (Pylint, "C3002") => Rule::UnnecessaryDirectLambdaCall,
- (Pylint, "E0100") => Rule::YieldInInit,
- (Pylint, "E0101") => Rule::ReturnInInit,
- (Pylint, "E0116") => Rule::ContinueInFinally,
- (Pylint, "E0117") => Rule::NonlocalWithoutBinding,
- (Pylint, "E0118") => Rule::LoadBeforeGlobalDeclaration,
- (Pylint, "E0604") => Rule::InvalidAllObject,
- (Pylint, "E0605") => Rule::InvalidAllFormat,
- (Pylint, "E1142") => Rule::AwaitOutsideAsync,
- (Pylint, "E1205") => Rule::LoggingTooManyArgs,
- (Pylint, "E1206") => Rule::LoggingTooFewArgs,
- (Pylint, "E1307") => Rule::BadStringFormatType,
- (Pylint, "E1310") => Rule::BadStrStripCall,
- (Pylint, "E1507") => Rule::InvalidEnvvarValue,
- (Pylint, "E2502") => Rule::BidirectionalUnicode,
- (Pylint, "E2510") => Rule::InvalidCharacterBackspace,
- (Pylint, "E2512") => Rule::InvalidCharacterSub,
- (Pylint, "E2513") => Rule::InvalidCharacterEsc,
- (Pylint, "E2514") => Rule::InvalidCharacterNul,
- (Pylint, "E2515") => Rule::InvalidCharacterZeroWidthSpace,
- (Pylint, "R0133") => Rule::ComparisonOfConstant,
- (Pylint, "R0206") => Rule::PropertyWithParameters,
- (Pylint, "R0402") => Rule::ManualFromImport,
- (Pylint, "R0911") => Rule::TooManyReturnStatements,
- (Pylint, "R0912") => Rule::TooManyBranches,
- (Pylint, "R0913") => Rule::TooManyArguments,
- (Pylint, "R0915") => Rule::TooManyStatements,
- (Pylint, "R1701") => Rule::RepeatedIsinstanceCalls,
- (Pylint, "R1711") => Rule::UselessReturn,
- (Pylint, "R1722") => Rule::SysExitAlias,
- (Pylint, "R2004") => Rule::MagicValueComparison,
- (Pylint, "R5501") => Rule::CollapsibleElseIf,
- (Pylint, "W0120") => Rule::UselessElseOnLoop,
- (Pylint, "W0129") => Rule::AssertOnStringLiteral,
- (Pylint, "W0406") => Rule::ImportSelf,
- (Pylint, "W0602") => Rule::GlobalVariableNotAssigned,
- (Pylint, "W0603") => Rule::GlobalStatement,
- (Pylint, "W0711") => Rule::BinaryOpException,
- (Pylint, "W1508") => Rule::InvalidEnvvarDefault,
- (Pylint, "W2901") => Rule::RedefinedLoopName,
- (Pylint, "E0302") => Rule::UnexpectedSpecialMethodSignature,
+ (Pylint, "C0414") => (RuleGroup::Unspecified, rules::pylint::rules::UselessImportAlias),
+ (Pylint, "C1901") => (RuleGroup::Unspecified, rules::pylint::rules::CompareToEmptyString),
+ (Pylint, "C3002") => (RuleGroup::Unspecified, rules::pylint::rules::UnnecessaryDirectLambdaCall),
+ (Pylint, "C0208") => (RuleGroup::Unspecified, rules::pylint::rules::IterationOverSet),
+ (Pylint, "E0100") => (RuleGroup::Unspecified, rules::pylint::rules::YieldInInit),
+ (Pylint, "E0101") => (RuleGroup::Unspecified, rules::pylint::rules::ReturnInInit),
+ (Pylint, "E0116") => (RuleGroup::Unspecified, rules::pylint::rules::ContinueInFinally),
+ (Pylint, "E0117") => (RuleGroup::Unspecified, rules::pylint::rules::NonlocalWithoutBinding),
+ (Pylint, "E0118") => (RuleGroup::Unspecified, rules::pylint::rules::LoadBeforeGlobalDeclaration),
+ (Pylint, "E0241") => (RuleGroup::Unspecified, rules::pylint::rules::DuplicateBases),
+ (Pylint, "E0302") => (RuleGroup::Unspecified, rules::pylint::rules::UnexpectedSpecialMethodSignature),
+ (Pylint, "E0307") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidStrReturnType),
+ (Pylint, "E0604") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidAllObject),
+ (Pylint, "E0605") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidAllFormat),
+ (Pylint, "E1142") => (RuleGroup::Unspecified, rules::pylint::rules::AwaitOutsideAsync),
+ (Pylint, "E1205") => (RuleGroup::Unspecified, rules::pylint::rules::LoggingTooManyArgs),
+ (Pylint, "E1206") => (RuleGroup::Unspecified, rules::pylint::rules::LoggingTooFewArgs),
+ (Pylint, "E1307") => (RuleGroup::Unspecified, rules::pylint::rules::BadStringFormatType),
+ (Pylint, "E1310") => (RuleGroup::Unspecified, rules::pylint::rules::BadStrStripCall),
+ (Pylint, "E1507") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidEnvvarValue),
+ (Pylint, "E1700") => (RuleGroup::Unspecified, rules::pylint::rules::YieldFromInAsyncFunction),
+ (Pylint, "E2502") => (RuleGroup::Unspecified, rules::pylint::rules::BidirectionalUnicode),
+ (Pylint, "E2510") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidCharacterBackspace),
+ (Pylint, "E2512") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidCharacterSub),
+ (Pylint, "E2513") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidCharacterEsc),
+ (Pylint, "E2514") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidCharacterNul),
+ (Pylint, "E2515") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidCharacterZeroWidthSpace),
+ (Pylint, "R0133") => (RuleGroup::Unspecified, rules::pylint::rules::ComparisonOfConstant),
+ (Pylint, "R0206") => (RuleGroup::Unspecified, rules::pylint::rules::PropertyWithParameters),
+ (Pylint, "R0402") => (RuleGroup::Unspecified, rules::pylint::rules::ManualFromImport),
+ (Pylint, "R0911") => (RuleGroup::Unspecified, rules::pylint::rules::TooManyReturnStatements),
+ (Pylint, "R0912") => (RuleGroup::Unspecified, rules::pylint::rules::TooManyBranches),
+ (Pylint, "R0913") => (RuleGroup::Unspecified, rules::pylint::rules::TooManyArguments),
+ (Pylint, "R0915") => (RuleGroup::Unspecified, rules::pylint::rules::TooManyStatements),
+ (Pylint, "R1701") => (RuleGroup::Unspecified, rules::pylint::rules::RepeatedIsinstanceCalls),
+ (Pylint, "R1711") => (RuleGroup::Unspecified, rules::pylint::rules::UselessReturn),
+ (Pylint, "R1722") => (RuleGroup::Unspecified, rules::pylint::rules::SysExitAlias),
+ (Pylint, "R2004") => (RuleGroup::Unspecified, rules::pylint::rules::MagicValueComparison),
+ (Pylint, "R5501") => (RuleGroup::Unspecified, rules::pylint::rules::CollapsibleElseIf),
+ (Pylint, "W0120") => (RuleGroup::Unspecified, rules::pylint::rules::UselessElseOnLoop),
+ (Pylint, "W0129") => (RuleGroup::Unspecified, rules::pylint::rules::AssertOnStringLiteral),
+ (Pylint, "W0131") => (RuleGroup::Unspecified, rules::pylint::rules::NamedExprWithoutContext),
+ (Pylint, "W0406") => (RuleGroup::Unspecified, rules::pylint::rules::ImportSelf),
+ (Pylint, "W0602") => (RuleGroup::Unspecified, rules::pylint::rules::GlobalVariableNotAssigned),
+ (Pylint, "W0603") => (RuleGroup::Unspecified, rules::pylint::rules::GlobalStatement),
+ (Pylint, "W0711") => (RuleGroup::Unspecified, rules::pylint::rules::BinaryOpException),
+ (Pylint, "W1508") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidEnvvarDefault),
+ (Pylint, "W2901") => (RuleGroup::Unspecified, rules::pylint::rules::RedefinedLoopName),
+ (Pylint, "W3301") => (RuleGroup::Unspecified, rules::pylint::rules::NestedMinMax),
+
+ // flake8-async
+ (Flake8Async, "100") => (RuleGroup::Unspecified, rules::flake8_async::rules::BlockingHttpCallInAsyncFunction),
+ (Flake8Async, "101") => (RuleGroup::Unspecified, rules::flake8_async::rules::OpenSleepOrSubprocessInAsyncFunction),
+ (Flake8Async, "102") => (RuleGroup::Unspecified, rules::flake8_async::rules::BlockingOsCallInAsyncFunction),
// flake8-builtins
- (Flake8Builtins, "001") => Rule::BuiltinVariableShadowing,
- (Flake8Builtins, "002") => Rule::BuiltinArgumentShadowing,
- (Flake8Builtins, "003") => Rule::BuiltinAttributeShadowing,
+ (Flake8Builtins, "001") => (RuleGroup::Unspecified, rules::flake8_builtins::rules::BuiltinVariableShadowing),
+ (Flake8Builtins, "002") => (RuleGroup::Unspecified, rules::flake8_builtins::rules::BuiltinArgumentShadowing),
+ (Flake8Builtins, "003") => (RuleGroup::Unspecified, rules::flake8_builtins::rules::BuiltinAttributeShadowing),
// flake8-bugbear
- (Flake8Bugbear, "002") => Rule::UnaryPrefixIncrement,
- (Flake8Bugbear, "003") => Rule::AssignmentToOsEnviron,
- (Flake8Bugbear, "004") => Rule::UnreliableCallableCheck,
- (Flake8Bugbear, "005") => Rule::StripWithMultiCharacters,
- (Flake8Bugbear, "006") => Rule::MutableArgumentDefault,
- (Flake8Bugbear, "007") => Rule::UnusedLoopControlVariable,
- (Flake8Bugbear, "008") => Rule::FunctionCallInDefaultArgument,
- (Flake8Bugbear, "009") => Rule::GetAttrWithConstant,
- (Flake8Bugbear, "010") => Rule::SetAttrWithConstant,
- (Flake8Bugbear, "011") => Rule::AssertFalse,
- (Flake8Bugbear, "012") => Rule::JumpStatementInFinally,
- (Flake8Bugbear, "013") => Rule::RedundantTupleInExceptionHandler,
- (Flake8Bugbear, "014") => Rule::DuplicateHandlerException,
- (Flake8Bugbear, "015") => Rule::UselessComparison,
- (Flake8Bugbear, "016") => Rule::CannotRaiseLiteral,
- (Flake8Bugbear, "017") => Rule::AssertRaisesException,
- (Flake8Bugbear, "018") => Rule::UselessExpression,
- (Flake8Bugbear, "019") => Rule::CachedInstanceMethod,
- (Flake8Bugbear, "020") => Rule::LoopVariableOverridesIterator,
- (Flake8Bugbear, "021") => Rule::FStringDocstring,
- (Flake8Bugbear, "022") => Rule::UselessContextlibSuppress,
- (Flake8Bugbear, "023") => Rule::FunctionUsesLoopVariable,
- (Flake8Bugbear, "024") => Rule::AbstractBaseClassWithoutAbstractMethod,
- (Flake8Bugbear, "025") => Rule::DuplicateTryBlockException,
- (Flake8Bugbear, "026") => Rule::StarArgUnpackingAfterKeywordArg,
- (Flake8Bugbear, "027") => Rule::EmptyMethodWithoutAbstractDecorator,
- (Flake8Bugbear, "028") => Rule::NoExplicitStacklevel,
- (Flake8Bugbear, "029") => Rule::ExceptWithEmptyTuple,
- (Flake8Bugbear, "030") => Rule::ExceptWithNonExceptionClasses,
- (Flake8Bugbear, "031") => Rule::ReuseOfGroupbyGenerator,
- (Flake8Bugbear, "032") => Rule::UnintentionalTypeAnnotation,
- (Flake8Bugbear, "904") => Rule::RaiseWithoutFromInsideExcept,
- (Flake8Bugbear, "905") => Rule::ZipWithoutExplicitStrict,
+ (Flake8Bugbear, "002") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::UnaryPrefixIncrement),
+ (Flake8Bugbear, "003") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::AssignmentToOsEnviron),
+ (Flake8Bugbear, "004") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::UnreliableCallableCheck),
+ (Flake8Bugbear, "005") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::StripWithMultiCharacters),
+ (Flake8Bugbear, "006") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::MutableArgumentDefault),
+ (Flake8Bugbear, "007") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::UnusedLoopControlVariable),
+ (Flake8Bugbear, "008") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::FunctionCallInDefaultArgument),
+ (Flake8Bugbear, "009") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::GetAttrWithConstant),
+ (Flake8Bugbear, "010") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::SetAttrWithConstant),
+ (Flake8Bugbear, "011") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::AssertFalse),
+ (Flake8Bugbear, "012") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::JumpStatementInFinally),
+ (Flake8Bugbear, "013") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::RedundantTupleInExceptionHandler),
+ (Flake8Bugbear, "014") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::DuplicateHandlerException),
+ (Flake8Bugbear, "015") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::UselessComparison),
+ (Flake8Bugbear, "016") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::CannotRaiseLiteral),
+ (Flake8Bugbear, "017") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::AssertRaisesException),
+ (Flake8Bugbear, "018") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::UselessExpression),
+ (Flake8Bugbear, "019") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::CachedInstanceMethod),
+ (Flake8Bugbear, "020") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::LoopVariableOverridesIterator),
+ (Flake8Bugbear, "021") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::FStringDocstring),
+ (Flake8Bugbear, "022") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::UselessContextlibSuppress),
+ (Flake8Bugbear, "023") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::FunctionUsesLoopVariable),
+ (Flake8Bugbear, "024") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::AbstractBaseClassWithoutAbstractMethod),
+ (Flake8Bugbear, "025") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::DuplicateTryBlockException),
+ (Flake8Bugbear, "026") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::StarArgUnpackingAfterKeywordArg),
+ (Flake8Bugbear, "027") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::EmptyMethodWithoutAbstractDecorator),
+ (Flake8Bugbear, "028") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::NoExplicitStacklevel),
+ (Flake8Bugbear, "029") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::ExceptWithEmptyTuple),
+ (Flake8Bugbear, "030") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::ExceptWithNonExceptionClasses),
+ (Flake8Bugbear, "031") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::ReuseOfGroupbyGenerator),
+ (Flake8Bugbear, "032") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::UnintentionalTypeAnnotation),
+ (Flake8Bugbear, "033") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::DuplicateValue),
+ (Flake8Bugbear, "904") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::RaiseWithoutFromInsideExcept),
+ (Flake8Bugbear, "905") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::ZipWithoutExplicitStrict),
// flake8-blind-except
- (Flake8BlindExcept, "001") => Rule::BlindExcept,
+ (Flake8BlindExcept, "001") => (RuleGroup::Unspecified, rules::flake8_blind_except::rules::BlindExcept),
// flake8-comprehensions
- (Flake8Comprehensions, "00") => Rule::UnnecessaryGeneratorList,
- (Flake8Comprehensions, "01") => Rule::UnnecessaryGeneratorSet,
- (Flake8Comprehensions, "02") => Rule::UnnecessaryGeneratorDict,
- (Flake8Comprehensions, "03") => Rule::UnnecessaryListComprehensionSet,
- (Flake8Comprehensions, "04") => Rule::UnnecessaryListComprehensionDict,
- (Flake8Comprehensions, "05") => Rule::UnnecessaryLiteralSet,
- (Flake8Comprehensions, "06") => Rule::UnnecessaryLiteralDict,
- (Flake8Comprehensions, "08") => Rule::UnnecessaryCollectionCall,
- (Flake8Comprehensions, "09") => Rule::UnnecessaryLiteralWithinTupleCall,
- (Flake8Comprehensions, "10") => Rule::UnnecessaryLiteralWithinListCall,
- (Flake8Comprehensions, "11") => Rule::UnnecessaryListCall,
- (Flake8Comprehensions, "13") => Rule::UnnecessaryCallAroundSorted,
- (Flake8Comprehensions, "14") => Rule::UnnecessaryDoubleCastOrProcess,
- (Flake8Comprehensions, "15") => Rule::UnnecessarySubscriptReversal,
- (Flake8Comprehensions, "16") => Rule::UnnecessaryComprehension,
- (Flake8Comprehensions, "17") => Rule::UnnecessaryMap,
- (Flake8Comprehensions, "18") => Rule::UnnecessaryLiteralWithinDictCall,
- (Flake8Comprehensions, "19") => Rule::UnnecessaryComprehensionAnyAll,
+ (Flake8Comprehensions, "00") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryGeneratorList),
+ (Flake8Comprehensions, "01") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryGeneratorSet),
+ (Flake8Comprehensions, "02") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryGeneratorDict),
+ (Flake8Comprehensions, "03") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryListComprehensionSet),
+ (Flake8Comprehensions, "04") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryListComprehensionDict),
+ (Flake8Comprehensions, "05") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryLiteralSet),
+ (Flake8Comprehensions, "06") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryLiteralDict),
+ (Flake8Comprehensions, "08") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryCollectionCall),
+ (Flake8Comprehensions, "09") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryLiteralWithinTupleCall),
+ (Flake8Comprehensions, "10") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryLiteralWithinListCall),
+ (Flake8Comprehensions, "11") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryListCall),
+ (Flake8Comprehensions, "13") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryCallAroundSorted),
+ (Flake8Comprehensions, "14") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryDoubleCastOrProcess),
+ (Flake8Comprehensions, "15") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessarySubscriptReversal),
+ (Flake8Comprehensions, "16") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryComprehension),
+ (Flake8Comprehensions, "17") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryMap),
+ (Flake8Comprehensions, "18") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryLiteralWithinDictCall),
+ (Flake8Comprehensions, "19") => (RuleGroup::Unspecified, rules::flake8_comprehensions::rules::UnnecessaryComprehensionAnyAll),
// flake8-debugger
- (Flake8Debugger, "0") => Rule::Debugger,
+ (Flake8Debugger, "0") => (RuleGroup::Unspecified, rules::flake8_debugger::rules::Debugger),
// mccabe
- (McCabe, "1") => Rule::ComplexStructure,
+ (McCabe, "1") => (RuleGroup::Unspecified, rules::mccabe::rules::ComplexStructure),
// flake8-tidy-imports
- (Flake8TidyImports, "251") => Rule::BannedApi,
- (Flake8TidyImports, "252") => Rule::RelativeImports,
+ (Flake8TidyImports, "251") => (RuleGroup::Unspecified, rules::flake8_tidy_imports::rules::BannedApi),
+ (Flake8TidyImports, "252") => (RuleGroup::Unspecified, rules::flake8_tidy_imports::rules::RelativeImports),
// flake8-return
- (Flake8Return, "501") => Rule::UnnecessaryReturnNone,
- (Flake8Return, "502") => Rule::ImplicitReturnValue,
- (Flake8Return, "503") => Rule::ImplicitReturn,
- (Flake8Return, "504") => Rule::UnnecessaryAssign,
- (Flake8Return, "505") => Rule::SuperfluousElseReturn,
- (Flake8Return, "506") => Rule::SuperfluousElseRaise,
- (Flake8Return, "507") => Rule::SuperfluousElseContinue,
- (Flake8Return, "508") => Rule::SuperfluousElseBreak,
+ (Flake8Return, "501") => (RuleGroup::Unspecified, rules::flake8_return::rules::UnnecessaryReturnNone),
+ (Flake8Return, "502") => (RuleGroup::Unspecified, rules::flake8_return::rules::ImplicitReturnValue),
+ (Flake8Return, "503") => (RuleGroup::Unspecified, rules::flake8_return::rules::ImplicitReturn),
+ (Flake8Return, "504") => (RuleGroup::Unspecified, rules::flake8_return::rules::UnnecessaryAssign),
+ (Flake8Return, "505") => (RuleGroup::Unspecified, rules::flake8_return::rules::SuperfluousElseReturn),
+ (Flake8Return, "506") => (RuleGroup::Unspecified, rules::flake8_return::rules::SuperfluousElseRaise),
+ (Flake8Return, "507") => (RuleGroup::Unspecified, rules::flake8_return::rules::SuperfluousElseContinue),
+ (Flake8Return, "508") => (RuleGroup::Unspecified, rules::flake8_return::rules::SuperfluousElseBreak),
// flake8-gettext
- (Flake8GetText, "001") => Rule::FStringInGetTextFuncCall,
- (Flake8GetText, "002") => Rule::FormatInGetTextFuncCall,
- (Flake8GetText, "003") => Rule::PrintfInGetTextFuncCall,
+ (Flake8GetText, "001") => (RuleGroup::Unspecified, rules::flake8_gettext::rules::FStringInGetTextFuncCall),
+ (Flake8GetText, "002") => (RuleGroup::Unspecified, rules::flake8_gettext::rules::FormatInGetTextFuncCall),
+ (Flake8GetText, "003") => (RuleGroup::Unspecified, rules::flake8_gettext::rules::PrintfInGetTextFuncCall),
// flake8-implicit-str-concat
- (Flake8ImplicitStrConcat, "001") => Rule::SingleLineImplicitStringConcatenation,
- (Flake8ImplicitStrConcat, "002") => Rule::MultiLineImplicitStringConcatenation,
- (Flake8ImplicitStrConcat, "003") => Rule::ExplicitStringConcatenation,
+ (Flake8ImplicitStrConcat, "001") => (RuleGroup::Unspecified, rules::flake8_implicit_str_concat::rules::SingleLineImplicitStringConcatenation),
+ (Flake8ImplicitStrConcat, "002") => (RuleGroup::Unspecified, rules::flake8_implicit_str_concat::rules::MultiLineImplicitStringConcatenation),
+ (Flake8ImplicitStrConcat, "003") => (RuleGroup::Unspecified, rules::flake8_implicit_str_concat::rules::ExplicitStringConcatenation),
// flake8-print
- (Flake8Print, "1") => Rule::Print,
- (Flake8Print, "3") => Rule::PPrint,
+ (Flake8Print, "1") => (RuleGroup::Unspecified, rules::flake8_print::rules::Print),
+ (Flake8Print, "3") => (RuleGroup::Unspecified, rules::flake8_print::rules::PPrint),
// flake8-quotes
- (Flake8Quotes, "000") => Rule::BadQuotesInlineString,
- (Flake8Quotes, "001") => Rule::BadQuotesMultilineString,
- (Flake8Quotes, "002") => Rule::BadQuotesDocstring,
- (Flake8Quotes, "003") => Rule::AvoidableEscapedQuote,
+ (Flake8Quotes, "000") => (RuleGroup::Unspecified, rules::flake8_quotes::rules::BadQuotesInlineString),
+ (Flake8Quotes, "001") => (RuleGroup::Unspecified, rules::flake8_quotes::rules::BadQuotesMultilineString),
+ (Flake8Quotes, "002") => (RuleGroup::Unspecified, rules::flake8_quotes::rules::BadQuotesDocstring),
+ (Flake8Quotes, "003") => (RuleGroup::Unspecified, rules::flake8_quotes::rules::AvoidableEscapedQuote),
// flake8-annotations
- (Flake8Annotations, "001") => Rule::MissingTypeFunctionArgument,
- (Flake8Annotations, "002") => Rule::MissingTypeArgs,
- (Flake8Annotations, "003") => Rule::MissingTypeKwargs,
- (Flake8Annotations, "101") => Rule::MissingTypeSelf,
- (Flake8Annotations, "102") => Rule::MissingTypeCls,
- (Flake8Annotations, "201") => Rule::MissingReturnTypeUndocumentedPublicFunction,
- (Flake8Annotations, "202") => Rule::MissingReturnTypePrivateFunction,
- (Flake8Annotations, "204") => Rule::MissingReturnTypeSpecialMethod,
- (Flake8Annotations, "205") => Rule::MissingReturnTypeStaticMethod,
- (Flake8Annotations, "206") => Rule::MissingReturnTypeClassMethod,
- (Flake8Annotations, "401") => Rule::AnyType,
+ (Flake8Annotations, "001") => (RuleGroup::Unspecified, rules::flake8_annotations::rules::MissingTypeFunctionArgument),
+ (Flake8Annotations, "002") => (RuleGroup::Unspecified, rules::flake8_annotations::rules::MissingTypeArgs),
+ (Flake8Annotations, "003") => (RuleGroup::Unspecified, rules::flake8_annotations::rules::MissingTypeKwargs),
+ (Flake8Annotations, "101") => (RuleGroup::Unspecified, rules::flake8_annotations::rules::MissingTypeSelf),
+ (Flake8Annotations, "102") => (RuleGroup::Unspecified, rules::flake8_annotations::rules::MissingTypeCls),
+ (Flake8Annotations, "201") => (RuleGroup::Unspecified, rules::flake8_annotations::rules::MissingReturnTypeUndocumentedPublicFunction),
+ (Flake8Annotations, "202") => (RuleGroup::Unspecified, rules::flake8_annotations::rules::MissingReturnTypePrivateFunction),
+ (Flake8Annotations, "204") => (RuleGroup::Unspecified, rules::flake8_annotations::rules::MissingReturnTypeSpecialMethod),
+ (Flake8Annotations, "205") => (RuleGroup::Unspecified, rules::flake8_annotations::rules::MissingReturnTypeStaticMethod),
+ (Flake8Annotations, "206") => (RuleGroup::Unspecified, rules::flake8_annotations::rules::MissingReturnTypeClassMethod),
+ (Flake8Annotations, "401") => (RuleGroup::Unspecified, rules::flake8_annotations::rules::AnyType),
+
+ // flake8-future-annotations
+ (Flake8FutureAnnotations, "100") => (RuleGroup::Unspecified, rules::flake8_future_annotations::rules::FutureRewritableTypeAnnotation),
+ (Flake8FutureAnnotations, "102") => (RuleGroup::Unspecified, rules::flake8_future_annotations::rules::FutureRequiredTypeAnnotation),
// flake8-2020
- (Flake82020, "101") => Rule::SysVersionSlice3,
- (Flake82020, "102") => Rule::SysVersion2,
- (Flake82020, "103") => Rule::SysVersionCmpStr3,
- (Flake82020, "201") => Rule::SysVersionInfo0Eq3,
- (Flake82020, "202") => Rule::SixPY3,
- (Flake82020, "203") => Rule::SysVersionInfo1CmpInt,
- (Flake82020, "204") => Rule::SysVersionInfoMinorCmpInt,
- (Flake82020, "301") => Rule::SysVersion0,
- (Flake82020, "302") => Rule::SysVersionCmpStr10,
- (Flake82020, "303") => Rule::SysVersionSlice1,
+ (Flake82020, "101") => (RuleGroup::Unspecified, rules::flake8_2020::rules::SysVersionSlice3),
+ (Flake82020, "102") => (RuleGroup::Unspecified, rules::flake8_2020::rules::SysVersion2),
+ (Flake82020, "103") => (RuleGroup::Unspecified, rules::flake8_2020::rules::SysVersionCmpStr3),
+ (Flake82020, "201") => (RuleGroup::Unspecified, rules::flake8_2020::rules::SysVersionInfo0Eq3),
+ (Flake82020, "202") => (RuleGroup::Unspecified, rules::flake8_2020::rules::SixPY3),
+ (Flake82020, "203") => (RuleGroup::Unspecified, rules::flake8_2020::rules::SysVersionInfo1CmpInt),
+ (Flake82020, "204") => (RuleGroup::Unspecified, rules::flake8_2020::rules::SysVersionInfoMinorCmpInt),
+ (Flake82020, "301") => (RuleGroup::Unspecified, rules::flake8_2020::rules::SysVersion0),
+ (Flake82020, "302") => (RuleGroup::Unspecified, rules::flake8_2020::rules::SysVersionCmpStr10),
+ (Flake82020, "303") => (RuleGroup::Unspecified, rules::flake8_2020::rules::SysVersionSlice1),
// flake8-simplify
- (Flake8Simplify, "101") => Rule::DuplicateIsinstanceCall,
- (Flake8Simplify, "102") => Rule::CollapsibleIf,
- (Flake8Simplify, "103") => Rule::NeedlessBool,
- (Flake8Simplify, "105") => Rule::SuppressibleException,
- (Flake8Simplify, "107") => Rule::ReturnInTryExceptFinally,
- (Flake8Simplify, "108") => Rule::IfElseBlockInsteadOfIfExp,
- (Flake8Simplify, "109") => Rule::CompareWithTuple,
- (Flake8Simplify, "110") => Rule::ReimplementedBuiltin,
- (Flake8Simplify, "112") => Rule::UncapitalizedEnvironmentVariables,
- (Flake8Simplify, "114") => Rule::IfWithSameArms,
- (Flake8Simplify, "115") => Rule::OpenFileWithContextHandler,
- (Flake8Simplify, "116") => Rule::IfElseBlockInsteadOfDictLookup,
- (Flake8Simplify, "117") => Rule::MultipleWithStatements,
- (Flake8Simplify, "118") => Rule::InDictKeys,
- (Flake8Simplify, "201") => Rule::NegateEqualOp,
- (Flake8Simplify, "202") => Rule::NegateNotEqualOp,
- (Flake8Simplify, "208") => Rule::DoubleNegation,
- (Flake8Simplify, "210") => Rule::IfExprWithTrueFalse,
- (Flake8Simplify, "211") => Rule::IfExprWithFalseTrue,
- (Flake8Simplify, "212") => Rule::IfExprWithTwistedArms,
- (Flake8Simplify, "220") => Rule::ExprAndNotExpr,
- (Flake8Simplify, "221") => Rule::ExprOrNotExpr,
- (Flake8Simplify, "222") => Rule::ExprOrTrue,
- (Flake8Simplify, "223") => Rule::ExprAndFalse,
- (Flake8Simplify, "300") => Rule::YodaConditions,
- (Flake8Simplify, "401") => Rule::IfElseBlockInsteadOfDictGet,
- (Flake8Simplify, "910") => Rule::DictGetWithNoneDefault,
+ (Flake8Simplify, "101") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::DuplicateIsinstanceCall),
+ (Flake8Simplify, "102") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::CollapsibleIf),
+ (Flake8Simplify, "103") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::NeedlessBool),
+ (Flake8Simplify, "105") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::SuppressibleException),
+ (Flake8Simplify, "107") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::ReturnInTryExceptFinally),
+ (Flake8Simplify, "108") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::IfElseBlockInsteadOfIfExp),
+ (Flake8Simplify, "109") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::CompareWithTuple),
+ (Flake8Simplify, "110") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::ReimplementedBuiltin),
+ (Flake8Simplify, "112") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::UncapitalizedEnvironmentVariables),
+ (Flake8Simplify, "114") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::IfWithSameArms),
+ (Flake8Simplify, "115") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::OpenFileWithContextHandler),
+ (Flake8Simplify, "116") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::IfElseBlockInsteadOfDictLookup),
+ (Flake8Simplify, "117") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::MultipleWithStatements),
+ (Flake8Simplify, "118") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::InDictKeys),
+ (Flake8Simplify, "201") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::NegateEqualOp),
+ (Flake8Simplify, "202") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::NegateNotEqualOp),
+ (Flake8Simplify, "208") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::DoubleNegation),
+ (Flake8Simplify, "210") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::IfExprWithTrueFalse),
+ (Flake8Simplify, "211") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::IfExprWithFalseTrue),
+ (Flake8Simplify, "212") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::IfExprWithTwistedArms),
+ (Flake8Simplify, "220") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::ExprAndNotExpr),
+ (Flake8Simplify, "221") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::ExprOrNotExpr),
+ (Flake8Simplify, "222") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::ExprOrTrue),
+ (Flake8Simplify, "223") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::ExprAndFalse),
+ (Flake8Simplify, "300") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::YodaConditions),
+ (Flake8Simplify, "401") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::IfElseBlockInsteadOfDictGet),
+ (Flake8Simplify, "910") => (RuleGroup::Unspecified, rules::flake8_simplify::rules::DictGetWithNoneDefault),
// pyupgrade
- (Pyupgrade, "001") => Rule::UselessMetaclassType,
- (Pyupgrade, "003") => Rule::TypeOfPrimitive,
- (Pyupgrade, "004") => Rule::UselessObjectInheritance,
- (Pyupgrade, "005") => Rule::DeprecatedUnittestAlias,
- (Pyupgrade, "006") => Rule::NonPEP585Annotation,
- (Pyupgrade, "007") => Rule::NonPEP604Annotation,
- (Pyupgrade, "008") => Rule::SuperCallWithParameters,
- (Pyupgrade, "009") => Rule::UTF8EncodingDeclaration,
- (Pyupgrade, "010") => Rule::UnnecessaryFutureImport,
- (Pyupgrade, "011") => Rule::LRUCacheWithoutParameters,
- (Pyupgrade, "012") => Rule::UnnecessaryEncodeUTF8,
- (Pyupgrade, "013") => Rule::ConvertTypedDictFunctionalToClass,
- (Pyupgrade, "014") => Rule::ConvertNamedTupleFunctionalToClass,
- (Pyupgrade, "015") => Rule::RedundantOpenModes,
- (Pyupgrade, "017") => Rule::DatetimeTimezoneUTC,
- (Pyupgrade, "018") => Rule::NativeLiterals,
- (Pyupgrade, "019") => Rule::TypingTextStrAlias,
- (Pyupgrade, "020") => Rule::OpenAlias,
- (Pyupgrade, "021") => Rule::ReplaceUniversalNewlines,
- (Pyupgrade, "022") => Rule::ReplaceStdoutStderr,
- (Pyupgrade, "023") => Rule::DeprecatedCElementTree,
- (Pyupgrade, "024") => Rule::OSErrorAlias,
- (Pyupgrade, "025") => Rule::UnicodeKindPrefix,
- (Pyupgrade, "026") => Rule::DeprecatedMockImport,
- (Pyupgrade, "027") => Rule::UnpackedListComprehension,
- (Pyupgrade, "028") => Rule::YieldInForLoop,
- (Pyupgrade, "029") => Rule::UnnecessaryBuiltinImport,
- (Pyupgrade, "030") => Rule::FormatLiterals,
- (Pyupgrade, "031") => Rule::PrintfStringFormatting,
- (Pyupgrade, "032") => Rule::FString,
- (Pyupgrade, "033") => Rule::LRUCacheWithMaxsizeNone,
- (Pyupgrade, "034") => Rule::ExtraneousParentheses,
- (Pyupgrade, "035") => Rule::DeprecatedImport,
- (Pyupgrade, "036") => Rule::OutdatedVersionBlock,
- (Pyupgrade, "037") => Rule::QuotedAnnotation,
- (Pyupgrade, "038") => Rule::NonPEP604Isinstance,
+ (Pyupgrade, "001") => (RuleGroup::Unspecified, rules::pyupgrade::rules::UselessMetaclassType),
+ (Pyupgrade, "003") => (RuleGroup::Unspecified, rules::pyupgrade::rules::TypeOfPrimitive),
+ (Pyupgrade, "004") => (RuleGroup::Unspecified, rules::pyupgrade::rules::UselessObjectInheritance),
+ (Pyupgrade, "005") => (RuleGroup::Unspecified, rules::pyupgrade::rules::DeprecatedUnittestAlias),
+ (Pyupgrade, "006") => (RuleGroup::Unspecified, rules::pyupgrade::rules::NonPEP585Annotation),
+ (Pyupgrade, "007") => (RuleGroup::Unspecified, rules::pyupgrade::rules::NonPEP604Annotation),
+ (Pyupgrade, "008") => (RuleGroup::Unspecified, rules::pyupgrade::rules::SuperCallWithParameters),
+ (Pyupgrade, "009") => (RuleGroup::Unspecified, rules::pyupgrade::rules::UTF8EncodingDeclaration),
+ (Pyupgrade, "010") => (RuleGroup::Unspecified, rules::pyupgrade::rules::UnnecessaryFutureImport),
+ (Pyupgrade, "011") => (RuleGroup::Unspecified, rules::pyupgrade::rules::LRUCacheWithoutParameters),
+ (Pyupgrade, "012") => (RuleGroup::Unspecified, rules::pyupgrade::rules::UnnecessaryEncodeUTF8),
+ (Pyupgrade, "013") => (RuleGroup::Unspecified, rules::pyupgrade::rules::ConvertTypedDictFunctionalToClass),
+ (Pyupgrade, "014") => (RuleGroup::Unspecified, rules::pyupgrade::rules::ConvertNamedTupleFunctionalToClass),
+ (Pyupgrade, "015") => (RuleGroup::Unspecified, rules::pyupgrade::rules::RedundantOpenModes),
+ (Pyupgrade, "017") => (RuleGroup::Unspecified, rules::pyupgrade::rules::DatetimeTimezoneUTC),
+ (Pyupgrade, "018") => (RuleGroup::Unspecified, rules::pyupgrade::rules::NativeLiterals),
+ (Pyupgrade, "019") => (RuleGroup::Unspecified, rules::pyupgrade::rules::TypingTextStrAlias),
+ (Pyupgrade, "020") => (RuleGroup::Unspecified, rules::pyupgrade::rules::OpenAlias),
+ (Pyupgrade, "021") => (RuleGroup::Unspecified, rules::pyupgrade::rules::ReplaceUniversalNewlines),
+ (Pyupgrade, "022") => (RuleGroup::Unspecified, rules::pyupgrade::rules::ReplaceStdoutStderr),
+ (Pyupgrade, "023") => (RuleGroup::Unspecified, rules::pyupgrade::rules::DeprecatedCElementTree),
+ (Pyupgrade, "024") => (RuleGroup::Unspecified, rules::pyupgrade::rules::OSErrorAlias),
+ (Pyupgrade, "025") => (RuleGroup::Unspecified, rules::pyupgrade::rules::UnicodeKindPrefix),
+ (Pyupgrade, "026") => (RuleGroup::Unspecified, rules::pyupgrade::rules::DeprecatedMockImport),
+ (Pyupgrade, "027") => (RuleGroup::Unspecified, rules::pyupgrade::rules::UnpackedListComprehension),
+ (Pyupgrade, "028") => (RuleGroup::Unspecified, rules::pyupgrade::rules::YieldInForLoop),
+ (Pyupgrade, "029") => (RuleGroup::Unspecified, rules::pyupgrade::rules::UnnecessaryBuiltinImport),
+ (Pyupgrade, "030") => (RuleGroup::Unspecified, rules::pyupgrade::rules::FormatLiterals),
+ (Pyupgrade, "031") => (RuleGroup::Unspecified, rules::pyupgrade::rules::PrintfStringFormatting),
+ (Pyupgrade, "032") => (RuleGroup::Unspecified, rules::pyupgrade::rules::FString),
+ (Pyupgrade, "033") => (RuleGroup::Unspecified, rules::pyupgrade::rules::LRUCacheWithMaxsizeNone),
+ (Pyupgrade, "034") => (RuleGroup::Unspecified, rules::pyupgrade::rules::ExtraneousParentheses),
+ (Pyupgrade, "035") => (RuleGroup::Unspecified, rules::pyupgrade::rules::DeprecatedImport),
+ (Pyupgrade, "036") => (RuleGroup::Unspecified, rules::pyupgrade::rules::OutdatedVersionBlock),
+ (Pyupgrade, "037") => (RuleGroup::Unspecified, rules::pyupgrade::rules::QuotedAnnotation),
+ (Pyupgrade, "038") => (RuleGroup::Unspecified, rules::pyupgrade::rules::NonPEP604Isinstance),
// pydocstyle
- (Pydocstyle, "100") => Rule::UndocumentedPublicModule,
- (Pydocstyle, "101") => Rule::UndocumentedPublicClass,
- (Pydocstyle, "102") => Rule::UndocumentedPublicMethod,
- (Pydocstyle, "103") => Rule::UndocumentedPublicFunction,
- (Pydocstyle, "104") => Rule::UndocumentedPublicPackage,
- (Pydocstyle, "105") => Rule::UndocumentedMagicMethod,
- (Pydocstyle, "106") => Rule::UndocumentedPublicNestedClass,
- (Pydocstyle, "107") => Rule::UndocumentedPublicInit,
- (Pydocstyle, "200") => Rule::FitsOnOneLine,
- (Pydocstyle, "201") => Rule::NoBlankLineBeforeFunction,
- (Pydocstyle, "202") => Rule::NoBlankLineAfterFunction,
- (Pydocstyle, "203") => Rule::OneBlankLineBeforeClass,
- (Pydocstyle, "204") => Rule::OneBlankLineAfterClass,
- (Pydocstyle, "205") => Rule::BlankLineAfterSummary,
- (Pydocstyle, "206") => Rule::IndentWithSpaces,
- (Pydocstyle, "207") => Rule::UnderIndentation,
- (Pydocstyle, "208") => Rule::OverIndentation,
- (Pydocstyle, "209") => Rule::NewLineAfterLastParagraph,
- (Pydocstyle, "210") => Rule::SurroundingWhitespace,
- (Pydocstyle, "211") => Rule::BlankLineBeforeClass,
- (Pydocstyle, "212") => Rule::MultiLineSummaryFirstLine,
- (Pydocstyle, "213") => Rule::MultiLineSummarySecondLine,
- (Pydocstyle, "214") => Rule::SectionNotOverIndented,
- (Pydocstyle, "215") => Rule::SectionUnderlineNotOverIndented,
- (Pydocstyle, "300") => Rule::TripleSingleQuotes,
- (Pydocstyle, "301") => Rule::EscapeSequenceInDocstring,
- (Pydocstyle, "400") => Rule::EndsInPeriod,
- (Pydocstyle, "401") => Rule::NonImperativeMood,
- (Pydocstyle, "402") => Rule::NoSignature,
- (Pydocstyle, "403") => Rule::FirstLineCapitalized,
- (Pydocstyle, "404") => Rule::DocstringStartsWithThis,
- (Pydocstyle, "405") => Rule::CapitalizeSectionName,
- (Pydocstyle, "406") => Rule::NewLineAfterSectionName,
- (Pydocstyle, "407") => Rule::DashedUnderlineAfterSection,
- (Pydocstyle, "408") => Rule::SectionUnderlineAfterName,
- (Pydocstyle, "409") => Rule::SectionUnderlineMatchesSectionLength,
- (Pydocstyle, "410") => Rule::NoBlankLineAfterSection,
- (Pydocstyle, "411") => Rule::NoBlankLineBeforeSection,
- (Pydocstyle, "412") => Rule::BlankLinesBetweenHeaderAndContent,
- (Pydocstyle, "413") => Rule::BlankLineAfterLastSection,
- (Pydocstyle, "414") => Rule::EmptyDocstringSection,
- (Pydocstyle, "415") => Rule::EndsInPunctuation,
- (Pydocstyle, "416") => Rule::SectionNameEndsInColon,
- (Pydocstyle, "417") => Rule::UndocumentedParam,
- (Pydocstyle, "418") => Rule::OverloadWithDocstring,
- (Pydocstyle, "419") => Rule::EmptyDocstring,
+ (Pydocstyle, "100") => (RuleGroup::Unspecified, rules::pydocstyle::rules::UndocumentedPublicModule),
+ (Pydocstyle, "101") => (RuleGroup::Unspecified, rules::pydocstyle::rules::UndocumentedPublicClass),
+ (Pydocstyle, "102") => (RuleGroup::Unspecified, rules::pydocstyle::rules::UndocumentedPublicMethod),
+ (Pydocstyle, "103") => (RuleGroup::Unspecified, rules::pydocstyle::rules::UndocumentedPublicFunction),
+ (Pydocstyle, "104") => (RuleGroup::Unspecified, rules::pydocstyle::rules::UndocumentedPublicPackage),
+ (Pydocstyle, "105") => (RuleGroup::Unspecified, rules::pydocstyle::rules::UndocumentedMagicMethod),
+ (Pydocstyle, "106") => (RuleGroup::Unspecified, rules::pydocstyle::rules::UndocumentedPublicNestedClass),
+ (Pydocstyle, "107") => (RuleGroup::Unspecified, rules::pydocstyle::rules::UndocumentedPublicInit),
+ (Pydocstyle, "200") => (RuleGroup::Unspecified, rules::pydocstyle::rules::FitsOnOneLine),
+ (Pydocstyle, "201") => (RuleGroup::Unspecified, rules::pydocstyle::rules::NoBlankLineBeforeFunction),
+ (Pydocstyle, "202") => (RuleGroup::Unspecified, rules::pydocstyle::rules::NoBlankLineAfterFunction),
+ (Pydocstyle, "203") => (RuleGroup::Unspecified, rules::pydocstyle::rules::OneBlankLineBeforeClass),
+ (Pydocstyle, "204") => (RuleGroup::Unspecified, rules::pydocstyle::rules::OneBlankLineAfterClass),
+ (Pydocstyle, "205") => (RuleGroup::Unspecified, rules::pydocstyle::rules::BlankLineAfterSummary),
+ (Pydocstyle, "206") => (RuleGroup::Unspecified, rules::pydocstyle::rules::IndentWithSpaces),
+ (Pydocstyle, "207") => (RuleGroup::Unspecified, rules::pydocstyle::rules::UnderIndentation),
+ (Pydocstyle, "208") => (RuleGroup::Unspecified, rules::pydocstyle::rules::OverIndentation),
+ (Pydocstyle, "209") => (RuleGroup::Unspecified, rules::pydocstyle::rules::NewLineAfterLastParagraph),
+ (Pydocstyle, "210") => (RuleGroup::Unspecified, rules::pydocstyle::rules::SurroundingWhitespace),
+ (Pydocstyle, "211") => (RuleGroup::Unspecified, rules::pydocstyle::rules::BlankLineBeforeClass),
+ (Pydocstyle, "212") => (RuleGroup::Unspecified, rules::pydocstyle::rules::MultiLineSummaryFirstLine),
+ (Pydocstyle, "213") => (RuleGroup::Unspecified, rules::pydocstyle::rules::MultiLineSummarySecondLine),
+ (Pydocstyle, "214") => (RuleGroup::Unspecified, rules::pydocstyle::rules::SectionNotOverIndented),
+ (Pydocstyle, "215") => (RuleGroup::Unspecified, rules::pydocstyle::rules::SectionUnderlineNotOverIndented),
+ (Pydocstyle, "300") => (RuleGroup::Unspecified, rules::pydocstyle::rules::TripleSingleQuotes),
+ (Pydocstyle, "301") => (RuleGroup::Unspecified, rules::pydocstyle::rules::EscapeSequenceInDocstring),
+ (Pydocstyle, "400") => (RuleGroup::Unspecified, rules::pydocstyle::rules::EndsInPeriod),
+ (Pydocstyle, "401") => (RuleGroup::Unspecified, rules::pydocstyle::rules::NonImperativeMood),
+ (Pydocstyle, "402") => (RuleGroup::Unspecified, rules::pydocstyle::rules::NoSignature),
+ (Pydocstyle, "403") => (RuleGroup::Unspecified, rules::pydocstyle::rules::FirstLineCapitalized),
+ (Pydocstyle, "404") => (RuleGroup::Unspecified, rules::pydocstyle::rules::DocstringStartsWithThis),
+ (Pydocstyle, "405") => (RuleGroup::Unspecified, rules::pydocstyle::rules::CapitalizeSectionName),
+ (Pydocstyle, "406") => (RuleGroup::Unspecified, rules::pydocstyle::rules::NewLineAfterSectionName),
+ (Pydocstyle, "407") => (RuleGroup::Unspecified, rules::pydocstyle::rules::DashedUnderlineAfterSection),
+ (Pydocstyle, "408") => (RuleGroup::Unspecified, rules::pydocstyle::rules::SectionUnderlineAfterName),
+ (Pydocstyle, "409") => (RuleGroup::Unspecified, rules::pydocstyle::rules::SectionUnderlineMatchesSectionLength),
+ (Pydocstyle, "410") => (RuleGroup::Unspecified, rules::pydocstyle::rules::NoBlankLineAfterSection),
+ (Pydocstyle, "411") => (RuleGroup::Unspecified, rules::pydocstyle::rules::NoBlankLineBeforeSection),
+ (Pydocstyle, "412") => (RuleGroup::Unspecified, rules::pydocstyle::rules::BlankLinesBetweenHeaderAndContent),
+ (Pydocstyle, "413") => (RuleGroup::Unspecified, rules::pydocstyle::rules::BlankLineAfterLastSection),
+ (Pydocstyle, "414") => (RuleGroup::Unspecified, rules::pydocstyle::rules::EmptyDocstringSection),
+ (Pydocstyle, "415") => (RuleGroup::Unspecified, rules::pydocstyle::rules::EndsInPunctuation),
+ (Pydocstyle, "416") => (RuleGroup::Unspecified, rules::pydocstyle::rules::SectionNameEndsInColon),
+ (Pydocstyle, "417") => (RuleGroup::Unspecified, rules::pydocstyle::rules::UndocumentedParam),
+ (Pydocstyle, "418") => (RuleGroup::Unspecified, rules::pydocstyle::rules::OverloadWithDocstring),
+ (Pydocstyle, "419") => (RuleGroup::Unspecified, rules::pydocstyle::rules::EmptyDocstring),
// pep8-naming
- (PEP8Naming, "801") => Rule::InvalidClassName,
- (PEP8Naming, "802") => Rule::InvalidFunctionName,
- (PEP8Naming, "803") => Rule::InvalidArgumentName,
- (PEP8Naming, "804") => Rule::InvalidFirstArgumentNameForClassMethod,
- (PEP8Naming, "805") => Rule::InvalidFirstArgumentNameForMethod,
- (PEP8Naming, "806") => Rule::NonLowercaseVariableInFunction,
- (PEP8Naming, "807") => Rule::DunderFunctionName,
- (PEP8Naming, "811") => Rule::ConstantImportedAsNonConstant,
- (PEP8Naming, "812") => Rule::LowercaseImportedAsNonLowercase,
- (PEP8Naming, "813") => Rule::CamelcaseImportedAsLowercase,
- (PEP8Naming, "814") => Rule::CamelcaseImportedAsConstant,
- (PEP8Naming, "815") => Rule::MixedCaseVariableInClassScope,
- (PEP8Naming, "816") => Rule::MixedCaseVariableInGlobalScope,
- (PEP8Naming, "817") => Rule::CamelcaseImportedAsAcronym,
- (PEP8Naming, "818") => Rule::ErrorSuffixOnExceptionName,
- (PEP8Naming, "999") => Rule::InvalidModuleName,
+ (PEP8Naming, "801") => (RuleGroup::Unspecified, rules::pep8_naming::rules::InvalidClassName),
+ (PEP8Naming, "802") => (RuleGroup::Unspecified, rules::pep8_naming::rules::InvalidFunctionName),
+ (PEP8Naming, "803") => (RuleGroup::Unspecified, rules::pep8_naming::rules::InvalidArgumentName),
+ (PEP8Naming, "804") => (RuleGroup::Unspecified, rules::pep8_naming::rules::InvalidFirstArgumentNameForClassMethod),
+ (PEP8Naming, "805") => (RuleGroup::Unspecified, rules::pep8_naming::rules::InvalidFirstArgumentNameForMethod),
+ (PEP8Naming, "806") => (RuleGroup::Unspecified, rules::pep8_naming::rules::NonLowercaseVariableInFunction),
+ (PEP8Naming, "807") => (RuleGroup::Unspecified, rules::pep8_naming::rules::DunderFunctionName),
+ (PEP8Naming, "811") => (RuleGroup::Unspecified, rules::pep8_naming::rules::ConstantImportedAsNonConstant),
+ (PEP8Naming, "812") => (RuleGroup::Unspecified, rules::pep8_naming::rules::LowercaseImportedAsNonLowercase),
+ (PEP8Naming, "813") => (RuleGroup::Unspecified, rules::pep8_naming::rules::CamelcaseImportedAsLowercase),
+ (PEP8Naming, "814") => (RuleGroup::Unspecified, rules::pep8_naming::rules::CamelcaseImportedAsConstant),
+ (PEP8Naming, "815") => (RuleGroup::Unspecified, rules::pep8_naming::rules::MixedCaseVariableInClassScope),
+ (PEP8Naming, "816") => (RuleGroup::Unspecified, rules::pep8_naming::rules::MixedCaseVariableInGlobalScope),
+ (PEP8Naming, "817") => (RuleGroup::Unspecified, rules::pep8_naming::rules::CamelcaseImportedAsAcronym),
+ (PEP8Naming, "818") => (RuleGroup::Unspecified, rules::pep8_naming::rules::ErrorSuffixOnExceptionName),
+ (PEP8Naming, "999") => (RuleGroup::Unspecified, rules::pep8_naming::rules::InvalidModuleName),
// isort
- (Isort, "001") => Rule::UnsortedImports,
- (Isort, "002") => Rule::MissingRequiredImport,
+ (Isort, "001") => (RuleGroup::Unspecified, rules::isort::rules::UnsortedImports),
+ (Isort, "002") => (RuleGroup::Unspecified, rules::isort::rules::MissingRequiredImport),
// eradicate
- (Eradicate, "001") => Rule::CommentedOutCode,
+ (Eradicate, "001") => (RuleGroup::Unspecified, rules::eradicate::rules::CommentedOutCode),
// flake8-bandit
- (Flake8Bandit, "101") => Rule::Assert,
- (Flake8Bandit, "102") => Rule::ExecBuiltin,
- (Flake8Bandit, "103") => Rule::BadFilePermissions,
- (Flake8Bandit, "104") => Rule::HardcodedBindAllInterfaces,
- (Flake8Bandit, "105") => Rule::HardcodedPasswordString,
- (Flake8Bandit, "106") => Rule::HardcodedPasswordFuncArg,
- (Flake8Bandit, "107") => Rule::HardcodedPasswordDefault,
- (Flake8Bandit, "108") => Rule::HardcodedTempFile,
- (Flake8Bandit, "110") => Rule::TryExceptPass,
- (Flake8Bandit, "112") => Rule::TryExceptContinue,
- (Flake8Bandit, "113") => Rule::RequestWithoutTimeout,
- (Flake8Bandit, "301") => Rule::SuspiciousPickleUsage,
- (Flake8Bandit, "302") => Rule::SuspiciousMarshalUsage,
- (Flake8Bandit, "303") => Rule::SuspiciousInsecureHashUsage,
- (Flake8Bandit, "304") => Rule::SuspiciousInsecureCipherUsage,
- (Flake8Bandit, "305") => Rule::SuspiciousInsecureCipherModeUsage,
- (Flake8Bandit, "306") => Rule::SuspiciousMktempUsage,
- (Flake8Bandit, "307") => Rule::SuspiciousEvalUsage,
- (Flake8Bandit, "308") => Rule::SuspiciousMarkSafeUsage,
- (Flake8Bandit, "310") => Rule::SuspiciousURLOpenUsage,
- (Flake8Bandit, "311") => Rule::SuspiciousNonCryptographicRandomUsage,
- (Flake8Bandit, "312") => Rule::SuspiciousTelnetUsage,
- (Flake8Bandit, "313") => Rule::SuspiciousXMLCElementTreeUsage,
- (Flake8Bandit, "314") => Rule::SuspiciousXMLElementTreeUsage,
- (Flake8Bandit, "315") => Rule::SuspiciousXMLExpatReaderUsage,
- (Flake8Bandit, "316") => Rule::SuspiciousXMLExpatBuilderUsage,
- (Flake8Bandit, "317") => Rule::SuspiciousXMLSaxUsage,
- (Flake8Bandit, "318") => Rule::SuspiciousXMLMiniDOMUsage,
- (Flake8Bandit, "319") => Rule::SuspiciousXMLPullDOMUsage,
- (Flake8Bandit, "320") => Rule::SuspiciousXMLETreeUsage,
- (Flake8Bandit, "321") => Rule::SuspiciousFTPLibUsage,
- (Flake8Bandit, "323") => Rule::SuspiciousUnverifiedContextUsage,
- (Flake8Bandit, "324") => Rule::HashlibInsecureHashFunction,
- (Flake8Bandit, "501") => Rule::RequestWithNoCertValidation,
- (Flake8Bandit, "506") => Rule::UnsafeYAMLLoad,
- (Flake8Bandit, "508") => Rule::SnmpInsecureVersion,
- (Flake8Bandit, "509") => Rule::SnmpWeakCryptography,
- (Flake8Bandit, "602") => Rule::SubprocessPopenWithShellEqualsTrue,
- (Flake8Bandit, "603") => Rule::SubprocessWithoutShellEqualsTrue,
- (Flake8Bandit, "604") => Rule::CallWithShellEqualsTrue,
- (Flake8Bandit, "605") => Rule::StartProcessWithAShell,
- (Flake8Bandit, "606") => Rule::StartProcessWithNoShell,
- (Flake8Bandit, "607") => Rule::StartProcessWithPartialPath,
- (Flake8Bandit, "608") => Rule::HardcodedSQLExpression,
- (Flake8Bandit, "612") => Rule::LoggingConfigInsecureListen,
- (Flake8Bandit, "701") => Rule::Jinja2AutoescapeFalse,
+ (Flake8Bandit, "101") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::Assert),
+ (Flake8Bandit, "102") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::ExecBuiltin),
+ (Flake8Bandit, "103") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::BadFilePermissions),
+ (Flake8Bandit, "104") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::HardcodedBindAllInterfaces),
+ (Flake8Bandit, "105") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::HardcodedPasswordString),
+ (Flake8Bandit, "106") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::HardcodedPasswordFuncArg),
+ (Flake8Bandit, "107") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::HardcodedPasswordDefault),
+ (Flake8Bandit, "108") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::HardcodedTempFile),
+ (Flake8Bandit, "110") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::TryExceptPass),
+ (Flake8Bandit, "112") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::TryExceptContinue),
+ (Flake8Bandit, "113") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::RequestWithoutTimeout),
+ (Flake8Bandit, "301") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousPickleUsage),
+ (Flake8Bandit, "302") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousMarshalUsage),
+ (Flake8Bandit, "303") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousInsecureHashUsage),
+ (Flake8Bandit, "304") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousInsecureCipherUsage),
+ (Flake8Bandit, "305") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousInsecureCipherModeUsage),
+ (Flake8Bandit, "306") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousMktempUsage),
+ (Flake8Bandit, "307") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousEvalUsage),
+ (Flake8Bandit, "308") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousMarkSafeUsage),
+ (Flake8Bandit, "310") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousURLOpenUsage),
+ (Flake8Bandit, "311") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousNonCryptographicRandomUsage),
+ (Flake8Bandit, "312") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousTelnetUsage),
+ (Flake8Bandit, "313") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousXMLCElementTreeUsage),
+ (Flake8Bandit, "314") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousXMLElementTreeUsage),
+ (Flake8Bandit, "315") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousXMLExpatReaderUsage),
+ (Flake8Bandit, "316") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousXMLExpatBuilderUsage),
+ (Flake8Bandit, "317") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousXMLSaxUsage),
+ (Flake8Bandit, "318") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousXMLMiniDOMUsage),
+ (Flake8Bandit, "319") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousXMLPullDOMUsage),
+ (Flake8Bandit, "320") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousXMLETreeUsage),
+ (Flake8Bandit, "321") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousFTPLibUsage),
+ (Flake8Bandit, "323") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SuspiciousUnverifiedContextUsage),
+ (Flake8Bandit, "324") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::HashlibInsecureHashFunction),
+ (Flake8Bandit, "501") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::RequestWithNoCertValidation),
+ (Flake8Bandit, "506") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::UnsafeYAMLLoad),
+ (Flake8Bandit, "508") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SnmpInsecureVersion),
+ (Flake8Bandit, "509") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SnmpWeakCryptography),
+ (Flake8Bandit, "601") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::ParamikoCall),
+ (Flake8Bandit, "602") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SubprocessPopenWithShellEqualsTrue),
+ (Flake8Bandit, "603") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::SubprocessWithoutShellEqualsTrue),
+ (Flake8Bandit, "604") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::CallWithShellEqualsTrue),
+ (Flake8Bandit, "605") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::StartProcessWithAShell),
+ (Flake8Bandit, "606") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::StartProcessWithNoShell),
+ (Flake8Bandit, "607") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::StartProcessWithPartialPath),
+ (Flake8Bandit, "608") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::HardcodedSQLExpression),
+ (Flake8Bandit, "609") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::UnixCommandWildcardInjection),
+ (Flake8Bandit, "612") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::LoggingConfigInsecureListen),
+ (Flake8Bandit, "701") => (RuleGroup::Unspecified, rules::flake8_bandit::rules::Jinja2AutoescapeFalse),
// flake8-boolean-trap
- (Flake8BooleanTrap, "001") => Rule::BooleanPositionalArgInFunctionDefinition,
- (Flake8BooleanTrap, "002") => Rule::BooleanDefaultValueInFunctionDefinition,
- (Flake8BooleanTrap, "003") => Rule::BooleanPositionalValueInFunctionCall,
+ (Flake8BooleanTrap, "001") => (RuleGroup::Unspecified, rules::flake8_boolean_trap::rules::BooleanPositionalArgInFunctionDefinition),
+ (Flake8BooleanTrap, "002") => (RuleGroup::Unspecified, rules::flake8_boolean_trap::rules::BooleanDefaultValueInFunctionDefinition),
+ (Flake8BooleanTrap, "003") => (RuleGroup::Unspecified, rules::flake8_boolean_trap::rules::BooleanPositionalValueInFunctionCall),
// flake8-unused-arguments
- (Flake8UnusedArguments, "001") => Rule::UnusedFunctionArgument,
- (Flake8UnusedArguments, "002") => Rule::UnusedMethodArgument,
- (Flake8UnusedArguments, "003") => Rule::UnusedClassMethodArgument,
- (Flake8UnusedArguments, "004") => Rule::UnusedStaticMethodArgument,
- (Flake8UnusedArguments, "005") => Rule::UnusedLambdaArgument,
+ (Flake8UnusedArguments, "001") => (RuleGroup::Unspecified, rules::flake8_unused_arguments::rules::UnusedFunctionArgument),
+ (Flake8UnusedArguments, "002") => (RuleGroup::Unspecified, rules::flake8_unused_arguments::rules::UnusedMethodArgument),
+ (Flake8UnusedArguments, "003") => (RuleGroup::Unspecified, rules::flake8_unused_arguments::rules::UnusedClassMethodArgument),
+ (Flake8UnusedArguments, "004") => (RuleGroup::Unspecified, rules::flake8_unused_arguments::rules::UnusedStaticMethodArgument),
+ (Flake8UnusedArguments, "005") => (RuleGroup::Unspecified, rules::flake8_unused_arguments::rules::UnusedLambdaArgument),
// flake8-import-conventions
- (Flake8ImportConventions, "001") => Rule::UnconventionalImportAlias,
- (Flake8ImportConventions, "002") => Rule::BannedImportAlias,
- (Flake8ImportConventions, "003") => Rule::BannedImportFrom,
+ (Flake8ImportConventions, "001") => (RuleGroup::Unspecified, rules::flake8_import_conventions::rules::UnconventionalImportAlias),
+ (Flake8ImportConventions, "002") => (RuleGroup::Unspecified, rules::flake8_import_conventions::rules::BannedImportAlias),
+ (Flake8ImportConventions, "003") => (RuleGroup::Unspecified, rules::flake8_import_conventions::rules::BannedImportFrom),
// flake8-datetimez
- (Flake8Datetimez, "001") => Rule::CallDatetimeWithoutTzinfo,
- (Flake8Datetimez, "002") => Rule::CallDatetimeToday,
- (Flake8Datetimez, "003") => Rule::CallDatetimeUtcnow,
- (Flake8Datetimez, "004") => Rule::CallDatetimeUtcfromtimestamp,
- (Flake8Datetimez, "005") => Rule::CallDatetimeNowWithoutTzinfo,
- (Flake8Datetimez, "006") => Rule::CallDatetimeFromtimestamp,
- (Flake8Datetimez, "007") => Rule::CallDatetimeStrptimeWithoutZone,
- (Flake8Datetimez, "011") => Rule::CallDateToday,
- (Flake8Datetimez, "012") => Rule::CallDateFromtimestamp,
+ (Flake8Datetimez, "001") => (RuleGroup::Unspecified, rules::flake8_datetimez::rules::CallDatetimeWithoutTzinfo),
+ (Flake8Datetimez, "002") => (RuleGroup::Unspecified, rules::flake8_datetimez::rules::CallDatetimeToday),
+ (Flake8Datetimez, "003") => (RuleGroup::Unspecified, rules::flake8_datetimez::rules::CallDatetimeUtcnow),
+ (Flake8Datetimez, "004") => (RuleGroup::Unspecified, rules::flake8_datetimez::rules::CallDatetimeUtcfromtimestamp),
+ (Flake8Datetimez, "005") => (RuleGroup::Unspecified, rules::flake8_datetimez::rules::CallDatetimeNowWithoutTzinfo),
+ (Flake8Datetimez, "006") => (RuleGroup::Unspecified, rules::flake8_datetimez::rules::CallDatetimeFromtimestamp),
+ (Flake8Datetimez, "007") => (RuleGroup::Unspecified, rules::flake8_datetimez::rules::CallDatetimeStrptimeWithoutZone),
+ (Flake8Datetimez, "011") => (RuleGroup::Unspecified, rules::flake8_datetimez::rules::CallDateToday),
+ (Flake8Datetimez, "012") => (RuleGroup::Unspecified, rules::flake8_datetimez::rules::CallDateFromtimestamp),
// pygrep-hooks
- (PygrepHooks, "001") => Rule::Eval,
- (PygrepHooks, "002") => Rule::DeprecatedLogWarn,
- (PygrepHooks, "003") => Rule::BlanketTypeIgnore,
- (PygrepHooks, "004") => Rule::BlanketNOQA,
+ (PygrepHooks, "001") => (RuleGroup::Unspecified, rules::pygrep_hooks::rules::Eval),
+ (PygrepHooks, "002") => (RuleGroup::Unspecified, rules::pygrep_hooks::rules::DeprecatedLogWarn),
+ (PygrepHooks, "003") => (RuleGroup::Unspecified, rules::pygrep_hooks::rules::BlanketTypeIgnore),
+ (PygrepHooks, "004") => (RuleGroup::Unspecified, rules::pygrep_hooks::rules::BlanketNOQA),
+ (PygrepHooks, "005") => (RuleGroup::Unspecified, rules::pygrep_hooks::rules::InvalidMockAccess),
// pandas-vet
- (PandasVet, "002") => Rule::PandasUseOfInplaceArgument,
- (PandasVet, "003") => Rule::PandasUseOfDotIsNull,
- (PandasVet, "004") => Rule::PandasUseOfDotNotNull,
- (PandasVet, "007") => Rule::PandasUseOfDotIx,
- (PandasVet, "008") => Rule::PandasUseOfDotAt,
- (PandasVet, "009") => Rule::PandasUseOfDotIat,
- (PandasVet, "010") => Rule::PandasUseOfDotPivotOrUnstack,
- (PandasVet, "011") => Rule::PandasUseOfDotValues,
- (PandasVet, "012") => Rule::PandasUseOfDotReadTable,
- (PandasVet, "013") => Rule::PandasUseOfDotStack,
- (PandasVet, "015") => Rule::PandasUseOfPdMerge,
- (PandasVet, "901") => Rule::PandasDfVariableName,
+ (PandasVet, "002") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasUseOfInplaceArgument),
+ (PandasVet, "003") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasUseOfDotIsNull),
+ (PandasVet, "004") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasUseOfDotNotNull),
+ (PandasVet, "007") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasUseOfDotIx),
+ (PandasVet, "008") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasUseOfDotAt),
+ (PandasVet, "009") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasUseOfDotIat),
+ (PandasVet, "010") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasUseOfDotPivotOrUnstack),
+ (PandasVet, "011") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasUseOfDotValues),
+ (PandasVet, "012") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasUseOfDotReadTable),
+ (PandasVet, "013") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasUseOfDotStack),
+ (PandasVet, "015") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasUseOfPdMerge),
+ (PandasVet, "901") => (RuleGroup::Unspecified, rules::pandas_vet::rules::PandasDfVariableName),
// flake8-errmsg
- (Flake8ErrMsg, "101") => Rule::RawStringInException,
- (Flake8ErrMsg, "102") => Rule::FStringInException,
- (Flake8ErrMsg, "103") => Rule::DotFormatInException,
+ (Flake8ErrMsg, "101") => (RuleGroup::Unspecified, rules::flake8_errmsg::rules::RawStringInException),
+ (Flake8ErrMsg, "102") => (RuleGroup::Unspecified, rules::flake8_errmsg::rules::FStringInException),
+ (Flake8ErrMsg, "103") => (RuleGroup::Unspecified, rules::flake8_errmsg::rules::DotFormatInException),
// flake8-pyi
- (Flake8Pyi, "001") => Rule::UnprefixedTypeParam,
- (Flake8Pyi, "006") => Rule::BadVersionInfoComparison,
- (Flake8Pyi, "007") => Rule::UnrecognizedPlatformCheck,
- (Flake8Pyi, "008") => Rule::UnrecognizedPlatformName,
- (Flake8Pyi, "009") => Rule::PassStatementStubBody,
- (Flake8Pyi, "010") => Rule::NonEmptyStubBody,
- (Flake8Pyi, "011") => Rule::TypedArgumentDefaultInStub,
- (Flake8Pyi, "012") => Rule::PassInClassBody,
- (Flake8Pyi, "014") => Rule::ArgumentDefaultInStub,
- (Flake8Pyi, "015") => Rule::AssignmentDefaultInStub,
- (Flake8Pyi, "016") => Rule::DuplicateUnionMember,
- (Flake8Pyi, "020") => Rule::QuotedAnnotationInStub,
- (Flake8Pyi, "021") => Rule::DocstringInStub,
- (Flake8Pyi, "033") => Rule::TypeCommentInStub,
- (Flake8Pyi, "042") => Rule::SnakeCaseTypeAlias,
- (Flake8Pyi, "043") => Rule::TSuffixedTypeAlias,
+ (Flake8Pyi, "001") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::UnprefixedTypeParam),
+ (Flake8Pyi, "006") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::BadVersionInfoComparison),
+ (Flake8Pyi, "007") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::UnrecognizedPlatformCheck),
+ (Flake8Pyi, "008") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::UnrecognizedPlatformName),
+ (Flake8Pyi, "009") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::PassStatementStubBody),
+ (Flake8Pyi, "010") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::NonEmptyStubBody),
+ (Flake8Pyi, "011") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::TypedArgumentDefaultInStub),
+ (Flake8Pyi, "012") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::PassInClassBody),
+ (Flake8Pyi, "013") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::EllipsisInNonEmptyClassBody),
+ (Flake8Pyi, "014") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::ArgumentDefaultInStub),
+ (Flake8Pyi, "015") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::AssignmentDefaultInStub),
+ (Flake8Pyi, "016") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::DuplicateUnionMember),
+ (Flake8Pyi, "020") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::QuotedAnnotationInStub),
+ (Flake8Pyi, "021") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::DocstringInStub),
+ (Flake8Pyi, "024") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::CollectionsNamedTuple),
+ (Flake8Pyi, "025") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::UnaliasedCollectionsAbcSetImport),
+ (Flake8Pyi, "029") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::StrOrReprDefinedInStub),
+ (Flake8Pyi, "032") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::AnyEqNeAnnotation),
+ (Flake8Pyi, "033") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::TypeCommentInStub),
+ (Flake8Pyi, "034") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::NonSelfReturnType),
+ (Flake8Pyi, "035") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::UnassignedSpecialVariableInStub),
+ (Flake8Pyi, "042") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::SnakeCaseTypeAlias),
+ (Flake8Pyi, "043") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::TSuffixedTypeAlias),
+ (Flake8Pyi, "045") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::IterMethodReturnIterable),
+ (Flake8Pyi, "048") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::StubBodyMultipleStatements),
+ (Flake8Pyi, "050") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::NoReturnArgumentAnnotationInStub),
+ (Flake8Pyi, "052") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::UnannotatedAssignmentInStub),
+ (Flake8Pyi, "054") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::NumericLiteralTooLong),
+ (Flake8Pyi, "053") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::StringOrBytesTooLong),
// flake8-pytest-style
- (Flake8PytestStyle, "001") => Rule::PytestFixtureIncorrectParenthesesStyle,
- (Flake8PytestStyle, "002") => Rule::PytestFixturePositionalArgs,
- (Flake8PytestStyle, "003") => Rule::PytestExtraneousScopeFunction,
- (Flake8PytestStyle, "004") => Rule::PytestMissingFixtureNameUnderscore,
- (Flake8PytestStyle, "005") => Rule::PytestIncorrectFixtureNameUnderscore,
- (Flake8PytestStyle, "006") => Rule::PytestParametrizeNamesWrongType,
- (Flake8PytestStyle, "007") => Rule::PytestParametrizeValuesWrongType,
- (Flake8PytestStyle, "008") => Rule::PytestPatchWithLambda,
- (Flake8PytestStyle, "009") => Rule::PytestUnittestAssertion,
- (Flake8PytestStyle, "010") => Rule::PytestRaisesWithoutException,
- (Flake8PytestStyle, "011") => Rule::PytestRaisesTooBroad,
- (Flake8PytestStyle, "012") => Rule::PytestRaisesWithMultipleStatements,
- (Flake8PytestStyle, "013") => Rule::PytestIncorrectPytestImport,
- (Flake8PytestStyle, "015") => Rule::PytestAssertAlwaysFalse,
- (Flake8PytestStyle, "016") => Rule::PytestFailWithoutMessage,
- (Flake8PytestStyle, "017") => Rule::PytestAssertInExcept,
- (Flake8PytestStyle, "018") => Rule::PytestCompositeAssertion,
- (Flake8PytestStyle, "019") => Rule::PytestFixtureParamWithoutValue,
- (Flake8PytestStyle, "020") => Rule::PytestDeprecatedYieldFixture,
- (Flake8PytestStyle, "021") => Rule::PytestFixtureFinalizerCallback,
- (Flake8PytestStyle, "022") => Rule::PytestUselessYieldFixture,
- (Flake8PytestStyle, "023") => Rule::PytestIncorrectMarkParenthesesStyle,
- (Flake8PytestStyle, "024") => Rule::PytestUnnecessaryAsyncioMarkOnFixture,
- (Flake8PytestStyle, "025") => Rule::PytestErroneousUseFixturesOnFixture,
- (Flake8PytestStyle, "026") => Rule::PytestUseFixturesWithoutParameters,
+ (Flake8PytestStyle, "001") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestFixtureIncorrectParenthesesStyle),
+ (Flake8PytestStyle, "002") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestFixturePositionalArgs),
+ (Flake8PytestStyle, "003") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestExtraneousScopeFunction),
+ (Flake8PytestStyle, "004") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestMissingFixtureNameUnderscore),
+ (Flake8PytestStyle, "005") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestIncorrectFixtureNameUnderscore),
+ (Flake8PytestStyle, "006") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestParametrizeNamesWrongType),
+ (Flake8PytestStyle, "007") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestParametrizeValuesWrongType),
+ (Flake8PytestStyle, "008") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestPatchWithLambda),
+ (Flake8PytestStyle, "009") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestUnittestAssertion),
+ (Flake8PytestStyle, "010") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestRaisesWithoutException),
+ (Flake8PytestStyle, "011") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestRaisesTooBroad),
+ (Flake8PytestStyle, "012") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestRaisesWithMultipleStatements),
+ (Flake8PytestStyle, "013") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestIncorrectPytestImport),
+ (Flake8PytestStyle, "015") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestAssertAlwaysFalse),
+ (Flake8PytestStyle, "016") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestFailWithoutMessage),
+ (Flake8PytestStyle, "017") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestAssertInExcept),
+ (Flake8PytestStyle, "018") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestCompositeAssertion),
+ (Flake8PytestStyle, "019") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestFixtureParamWithoutValue),
+ (Flake8PytestStyle, "020") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestDeprecatedYieldFixture),
+ (Flake8PytestStyle, "021") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestFixtureFinalizerCallback),
+ (Flake8PytestStyle, "022") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestUselessYieldFixture),
+ (Flake8PytestStyle, "023") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestIncorrectMarkParenthesesStyle),
+ (Flake8PytestStyle, "024") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestUnnecessaryAsyncioMarkOnFixture),
+ (Flake8PytestStyle, "025") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestErroneousUseFixturesOnFixture),
+ (Flake8PytestStyle, "026") => (RuleGroup::Unspecified, rules::flake8_pytest_style::rules::PytestUseFixturesWithoutParameters),
// flake8-pie
- (Flake8Pie, "790") => Rule::UnnecessaryPass,
- (Flake8Pie, "794") => Rule::DuplicateClassFieldDefinition,
- (Flake8Pie, "796") => Rule::NonUniqueEnums,
- (Flake8Pie, "800") => Rule::UnnecessarySpread,
- (Flake8Pie, "804") => Rule::UnnecessaryDictKwargs,
- (Flake8Pie, "807") => Rule::ReimplementedListBuiltin,
- (Flake8Pie, "810") => Rule::MultipleStartsEndsWith,
+ (Flake8Pie, "790") => (RuleGroup::Unspecified, rules::flake8_pie::rules::UnnecessaryPass),
+ (Flake8Pie, "794") => (RuleGroup::Unspecified, rules::flake8_pie::rules::DuplicateClassFieldDefinition),
+ (Flake8Pie, "796") => (RuleGroup::Unspecified, rules::flake8_pie::rules::NonUniqueEnums),
+ (Flake8Pie, "800") => (RuleGroup::Unspecified, rules::flake8_pie::rules::UnnecessarySpread),
+ (Flake8Pie, "804") => (RuleGroup::Unspecified, rules::flake8_pie::rules::UnnecessaryDictKwargs),
+ (Flake8Pie, "807") => (RuleGroup::Unspecified, rules::flake8_pie::rules::ReimplementedListBuiltin),
+ (Flake8Pie, "810") => (RuleGroup::Unspecified, rules::flake8_pie::rules::MultipleStartsEndsWith),
// flake8-commas
- (Flake8Commas, "812") => Rule::MissingTrailingComma,
- (Flake8Commas, "818") => Rule::TrailingCommaOnBareTuple,
- (Flake8Commas, "819") => Rule::ProhibitedTrailingComma,
+ (Flake8Commas, "812") => (RuleGroup::Unspecified, rules::flake8_commas::rules::MissingTrailingComma),
+ (Flake8Commas, "818") => (RuleGroup::Unspecified, rules::flake8_commas::rules::TrailingCommaOnBareTuple),
+ (Flake8Commas, "819") => (RuleGroup::Unspecified, rules::flake8_commas::rules::ProhibitedTrailingComma),
// flake8-no-pep420
- (Flake8NoPep420, "001") => Rule::ImplicitNamespacePackage,
+ (Flake8NoPep420, "001") => (RuleGroup::Unspecified, rules::flake8_no_pep420::rules::ImplicitNamespacePackage),
// flake8-executable
- (Flake8Executable, "001") => Rule::ShebangNotExecutable,
- (Flake8Executable, "002") => Rule::ShebangMissingExecutableFile,
- (Flake8Executable, "003") => Rule::ShebangMissingPython,
- (Flake8Executable, "004") => Rule::ShebangLeadingWhitespace,
- (Flake8Executable, "005") => Rule::ShebangNotFirstLine,
+ (Flake8Executable, "001") => (RuleGroup::Unspecified, rules::flake8_executable::rules::ShebangNotExecutable),
+ (Flake8Executable, "002") => (RuleGroup::Unspecified, rules::flake8_executable::rules::ShebangMissingExecutableFile),
+ (Flake8Executable, "003") => (RuleGroup::Unspecified, rules::flake8_executable::rules::ShebangMissingPython),
+ (Flake8Executable, "004") => (RuleGroup::Unspecified, rules::flake8_executable::rules::ShebangLeadingWhitespace),
+ (Flake8Executable, "005") => (RuleGroup::Unspecified, rules::flake8_executable::rules::ShebangNotFirstLine),
// flake8-type-checking
- (Flake8TypeChecking, "001") => Rule::TypingOnlyFirstPartyImport,
- (Flake8TypeChecking, "002") => Rule::TypingOnlyThirdPartyImport,
- (Flake8TypeChecking, "003") => Rule::TypingOnlyStandardLibraryImport,
- (Flake8TypeChecking, "004") => Rule::RuntimeImportInTypeCheckingBlock,
- (Flake8TypeChecking, "005") => Rule::EmptyTypeCheckingBlock,
+ (Flake8TypeChecking, "001") => (RuleGroup::Unspecified, rules::flake8_type_checking::rules::TypingOnlyFirstPartyImport),
+ (Flake8TypeChecking, "002") => (RuleGroup::Unspecified, rules::flake8_type_checking::rules::TypingOnlyThirdPartyImport),
+ (Flake8TypeChecking, "003") => (RuleGroup::Unspecified, rules::flake8_type_checking::rules::TypingOnlyStandardLibraryImport),
+ (Flake8TypeChecking, "004") => (RuleGroup::Unspecified, rules::flake8_type_checking::rules::RuntimeImportInTypeCheckingBlock),
+ (Flake8TypeChecking, "005") => (RuleGroup::Unspecified, rules::flake8_type_checking::rules::EmptyTypeCheckingBlock),
// tryceratops
- (Tryceratops, "002") => Rule::RaiseVanillaClass,
- (Tryceratops, "003") => Rule::RaiseVanillaArgs,
- (Tryceratops, "004") => Rule::TypeCheckWithoutTypeError,
- (Tryceratops, "200") => Rule::ReraiseNoCause,
- (Tryceratops, "201") => Rule::VerboseRaise,
- (Tryceratops, "300") => Rule::TryConsiderElse,
- (Tryceratops, "301") => Rule::RaiseWithinTry,
- (Tryceratops, "400") => Rule::ErrorInsteadOfException,
- (Tryceratops, "401") => Rule::VerboseLogMessage,
+ (Tryceratops, "002") => (RuleGroup::Unspecified, rules::tryceratops::rules::RaiseVanillaClass),
+ (Tryceratops, "003") => (RuleGroup::Unspecified, rules::tryceratops::rules::RaiseVanillaArgs),
+ (Tryceratops, "004") => (RuleGroup::Unspecified, rules::tryceratops::rules::TypeCheckWithoutTypeError),
+ (Tryceratops, "200") => (RuleGroup::Unspecified, rules::tryceratops::rules::ReraiseNoCause),
+ (Tryceratops, "201") => (RuleGroup::Unspecified, rules::tryceratops::rules::VerboseRaise),
+ (Tryceratops, "300") => (RuleGroup::Unspecified, rules::tryceratops::rules::TryConsiderElse),
+ (Tryceratops, "301") => (RuleGroup::Unspecified, rules::tryceratops::rules::RaiseWithinTry),
+ (Tryceratops, "302") => (RuleGroup::Unspecified, rules::tryceratops::rules::UselessTryExcept),
+ (Tryceratops, "400") => (RuleGroup::Unspecified, rules::tryceratops::rules::ErrorInsteadOfException),
+ (Tryceratops, "401") => (RuleGroup::Unspecified, rules::tryceratops::rules::VerboseLogMessage),
// flake8-use-pathlib
- (Flake8UsePathlib, "100") => Rule::OsPathAbspath,
- (Flake8UsePathlib, "101") => Rule::OsChmod,
- (Flake8UsePathlib, "102") => Rule::OsMkdir,
- (Flake8UsePathlib, "103") => Rule::OsMakedirs,
- (Flake8UsePathlib, "104") => Rule::OsRename,
- (Flake8UsePathlib, "105") => Rule::PathlibReplace,
- (Flake8UsePathlib, "106") => Rule::OsRmdir,
- (Flake8UsePathlib, "107") => Rule::OsRemove,
- (Flake8UsePathlib, "108") => Rule::OsUnlink,
- (Flake8UsePathlib, "109") => Rule::OsGetcwd,
- (Flake8UsePathlib, "110") => Rule::OsPathExists,
- (Flake8UsePathlib, "111") => Rule::OsPathExpanduser,
- (Flake8UsePathlib, "112") => Rule::OsPathIsdir,
- (Flake8UsePathlib, "113") => Rule::OsPathIsfile,
- (Flake8UsePathlib, "114") => Rule::OsPathIslink,
- (Flake8UsePathlib, "115") => Rule::OsReadlink,
- (Flake8UsePathlib, "116") => Rule::OsStat,
- (Flake8UsePathlib, "117") => Rule::OsPathIsabs,
- (Flake8UsePathlib, "118") => Rule::OsPathJoin,
- (Flake8UsePathlib, "119") => Rule::OsPathBasename,
- (Flake8UsePathlib, "120") => Rule::OsPathDirname,
- (Flake8UsePathlib, "121") => Rule::OsPathSamefile,
- (Flake8UsePathlib, "122") => Rule::OsPathSplitext,
- (Flake8UsePathlib, "123") => Rule::BuiltinOpen,
- (Flake8UsePathlib, "124") => Rule::PyPath,
+ (Flake8UsePathlib, "100") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathAbspath),
+ (Flake8UsePathlib, "101") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsChmod),
+ (Flake8UsePathlib, "102") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsMkdir),
+ (Flake8UsePathlib, "103") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsMakedirs),
+ (Flake8UsePathlib, "104") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsRename),
+ (Flake8UsePathlib, "105") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsReplace),
+ (Flake8UsePathlib, "106") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsRmdir),
+ (Flake8UsePathlib, "107") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsRemove),
+ (Flake8UsePathlib, "108") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsUnlink),
+ (Flake8UsePathlib, "109") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsGetcwd),
+ (Flake8UsePathlib, "110") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathExists),
+ (Flake8UsePathlib, "111") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathExpanduser),
+ (Flake8UsePathlib, "112") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathIsdir),
+ (Flake8UsePathlib, "113") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathIsfile),
+ (Flake8UsePathlib, "114") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathIslink),
+ (Flake8UsePathlib, "115") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsReadlink),
+ (Flake8UsePathlib, "116") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsStat),
+ (Flake8UsePathlib, "117") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathIsabs),
+ (Flake8UsePathlib, "118") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathJoin),
+ (Flake8UsePathlib, "119") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathBasename),
+ (Flake8UsePathlib, "120") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathDirname),
+ (Flake8UsePathlib, "121") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathSamefile),
+ (Flake8UsePathlib, "122") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::OsPathSplitext),
+ (Flake8UsePathlib, "123") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::BuiltinOpen),
+ (Flake8UsePathlib, "124") => (RuleGroup::Unspecified, rules::flake8_use_pathlib::violations::PyPath),
// flake8-logging-format
- (Flake8LoggingFormat, "001") => Rule::LoggingStringFormat,
- (Flake8LoggingFormat, "002") => Rule::LoggingPercentFormat,
- (Flake8LoggingFormat, "003") => Rule::LoggingStringConcat,
- (Flake8LoggingFormat, "004") => Rule::LoggingFString,
- (Flake8LoggingFormat, "010") => Rule::LoggingWarn,
- (Flake8LoggingFormat, "101") => Rule::LoggingExtraAttrClash,
- (Flake8LoggingFormat, "201") => Rule::LoggingExcInfo,
- (Flake8LoggingFormat, "202") => Rule::LoggingRedundantExcInfo,
+ (Flake8LoggingFormat, "001") => (RuleGroup::Unspecified, rules::flake8_logging_format::violations::LoggingStringFormat),
+ (Flake8LoggingFormat, "002") => (RuleGroup::Unspecified, rules::flake8_logging_format::violations::LoggingPercentFormat),
+ (Flake8LoggingFormat, "003") => (RuleGroup::Unspecified, rules::flake8_logging_format::violations::LoggingStringConcat),
+ (Flake8LoggingFormat, "004") => (RuleGroup::Unspecified, rules::flake8_logging_format::violations::LoggingFString),
+ (Flake8LoggingFormat, "010") => (RuleGroup::Unspecified, rules::flake8_logging_format::violations::LoggingWarn),
+ (Flake8LoggingFormat, "101") => (RuleGroup::Unspecified, rules::flake8_logging_format::violations::LoggingExtraAttrClash),
+ (Flake8LoggingFormat, "201") => (RuleGroup::Unspecified, rules::flake8_logging_format::violations::LoggingExcInfo),
+ (Flake8LoggingFormat, "202") => (RuleGroup::Unspecified, rules::flake8_logging_format::violations::LoggingRedundantExcInfo),
// flake8-raise
- (Flake8Raise, "102") => Rule::UnnecessaryParenOnRaiseException,
+ (Flake8Raise, "102") => (RuleGroup::Unspecified, rules::flake8_raise::rules::UnnecessaryParenOnRaiseException),
// flake8-self
- (Flake8Self, "001") => Rule::PrivateMemberAccess,
+ (Flake8Self, "001") => (RuleGroup::Unspecified, rules::flake8_self::rules::PrivateMemberAccess),
// numpy
- (Numpy, "001") => Rule::NumpyDeprecatedTypeAlias,
- (Numpy, "002") => Rule::NumpyLegacyRandom,
+ (Numpy, "001") => (RuleGroup::Unspecified, rules::numpy::rules::NumpyDeprecatedTypeAlias),
+ (Numpy, "002") => (RuleGroup::Unspecified, rules::numpy::rules::NumpyLegacyRandom),
// ruff
- (Ruff, "001") => Rule::AmbiguousUnicodeCharacterString,
- (Ruff, "002") => Rule::AmbiguousUnicodeCharacterDocstring,
- (Ruff, "003") => Rule::AmbiguousUnicodeCharacterComment,
- (Ruff, "005") => Rule::CollectionLiteralConcatenation,
- (Ruff, "006") => Rule::AsyncioDanglingTask,
- (Ruff, "007") => Rule::PairwiseOverZipped,
- (Ruff, "008") => Rule::MutableDataclassDefault,
- (Ruff, "009") => Rule::FunctionCallInDataclassDefaultArgument,
- (Ruff, "100") => Rule::UnusedNOQA,
+ (Ruff, "001") => (RuleGroup::Unspecified, rules::ruff::rules::AmbiguousUnicodeCharacterString),
+ (Ruff, "002") => (RuleGroup::Unspecified, rules::ruff::rules::AmbiguousUnicodeCharacterDocstring),
+ (Ruff, "003") => (RuleGroup::Unspecified, rules::ruff::rules::AmbiguousUnicodeCharacterComment),
+ (Ruff, "005") => (RuleGroup::Unspecified, rules::ruff::rules::CollectionLiteralConcatenation),
+ (Ruff, "006") => (RuleGroup::Unspecified, rules::ruff::rules::AsyncioDanglingTask),
+ (Ruff, "007") => (RuleGroup::Unspecified, rules::ruff::rules::PairwiseOverZipped),
+ (Ruff, "008") => (RuleGroup::Unspecified, rules::ruff::rules::MutableDataclassDefault),
+ (Ruff, "009") => (RuleGroup::Unspecified, rules::ruff::rules::FunctionCallInDataclassDefaultArgument),
+ (Ruff, "010") => (RuleGroup::Unspecified, rules::ruff::rules::ExplicitFStringTypeConversion),
+ (Ruff, "100") => (RuleGroup::Unspecified, rules::ruff::rules::UnusedNOQA),
+ (Ruff, "200") => (RuleGroup::Unspecified, rules::ruff::rules::InvalidPyprojectToml),
// flake8-django
- (Flake8Django, "001") => Rule::DjangoNullableModelStringField,
- (Flake8Django, "003") => Rule::DjangoLocalsInRenderFunction,
- (Flake8Django, "006") => Rule::DjangoExcludeWithModelForm,
- (Flake8Django, "007") => Rule::DjangoAllWithModelForm,
- (Flake8Django, "008") => Rule::DjangoModelWithoutDunderStr,
- (Flake8Django, "012") => Rule::DjangoUnorderedBodyContentInModel,
- (Flake8Django, "013") => Rule::DjangoNonLeadingReceiverDecorator,
+ (Flake8Django, "001") => (RuleGroup::Unspecified, rules::flake8_django::rules::DjangoNullableModelStringField),
+ (Flake8Django, "003") => (RuleGroup::Unspecified, rules::flake8_django::rules::DjangoLocalsInRenderFunction),
+ (Flake8Django, "006") => (RuleGroup::Unspecified, rules::flake8_django::rules::DjangoExcludeWithModelForm),
+ (Flake8Django, "007") => (RuleGroup::Unspecified, rules::flake8_django::rules::DjangoAllWithModelForm),
+ (Flake8Django, "008") => (RuleGroup::Unspecified, rules::flake8_django::rules::DjangoModelWithoutDunderStr),
+ (Flake8Django, "012") => (RuleGroup::Unspecified, rules::flake8_django::rules::DjangoUnorderedBodyContentInModel),
+ (Flake8Django, "013") => (RuleGroup::Unspecified, rules::flake8_django::rules::DjangoNonLeadingReceiverDecorator),
+
+ // flynt
+ // Reserved: (Flynt, "001") => (RuleGroup::Unspecified, Rule::StringConcatenationToFString),
+ (Flynt, "002") => (RuleGroup::Unspecified, rules::flynt::rules::StaticJoinToFString),
+
+ // flake8-todos
+ (Flake8Todos, "001") => (RuleGroup::Unspecified, rules::flake8_todos::rules::InvalidTodoTag),
+ (Flake8Todos, "002") => (RuleGroup::Unspecified, rules::flake8_todos::rules::MissingTodoAuthor),
+ (Flake8Todos, "003") => (RuleGroup::Unspecified, rules::flake8_todos::rules::MissingTodoLink),
+ (Flake8Todos, "004") => (RuleGroup::Unspecified, rules::flake8_todos::rules::MissingTodoColon),
+ (Flake8Todos, "005") => (RuleGroup::Unspecified, rules::flake8_todos::rules::MissingTodoDescription),
+ (Flake8Todos, "006") => (RuleGroup::Unspecified, rules::flake8_todos::rules::InvalidTodoCapitalization),
+ (Flake8Todos, "007") => (RuleGroup::Unspecified, rules::flake8_todos::rules::MissingSpaceAfterTodoColon),
+
+ // airflow
+ (Airflow, "001") => (RuleGroup::Unspecified, rules::airflow::rules::AirflowVariableNameTaskIdMismatch),
+
+ // flake8-fixme
+ (Flake8Fixme, "001") => (RuleGroup::Unspecified, rules::flake8_fixme::rules::LineContainsFixme),
+ (Flake8Fixme, "002") => (RuleGroup::Unspecified, rules::flake8_fixme::rules::LineContainsTodo),
+ (Flake8Fixme, "003") => (RuleGroup::Unspecified, rules::flake8_fixme::rules::LineContainsXxx),
+ (Flake8Fixme, "004") => (RuleGroup::Unspecified, rules::flake8_fixme::rules::LineContainsHack),
_ => return None,
})
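
// The block above is the generated (prefix, code) -> rule table: each arm pairs a linter
// prefix with a numeric code suffix and resolves to a rule group plus the fully qualified
// rule struct, with unknown codes falling through to `None`. Below is a minimal,
// self-contained sketch of that lookup shape; `DemoLinter`, `DemoGroup`, `DemoRule`, and
// `lookup` are illustrative stand-ins, not Ruff's actual types.

#[derive(Debug, PartialEq)]
enum DemoLinter {
    Pyupgrade,
    Pydocstyle,
}

#[derive(Debug, PartialEq)]
enum DemoGroup {
    Unspecified,
}

#[derive(Debug, PartialEq)]
enum DemoRule {
    SuperCallWithParameters,
    UndocumentedPublicModule,
}

// Resolve a (linter, code) pair the same way the generated match arms do.
fn lookup(linter: DemoLinter, code: &str) -> Option<(DemoGroup, DemoRule)> {
    Some(match (linter, code) {
        (DemoLinter::Pyupgrade, "008") => (DemoGroup::Unspecified, DemoRule::SuperCallWithParameters),
        (DemoLinter::Pydocstyle, "100") => (DemoGroup::Unspecified, DemoRule::UndocumentedPublicModule),
        _ => return None,
    })
}

fn main() {
    assert_eq!(
        lookup(DemoLinter::Pyupgrade, "008"),
        Some((DemoGroup::Unspecified, DemoRule::SuperCallWithParameters))
    );
    // Unknown codes fall through to `None`.
    assert_eq!(lookup(DemoLinter::Pyupgrade, "999"), None);
}
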
diff --git a/crates/ruff/src/cst/helpers.rs b/crates/ruff/src/cst/helpers.rs
index 2235afeb7f39a..c2cc95f79c951 100644
--- a/crates/ruff/src/cst/helpers.rs
+++ b/crates/ruff/src/cst/helpers.rs
@@ -1,7 +1,7 @@
use libcst_native::{Expression, NameOrAttribute};
fn compose_call_path_inner<'a>(expr: &'a Expression, parts: &mut Vec<&'a str>) {
- match &expr {
+ match expr {
Expression::Call(expr) => {
compose_call_path_inner(&expr.func, parts);
}
@@ -16,7 +16,7 @@ fn compose_call_path_inner<'a>(expr: &'a Expression, parts: &mut Vec<&'a str>) {
}
}
-pub fn compose_call_path(expr: &Expression) -> Option<String> {
+pub(crate) fn compose_call_path(expr: &Expression) -> Option<String> {
let mut segments = vec![];
compose_call_path_inner(expr, &mut segments);
if segments.is_empty() {
@@ -26,7 +26,7 @@ pub fn compose_call_path(expr: &Expression) -> Option<String> {
}
}
-pub fn compose_module_path(module: &NameOrAttribute) -> String {
+pub(crate) fn compose_module_path(module: &NameOrAttribute) -> String {
match module {
NameOrAttribute::N(name) => name.value.to_string(),
NameOrAttribute::A(attr) => {
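
// For context on the helpers touched above: `compose_call_path` walks a call/attribute
// chain, collects the name segments in order, and joins them into a dotted path, returning
// `None` when nothing was collected. A standalone sketch of that idea, using a hypothetical
// `Expr` enum in place of the libcst node types:

enum Expr {
    Name(&'static str),
    Attribute { value: Box<Expr>, attr: &'static str },
    Call { func: Box<Expr> },
}

// Recursively push the segments of the dotted path, left to right.
fn collect<'a>(expr: &'a Expr, parts: &mut Vec<&'a str>) {
    match expr {
        Expr::Name(name) => parts.push(name),
        Expr::Attribute { value, attr } => {
            collect(value, parts);
            parts.push(attr);
        }
        // For a call, the path is composed from the callee.
        Expr::Call { func } => collect(func, parts),
    }
}

fn compose(expr: &Expr) -> Option<String> {
    let mut parts = Vec::new();
    collect(expr, &mut parts);
    if parts.is_empty() {
        None
    } else {
        Some(parts.join("."))
    }
}

fn main() {
    // Roughly the shape of `os.path.join(...)`.
    let expr = Expr::Call {
        func: Box::new(Expr::Attribute {
            value: Box::new(Expr::Attribute {
                value: Box::new(Expr::Name("os")),
                attr: "path",
            }),
            attr: "join",
        }),
    };
    assert_eq!(compose(&expr).as_deref(), Some("os.path.join"));
}
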
diff --git a/crates/ruff/src/cst/matchers.rs b/crates/ruff/src/cst/matchers.rs
index fbebdca0a82c8..dad1e689eb142 100644
--- a/crates/ruff/src/cst/matchers.rs
+++ b/crates/ruff/src/cst/matchers.rs
@@ -1,37 +1,33 @@
use anyhow::{bail, Result};
use libcst_native::{
- Attribute, Call, Comparison, Dict, Expr, Expression, Import, ImportAlias, ImportFrom,
- ImportNames, Module, SimpleString, SmallStatement, Statement,
+ Arg, Attribute, Call, Comparison, CompoundStatement, Dict, Expression, FunctionDef,
+ GeneratorExp, If, Import, ImportAlias, ImportFrom, ImportNames, IndentedBlock, Lambda,
+ ListComp, Module, Name, SmallStatement, Statement, Suite, Tuple, With,
};
-pub fn match_module(module_text: &str) -> Result<Module> {
+pub(crate) fn match_module(module_text: &str) -> Result<Module> {
match libcst_native::parse_module(module_text, None) {
Ok(module) => Ok(module),
Err(_) => bail!("Failed to extract CST from source"),
}
}
-pub fn match_expression(expression_text: &str) -> Result<Expression> {
+pub(crate) fn match_expression(expression_text: &str) -> Result<Expression> {
match libcst_native::parse_expression(expression_text) {
Ok(expression) => Ok(expression),
- Err(_) => bail!("Failed to extract CST from source"),
+ Err(_) => bail!("Failed to extract expression from source"),
}
}
-pub fn match_expr<'a, 'b>(module: &'a mut Module<'b>) -> Result<&'a mut Expr<'b>> {
- if let Some(Statement::Simple(expr)) = module.body.first_mut() {
- if let Some(SmallStatement::Expr(expr)) = expr.body.first_mut() {
- Ok(expr)
- } else {
- bail!("Expected SmallStatement::Expr")
- }
- } else {
- bail!("Expected Statement::Simple")
+pub(crate) fn match_statement(statement_text: &str) -> Result<Statement> {
+ match libcst_native::parse_statement(statement_text) {
+ Ok(statement) => Ok(statement),
+ Err(_) => bail!("Failed to extract statement from source"),
}
}
-pub fn match_import<'a, 'b>(module: &'a mut Module<'b>) -> Result<&'a mut Import<'b>> {
- if let Some(Statement::Simple(expr)) = module.body.first_mut() {
+pub(crate) fn match_import<'a, 'b>(statement: &'a mut Statement<'b>) -> Result<&'a mut Import<'b>> {
+ if let Statement::Simple(expr) = statement {
if let Some(SmallStatement::Import(expr)) = expr.body.first_mut() {
Ok(expr)
} else {
@@ -42,8 +38,10 @@ pub fn match_import<'a, 'b>(module: &'a mut Module<'b>) -> Result<&'a mut Import
}
}
-pub fn match_import_from<'a, 'b>(module: &'a mut Module<'b>) -> Result<&'a mut ImportFrom<'b>> {
- if let Some(Statement::Simple(expr)) = module.body.first_mut() {
+pub(crate) fn match_import_from<'a, 'b>(
+ statement: &'a mut Statement<'b>,
+) -> Result<&'a mut ImportFrom<'b>> {
+ if let Statement::Simple(expr) = statement {
if let Some(SmallStatement::ImportFrom(expr)) = expr.body.first_mut() {
Ok(expr)
} else {
@@ -54,7 +52,7 @@ pub fn match_import_from<'a, 'b>(module: &'a mut Module<'b>) -> Result<&'a mut I
}
}
-pub fn match_aliases<'a, 'b>(
+pub(crate) fn match_aliases<'a, 'b>(
import_from: &'a mut ImportFrom<'b>,
) -> Result<&'a mut Vec<ImportAlias<'b>>> {
if let ImportNames::Aliases(aliases) = &mut import_from.names {
@@ -64,7 +62,7 @@ pub fn match_aliases<'a, 'b>(
}
}
-pub fn match_call<'a, 'b>(expression: &'a mut Expression<'b>) -> Result<&'a mut Call<'b>> {
+pub(crate) fn match_call<'a, 'b>(expression: &'a Expression<'b>) -> Result<&'a Call<'b>> {
if let Expression::Call(call) = expression {
Ok(call)
} else {
@@ -72,7 +70,17 @@ pub fn match_call<'a, 'b>(expression: &'a mut Expression<'b>) -> Result<&'a mut
}
}
-pub fn match_comparison<'a, 'b>(
+pub(crate) fn match_call_mut<'a, 'b>(
+ expression: &'a mut Expression<'b>,
+) -> Result<&'a mut Call<'b>> {
+ if let Expression::Call(call) = expression {
+ Ok(call)
+ } else {
+ bail!("Expected Expression::Call")
+ }
+}
+
+pub(crate) fn match_comparison<'a, 'b>(
expression: &'a mut Expression<'b>,
) -> Result<&'a mut Comparison<'b>> {
if let Expression::Comparison(comparison) = expression {
@@ -82,7 +90,7 @@ pub fn match_comparison<'a, 'b>(
}
}
-pub fn match_dict<'a, 'b>(expression: &'a mut Expression<'b>) -> Result<&'a mut Dict<'b>> {
+pub(crate) fn match_dict<'a, 'b>(expression: &'a mut Expression<'b>) -> Result<&'a mut Dict<'b>> {
if let Expression::Dict(dict) = expression {
Ok(dict)
} else {
@@ -90,7 +98,7 @@ pub fn match_dict<'a, 'b>(expression: &'a mut Expression<'b>) -> Result<&'a mut
}
}
-pub fn match_attribute<'a, 'b>(
+pub(crate) fn match_attribute<'a, 'b>(
expression: &'a mut Expression<'b>,
) -> Result<&'a mut Attribute<'b>> {
if let Expression::Attribute(attribute) = expression {
@@ -100,12 +108,100 @@ pub fn match_attribute<'a, 'b>(
}
}
-pub fn match_simple_string<'a, 'b>(
- expression: &'a mut Expression<'b>,
-) -> Result<&'a mut SimpleString<'b>> {
- if let Expression::SimpleString(simple_string) = expression {
- Ok(simple_string)
+pub(crate) fn match_name<'a, 'b>(expression: &'a Expression<'b>) -> Result<&'a Name<'b>> {
+ if let Expression::Name(name) = expression {
+ Ok(name)
+ } else {
+ bail!("Expected Expression::Name")
+ }
+}
+
+pub(crate) fn match_arg<'a, 'b>(call: &'a Call<'b>) -> Result<&'a Arg<'b>> {
+ if let Some(arg) = call.args.first() {
+ Ok(arg)
+ } else {
+ bail!("Expected Arg")
+ }
+}
+
+pub(crate) fn match_generator_exp<'a, 'b>(
+ expression: &'a Expression<'b>,
+) -> Result<&'a GeneratorExp<'b>> {
+ if let Expression::GeneratorExp(generator_exp) = expression {
+ Ok(generator_exp)
+ } else {
+ bail!("Expected Expression::GeneratorExp")
+ }
+}
+
+pub(crate) fn match_tuple<'a, 'b>(expression: &'a Expression<'b>) -> Result<&'a Tuple<'b>> {
+ if let Expression::Tuple(tuple) = expression {
+ Ok(tuple)
+ } else {
+ bail!("Expected Expression::Tuple")
+ }
+}
+
+pub(crate) fn match_list_comp<'a, 'b>(expression: &'a Expression<'b>) -> Result<&'a ListComp<'b>> {
+ if let Expression::ListComp(list_comp) = expression {
+ Ok(list_comp)
+ } else {
+ bail!("Expected Expression::ListComp")
+ }
+}
+
+pub(crate) fn match_lambda<'a, 'b>(expression: &'a Expression<'b>) -> Result<&'a Lambda<'b>> {
+ if let Expression::Lambda(lambda) = expression {
+ Ok(lambda)
+ } else {
+ bail!("Expected Expression::Lambda")
+ }
+}
+
+pub(crate) fn match_function_def<'a, 'b>(
+ statement: &'a mut Statement<'b>,
+) -> Result<&'a mut FunctionDef<'b>> {
+ if let Statement::Compound(compound) = statement {
+ if let CompoundStatement::FunctionDef(function_def) = compound {
+ Ok(function_def)
+ } else {
+ bail!("Expected CompoundStatement::FunctionDef")
+ }
+ } else {
+ bail!("Expected Statement::Compound")
+ }
+}
+
+pub(crate) fn match_indented_block<'a, 'b>(
+ suite: &'a mut Suite<'b>,
+) -> Result<&'a mut IndentedBlock<'b>> {
+ if let Suite::IndentedBlock(indented_block) = suite {
+ Ok(indented_block)
+ } else {
+ bail!("Expected Suite::IndentedBlock")
+ }
+}
+
+pub(crate) fn match_with<'a, 'b>(statement: &'a mut Statement<'b>) -> Result<&'a mut With<'b>> {
+ if let Statement::Compound(compound) = statement {
+ if let CompoundStatement::With(with) = compound {
+ Ok(with)
+ } else {
+ bail!("Expected CompoundStatement::With")
+ }
+ } else {
+ bail!("Expected Statement::Compound")
+ }
+}
+
+pub(crate) fn match_if<'a, 'b>(statement: &'a mut Statement<'b>) -> Result<&'a mut If<'b>> {
+ if let Statement::Compound(compound) = statement {
+ if let CompoundStatement::If(if_) = compound {
+ Ok(if_)
+ } else {
+ bail!("Expected CompoundStatement::If")
+ }
} else {
- bail!("Expected Expression::SimpleString")
+ bail!("Expected Statement::Compound")
}
}
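
// Every matcher in the file above follows the same shape: narrow a CST enum to one
// variant, return the inner node, and `bail!` with an `anyhow` error otherwise, so
// callers can chain the helpers with `?`. A minimal sketch of that pattern, using a
// made-up `Node` enum rather than the real libcst types:

use anyhow::{bail, Result};

enum Node {
    Pair(u32, u32),
    Leaf(u32),
}

// Narrow `Node` to its `Pair` variant, or fail with a descriptive error.
fn match_pair(node: &Node) -> Result<(u32, u32)> {
    if let Node::Pair(a, b) = node {
        Ok((*a, *b))
    } else {
        bail!("Expected Node::Pair")
    }
}

fn main() -> Result<()> {
    let node = Node::Pair(1, 2);
    // The `?` propagates the "wrong variant" error to the caller.
    let (a, b) = match_pair(&node)?;
    assert_eq!((a, b), (1, 2));
    assert!(match_pair(&Node::Leaf(3)).is_err());
    Ok(())
}
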
diff --git a/crates/ruff/src/cst/mod.rs b/crates/ruff/src/cst/mod.rs
index d4cf6cc5d76f2..9060c74698a17 100644
--- a/crates/ruff/src/cst/mod.rs
+++ b/crates/ruff/src/cst/mod.rs
@@ -1,2 +1,2 @@
-pub mod helpers;
-pub mod matchers;
+pub(crate) mod helpers;
+pub(crate) mod matchers;
diff --git a/crates/ruff/src/directives.rs b/crates/ruff/src/directives.rs
index 77e94177b40b2..2f3c0187ac4a9 100644
--- a/crates/ruff/src/directives.rs
+++ b/crates/ruff/src/directives.rs
@@ -1,12 +1,15 @@
-//! Extract `# noqa` and `# isort: skip` directives from tokenized source.
+//! Extract `# noqa`, `# isort: skip`, and `# TODO` directives from tokenized source.
+
+use std::str::FromStr;
-use crate::noqa::NoqaMapping;
use bitflags::bitflags;
-use ruff_python_ast::source_code::{Indexer, Locator};
use ruff_text_size::{TextLen, TextRange, TextSize};
use rustpython_parser::lexer::LexResult;
use rustpython_parser::Tok;
+use ruff_python_ast::source_code::{Indexer, Locator};
+
+use crate::noqa::NoqaMapping;
use crate::settings::Settings;
bitflags! {
@@ -82,11 +85,7 @@ pub fn extract_directives(
}
/// Extract a mapping from logical line to noqa line.
-pub fn extract_noqa_line_for(
- lxr: &[LexResult],
- locator: &Locator,
- indexer: &Indexer,
-) -> NoqaMapping {
+fn extract_noqa_line_for(lxr: &[LexResult], locator: &Locator, indexer: &Indexer) -> NoqaMapping {
let mut string_mappings = Vec::new();
for (tok, range) in lxr.iter().flatten() {
@@ -102,7 +101,10 @@ pub fn extract_noqa_line_for(
..
} => {
if locator.contains_line_break(*range) {
- string_mappings.push(*range);
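+ // Extend the mapping to start at the beginning of the line on which the multi-line string begins.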
+ string_mappings.push(TextRange::new(
+ locator.line_start(range.start()),
+ range.end(),
+ ));
}
}
@@ -162,7 +164,7 @@ pub fn extract_noqa_line_for(
}
/// Extract a set of ranges over which to disable isort.
-pub fn extract_isort_directives(lxr: &[LexResult], locator: &Locator) -> IsortDirectives {
+fn extract_isort_directives(lxr: &[LexResult], locator: &Locator) -> IsortDirectives {
let mut exclusions: Vec<TextRange> = Vec::default();
let mut splits: Vec<TextSize> = Vec::default();
let mut off: Option<TextSize> = None;
@@ -217,14 +219,144 @@ pub fn extract_isort_directives(lxr: &[LexResult], locator: &Locator) -> IsortDi
}
}
+/// A comment that contains a [`TodoDirective`]
+pub(crate) struct TodoComment<'a> {
+ /// The comment's text
+ pub(crate) content: &'a str,
+ /// The directive found within the comment.
+ pub(crate) directive: TodoDirective<'a>,
+ /// The comment's actual [`TextRange`].
+ pub(crate) range: TextRange,
+ /// The comment range's position in [`Indexer`].comment_ranges()
+ pub(crate) range_index: usize,
+}
+
+impl<'a> TodoComment<'a> {
+ /// Attempt to transform a normal comment into a [`TodoComment`].
+ pub(crate) fn from_comment(
+ content: &'a str,
+ range: TextRange,
+ range_index: usize,
+ ) -> Option<Self> {
+ TodoDirective::from_comment(content, range).map(|directive| Self {
+ content,
+ directive,
+ range,
+ range_index,
+ })
+ }
+}
+
+#[derive(Debug, PartialEq)]
+pub(crate) struct TodoDirective<'a> {
+ /// The actual directive
+ pub(crate) content: &'a str,
+ /// The directive's [`TextRange`] in the file.
+ pub(crate) range: TextRange,
+ /// The directive's kind: HACK, XXX, FIXME, or TODO.
+ pub(crate) kind: TodoDirectiveKind,
+}
+
+impl<'a> TodoDirective<'a> {
+ /// Extract a [`TodoDirective`] from a comment.
+ pub(crate) fn from_comment(comment: &'a str, comment_range: TextRange) -> Option<Self> {
+ // The directive's offset from the start of the comment.
+ let mut relative_offset = TextSize::new(0);
+ let mut subset_opt = Some(comment);
+
+ // Loop over `#`-delimited sections of the comment to check for directives. This will
+ // correctly handle cases like `# foo # TODO`.
+ while let Some(subset) = subset_opt {
+ let trimmed = subset.trim_start_matches('#').trim_start();
+
+ let offset = subset.text_len() - trimmed.text_len();
+ relative_offset += offset;
+
+ // If we detect a TodoDirectiveKind variant substring in the comment, construct and
+ // return the appropriate TodoDirective
+ if let Ok(directive_kind) = trimmed.parse::<TodoDirectiveKind>() {
+ let len = directive_kind.len();
+
+ return Some(Self {
+ content: &comment[TextRange::at(relative_offset, len)],
+ range: TextRange::at(comment_range.start() + relative_offset, len),
+ kind: directive_kind,
+ });
+ }
+
+ // Shrink the subset to check for the next phrase starting with "#".
+ subset_opt = if let Some(new_offset) = trimmed.find('#') {
+ relative_offset += TextSize::try_from(new_offset).unwrap();
+ subset.get(relative_offset.to_usize()..)
+ } else {
+ None
+ };
+ }
+
+ None
+ }
+}
+
+#[derive(Debug, PartialEq)]
+pub(crate) enum TodoDirectiveKind {
+ Todo,
+ Fixme,
+ Xxx,
+ Hack,
+}
+
+impl FromStr for TodoDirectiveKind {
+ type Err = ();
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ // Candidate lengths of the directive keywords: XXX is 3, TODO and HACK are 4, FIXME is 5.
+ for length in [3, 4, 5] {
+ let Some(substr) = s.get(..length) else {
+ break;
+ };
+
+ match substr.to_lowercase().as_str() {
+ "fixme" => {
+ return Ok(TodoDirectiveKind::Fixme);
+ }
+ "hack" => {
+ return Ok(TodoDirectiveKind::Hack);
+ }
+ "todo" => {
+ return Ok(TodoDirectiveKind::Todo);
+ }
+ "xxx" => {
+ return Ok(TodoDirectiveKind::Xxx);
+ }
+ _ => continue,
+ }
+ }
+
+ Err(())
+ }
+}
+
+impl TodoDirectiveKind {
+ fn len(&self) -> TextSize {
+ match self {
+ TodoDirectiveKind::Xxx => TextSize::new(3),
+ TodoDirectiveKind::Hack | TodoDirectiveKind::Todo => TextSize::new(4),
+ TodoDirectiveKind::Fixme => TextSize::new(5),
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
- use ruff_python_ast::source_code::{Indexer, Locator};
use ruff_text_size::{TextLen, TextRange, TextSize};
use rustpython_parser::lexer::LexResult;
use rustpython_parser::{lexer, Mode};
- use crate::directives::{extract_isort_directives, extract_noqa_line_for};
+ use ruff_python_ast::source_code::{Indexer, Locator};
+
+ use crate::directives::{
+ extract_isort_directives, extract_noqa_line_for, TodoDirective, TodoDirectiveKind,
+ };
use crate::noqa::NoqaMapping;
fn noqa_mappings(contents: &str) -> NoqaMapping {
@@ -271,7 +403,7 @@ y = 2
z = x + 1";
assert_eq!(
noqa_mappings(contents),
- NoqaMapping::from_iter([TextRange::new(TextSize::from(4), TextSize::from(22)),])
+ NoqaMapping::from_iter([TextRange::new(TextSize::from(0), TextSize::from(22)),])
);
let contents = "x = 1
@@ -282,7 +414,7 @@ ghi
z = 2";
assert_eq!(
noqa_mappings(contents),
- NoqaMapping::from_iter([TextRange::new(TextSize::from(10), TextSize::from(28))])
+ NoqaMapping::from_iter([TextRange::new(TextSize::from(6), TextSize::from(28))])
);
let contents = "x = 1
@@ -292,7 +424,7 @@ ghi
'''";
assert_eq!(
noqa_mappings(contents),
- NoqaMapping::from_iter([TextRange::new(TextSize::from(10), TextSize::from(28))])
+ NoqaMapping::from_iter([TextRange::new(TextSize::from(6), TextSize::from(28))])
);
let contents = r#"x = \
@@ -427,4 +559,62 @@ z = x + 1";
vec![TextSize::from(13)]
);
}
+
+ #[test]
+ fn todo_directives() {
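+ // Each case asserts on the extracted directive text, its absolute range, and its kind.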
+ let test_comment = "# TODO: todo tag";
+ let test_comment_range = TextRange::at(TextSize::new(0), test_comment.text_len());
+ let expected = TodoDirective {
+ content: "TODO",
+ range: TextRange::new(TextSize::new(2), TextSize::new(6)),
+ kind: TodoDirectiveKind::Todo,
+ };
+ assert_eq!(
+ expected,
+ TodoDirective::from_comment(test_comment, test_comment_range).unwrap()
+ );
+
+ let test_comment = "#TODO: todo tag";
+ let test_comment_range = TextRange::at(TextSize::new(0), test_comment.text_len());
+ let expected = TodoDirective {
+ content: "TODO",
+ range: TextRange::new(TextSize::new(1), TextSize::new(5)),
+ kind: TodoDirectiveKind::Todo,
+ };
+ assert_eq!(
+ expected,
+ TodoDirective::from_comment(test_comment, test_comment_range).unwrap()
+ );
+
+ let test_comment = "# fixme: fixme tag";
+ let test_comment_range = TextRange::at(TextSize::new(0), test_comment.text_len());
+ let expected = TodoDirective {
+ content: "fixme",
+ range: TextRange::new(TextSize::new(2), TextSize::new(7)),
+ kind: TodoDirectiveKind::Fixme,
+ };
+ assert_eq!(
+ expected,
+ TodoDirective::from_comment(test_comment, test_comment_range).unwrap()
+ );
+
+ let test_comment = "# noqa # TODO: todo";
+ let test_comment_range = TextRange::at(TextSize::new(0), test_comment.text_len());
+ let expected = TodoDirective {
+ content: "TODO",
+ range: TextRange::new(TextSize::new(9), TextSize::new(13)),
+ kind: TodoDirectiveKind::Todo,
+ };
+ assert_eq!(
+ expected,
+ TodoDirective::from_comment(test_comment, test_comment_range).unwrap()
+ );
+
+ let test_comment = "# no directive";
+ let test_comment_range = TextRange::at(TextSize::new(0), test_comment.text_len());
+ assert_eq!(
+ None,
+ TodoDirective::from_comment(test_comment, test_comment_range)
+ );
+ }
}
diff --git a/crates/ruff/src/doc_lines.rs b/crates/ruff/src/doc_lines.rs
index 7cb8de1ff6972..3e2de44920b2a 100644
--- a/crates/ruff/src/doc_lines.rs
+++ b/crates/ruff/src/doc_lines.rs
@@ -1,33 +1,31 @@
//! Doc line extraction. In this context, a doc line is a line consisting of a
//! standalone comment or a constant string statement.
-use ruff_text_size::{TextRange, TextSize};
use std::iter::FusedIterator;
-use ruff_python_ast::source_code::Locator;
-use rustpython_parser::ast::{Constant, ExprKind, Stmt, StmtKind, Suite};
+use ruff_text_size::TextSize;
+use rustpython_parser::ast::{self, Constant, Expr, Ranged, Stmt, Suite};
use rustpython_parser::lexer::LexResult;
use rustpython_parser::Tok;
-use ruff_python_ast::visitor;
-use ruff_python_ast::visitor::Visitor;
+use ruff_newlines::UniversalNewlineIterator;
+use ruff_python_ast::source_code::Locator;
+use ruff_python_ast::statement_visitor::{walk_stmt, StatementVisitor};
/// Extract doc lines (standalone comments) from a token sequence.
-pub fn doc_lines_from_tokens<'a>(lxr: &'a [LexResult], locator: &'a Locator<'a>) -> DocLines<'a> {
- DocLines::new(lxr, locator)
+pub(crate) fn doc_lines_from_tokens(lxr: &[LexResult]) -> DocLines {
+ DocLines::new(lxr)
}
-pub struct DocLines<'a> {
+pub(crate) struct DocLines<'a> {
inner: std::iter::Flatten<std::slice::Iter<'a, LexResult>>,
- locator: &'a Locator<'a>,
prev: TextSize,
}
impl<'a> DocLines<'a> {
- fn new(lxr: &'a [LexResult], locator: &'a Locator) -> Self {
+ fn new(lxr: &'a [LexResult]) -> Self {
Self {
inner: lxr.iter().flatten(),
- locator,
prev: TextSize::default(),
}
}
@@ -43,15 +41,11 @@ impl Iterator for DocLines<'_> {
match tok {
Tok::Comment(..) => {
- if at_start_of_line
- || self
- .locator
- .contains_line_break(TextRange::new(self.prev, range.start()))
- {
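+ // A comment only counts as a doc line when it starts its own line.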
+ if at_start_of_line {
break Some(range.start());
}
}
- Tok::Newline => {
+ Tok::Newline | Tok::NonLogicalNewline => {
at_start_of_line = true;
}
Tok::Indent | Tok::Dedent => {
@@ -69,29 +63,43 @@ impl Iterator for DocLines<'_> {
impl FusedIterator for DocLines<'_> {}
-#[derive(Default)]
-struct StringLinesVisitor {
+struct StringLinesVisitor<'a> {
string_lines: Vec<TextSize>,
+ locator: &'a Locator<'a>,
}
-impl Visitor<'_> for StringLinesVisitor {
+impl StatementVisitor<'_> for StringLinesVisitor<'_> {
fn visit_stmt(&mut self, stmt: &Stmt) {
- if let StmtKind::Expr { value } = &stmt.node {
- if let ExprKind::Constant {
+ if let Stmt::Expr(ast::StmtExpr { value, range: _ }) = stmt {
+ if let Expr::Constant(ast::ExprConstant {
value: Constant::Str(..),
..
- } = &value.node
+ }) = value.as_ref()
{
- self.string_lines.push(value.start());
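+ // A multi-line string constant contributes one doc line per physical line it spans.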
+ for line in UniversalNewlineIterator::with_offset(
+ self.locator.slice(value.range()),
+ value.start(),
+ ) {
+ self.string_lines.push(line.start());
+ }
}
}
- visitor::walk_stmt(self, stmt);
+ walk_stmt(self, stmt);
+ }
+}
+
+impl<'a> StringLinesVisitor<'a> {
+ fn new(locator: &'a Locator<'a>) -> Self {
+ Self {
+ string_lines: Vec::new(),
+ locator,
+ }
}
}
/// Extract doc lines (standalone strings) start positions from an AST.
-pub fn doc_lines_from_ast(python_ast: &Suite) -> Vec<TextSize> {
- let mut visitor = StringLinesVisitor::default();
+pub(crate) fn doc_lines_from_ast(python_ast: &Suite, locator: &Locator) -> Vec<TextSize> {
+ let mut visitor = StringLinesVisitor::new(locator);
visitor.visit_body(python_ast);
visitor.string_lines
}
diff --git a/crates/ruff/src/docstrings/definition.rs b/crates/ruff/src/docstrings/definition.rs
deleted file mode 100644
index cf338a364adb4..0000000000000
--- a/crates/ruff/src/docstrings/definition.rs
+++ /dev/null
@@ -1,140 +0,0 @@
-use ruff_text_size::{TextRange, TextSize};
-use rustpython_parser::ast::{Expr, Stmt};
-use std::fmt::{Debug, Formatter};
-use std::ops::Deref;
-
-use ruff_python_semantic::analyze::visibility::{
- class_visibility, function_visibility, method_visibility, Modifier, Visibility, VisibleScope,
-};
-
-#[derive(Debug, Clone)]
-pub enum DefinitionKind<'a> {
- Module,
- Package,
- Class(&'a Stmt),
- NestedClass(&'a Stmt),
- Function(&'a Stmt),
- NestedFunction(&'a Stmt),
- Method(&'a Stmt),
-}
-
-#[derive(Debug)]
-pub struct Definition<'a> {
- pub kind: DefinitionKind<'a>,
- pub docstring: Option<&'a Expr>,
-}
-
-#[derive(Debug)]
-pub struct Docstring<'a> {
- pub kind: DefinitionKind<'a>,
- pub expr: &'a Expr,
- /// The content of the docstring, including the leading and trailing quotes.
- pub contents: &'a str,
-
- /// The range of the docstring body (without the quotes). The range is relative to [`Self::contents`].
- pub body_range: TextRange,
- pub indentation: &'a str,
-}
-
-impl<'a> Docstring<'a> {
- pub fn body(&self) -> DocstringBody {
- DocstringBody { docstring: self }
- }
-
- pub const fn start(&self) -> TextSize {
- self.expr.start()
- }
-
- pub const fn end(&self) -> TextSize {
- self.expr.end()
- }
-
- pub const fn range(&self) -> TextRange {
- self.expr.range()
- }
-
- pub fn leading_quote(&self) -> &'a str {
- &self.contents[TextRange::up_to(self.body_range.start())]
- }
-}
-
-#[derive(Copy, Clone)]
-pub struct DocstringBody<'a> {
- docstring: &'a Docstring<'a>,
-}
-
-impl<'a> DocstringBody<'a> {
- #[inline]
- pub fn start(self) -> TextSize {
- self.range().start()
- }
-
- #[inline]
- pub fn end(self) -> TextSize {
- self.range().end()
- }
-
- pub fn range(self) -> TextRange {
- self.docstring.body_range + self.docstring.start()
- }
-
- pub fn as_str(self) -> &'a str {
- &self.docstring.contents[self.docstring.body_range]
- }
-}
-
-impl Deref for DocstringBody<'_> {
- type Target = str;
-
- fn deref(&self) -> &Self::Target {
- self.as_str()
- }
-}
-
-impl Debug for DocstringBody<'_> {
- fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
- f.debug_struct("DocstringBody")
- .field("text", &self.as_str())
- .field("range", &self.range())
- .finish()
- }
-}
-
-#[derive(Copy, Clone)]
-pub enum Documentable {
- Class,
- Function,
-}
-
-pub fn transition_scope(scope: VisibleScope, stmt: &Stmt, kind: Documentable) -> VisibleScope {
- match kind {
- Documentable::Function => VisibleScope {
- modifier: Modifier::Function,
- visibility: match scope {
- VisibleScope {
- modifier: Modifier::Module,
- visibility: Visibility::Public,
- } => function_visibility(stmt),
- VisibleScope {
- modifier: Modifier::Class,
- visibility: Visibility::Public,
- } => method_visibility(stmt),
- _ => Visibility::Private,
- },
- },
- Documentable::Class => VisibleScope {
- modifier: Modifier::Class,
- visibility: match scope {
- VisibleScope {
- modifier: Modifier::Module,
- visibility: Visibility::Public,
- } => class_visibility(stmt),
- VisibleScope {
- modifier: Modifier::Class,
- visibility: Visibility::Public,
- } => class_visibility(stmt),
- _ => Visibility::Private,
- },
- },
- }
-}
diff --git a/crates/ruff/src/docstrings/extraction.rs b/crates/ruff/src/docstrings/extraction.rs
index 7fbe79a2d1a95..aa64249425074 100644
--- a/crates/ruff/src/docstrings/extraction.rs
+++ b/crates/ruff/src/docstrings/extraction.rs
@@ -1,84 +1,90 @@
//! Extract docstrings from an AST.
-use rustpython_parser::ast::{Constant, Expr, ExprKind, Stmt, StmtKind};
+use rustpython_parser::ast::{self, Constant, Expr, Stmt};
-use ruff_python_semantic::analyze::visibility;
-
-use crate::docstrings::definition::{Definition, DefinitionKind, Documentable};
+use ruff_python_semantic::definition::{Definition, DefinitionId, Definitions, Member, MemberKind};
/// Extract a docstring from a function or class body.
-pub fn docstring_from(suite: &[Stmt]) -> Option<&Expr> {
+pub(crate) fn docstring_from(suite: &[Stmt]) -> Option<&Expr> {
let stmt = suite.first()?;
// Require the docstring to be a standalone expression.
- let StmtKind::Expr { value } = &stmt.node else {
+ let Stmt::Expr(ast::StmtExpr { value, range: _ }) = stmt else {
return None;
};
// Only match strings.
if !matches!(
- &value.node,
- ExprKind::Constant {
+ value.as_ref(),
+ Expr::Constant(ast::ExprConstant {
value: Constant::Str(_),
..
- }
+ })
) {
return None;
}
Some(value)
}
+/// Extract a docstring from a `Definition`.
+pub(crate) fn extract_docstring<'a>(definition: &'a Definition<'a>) -> Option<&'a Expr> {
+ match definition {
+ Definition::Module(module) => docstring_from(module.python_ast),
+ Definition::Member(member) => {
+ if let Stmt::ClassDef(ast::StmtClassDef { body, .. })
+ | Stmt::FunctionDef(ast::StmtFunctionDef { body, .. })
+ | Stmt::AsyncFunctionDef(ast::StmtAsyncFunctionDef { body, .. }) = &member.stmt
+ {
+ docstring_from(body)
+ } else {
+ None
+ }
+ }
+ }
+}
+
+#[derive(Copy, Clone)]
+pub(crate) enum ExtractionTarget {
+ Class,
+ Function,
+}
+
/// Extract a `Definition` from the AST node defined by a `Stmt`.
-pub fn extract<'a>(
- scope: visibility::VisibleScope,
+pub(crate) fn extract_definition<'a>(
+ target: ExtractionTarget,
stmt: &'a Stmt,
- body: &'a [Stmt],
- kind: Documentable,
-) -> Definition<'a> {
- let expr = docstring_from(body);
- match kind {
- Documentable::Function => match scope {
- visibility::VisibleScope {
- modifier: visibility::Modifier::Module,
- ..
- } => Definition {
- kind: DefinitionKind::Function(stmt),
- docstring: expr,
+ parent: DefinitionId,
+ definitions: &Definitions<'a>,
+) -> Member<'a> {
+ match target {
+ ExtractionTarget::Function => match &definitions[parent] {
+ Definition::Module(..) => Member {
+ parent,
+ kind: MemberKind::Function,
+ stmt,
},
- visibility::VisibleScope {
- modifier: visibility::Modifier::Class,
+ Definition::Member(Member {
+ kind: MemberKind::Class | MemberKind::NestedClass,
..
- } => Definition {
- kind: DefinitionKind::Method(stmt),
- docstring: expr,
+ }) => Member {
+ parent,
+ kind: MemberKind::Method,
+ stmt,
},
- visibility::VisibleScope {
- modifier: visibility::Modifier::Function,
- ..
- } => Definition {
- kind: DefinitionKind::NestedFunction(stmt),
- docstring: expr,
+ Definition::Member(..) => Member {
+ parent,
+ kind: MemberKind::NestedFunction,
+ stmt,
},
},
- Documentable::Class => match scope {
- visibility::VisibleScope {
- modifier: visibility::Modifier::Module,
- ..
- } => Definition {
- kind: DefinitionKind::Class(stmt),
- docstring: expr,
+ ExtractionTarget::Class => match &definitions[parent] {
+ Definition::Module(..) => Member {
+ parent,
+ kind: MemberKind::Class,
+ stmt,
},
- visibility::VisibleScope {
- modifier: visibility::Modifier::Class,
- ..
- } => Definition {
- kind: DefinitionKind::NestedClass(stmt),
- docstring: expr,
- },
- visibility::VisibleScope {
- modifier: visibility::Modifier::Function,
- ..
- } => Definition {
- kind: DefinitionKind::NestedClass(stmt),
- docstring: expr,
+ Definition::Member(..) => Member {
+ parent,
+ kind: MemberKind::NestedClass,
+ stmt,
},
},
}
diff --git a/crates/ruff/src/docstrings/mod.rs b/crates/ruff/src/docstrings/mod.rs
index 468f9f460845e..b9df42d41598b 100644
--- a/crates/ruff/src/docstrings/mod.rs
+++ b/crates/ruff/src/docstrings/mod.rs
@@ -1,6 +1,84 @@
-pub mod definition;
-pub mod extraction;
-pub mod google;
-pub mod numpy;
-pub mod sections;
-pub mod styles;
+use std::fmt::{Debug, Formatter};
+use std::ops::Deref;
+
+use ruff_text_size::{TextRange, TextSize};
+use rustpython_parser::ast::{Expr, Ranged};
+
+use ruff_python_semantic::definition::Definition;
+
+pub(crate) mod extraction;
+pub(crate) mod google;
+pub(crate) mod numpy;
+pub(crate) mod sections;
+pub(crate) mod styles;
+
+#[derive(Debug)]
+pub(crate) struct Docstring<'a> {
+ pub(crate) definition: &'a Definition<'a>,
+ pub(crate) expr: &'a Expr,
+ /// The content of the docstring, including the leading and trailing quotes.
+ pub(crate) contents: &'a str,
+
+ /// The range of the docstring body (without the quotes). The range is relative to [`Self::contents`].
+ pub(crate) body_range: TextRange,
+ pub(crate) indentation: &'a str,
+}
+
+impl<'a> Docstring<'a> {
+ pub(crate) fn body(&self) -> DocstringBody {
+ DocstringBody { docstring: self }
+ }
+
+ pub(crate) fn start(&self) -> TextSize {
+ self.expr.start()
+ }
+
+ pub(crate) fn end(&self) -> TextSize {
+ self.expr.end()
+ }
+
+ pub(crate) fn range(&self) -> TextRange {
+ self.expr.range()
+ }
+
+ pub(crate) fn leading_quote(&self) -> &'a str {
+ &self.contents[TextRange::up_to(self.body_range.start())]
+ }
+}
+
+#[derive(Copy, Clone)]
+pub(crate) struct DocstringBody<'a> {
+ docstring: &'a Docstring<'a>,
+}
+
+impl<'a> DocstringBody<'a> {
+ #[inline]
+ pub(crate) fn start(self) -> TextSize {
+ self.range().start()
+ }
+
+ pub(crate) fn range(self) -> TextRange {
+ self.docstring.body_range + self.docstring.start()
+ }
+
+ pub(crate) fn as_str(self) -> &'a str {
+ &self.docstring.contents[self.docstring.body_range]
+ }
+}
+
+impl Deref for DocstringBody<'_> {
+ type Target = str;
+
+ fn deref(&self) -> &Self::Target {
+ self.as_str()
+ }
+}
+
+impl Debug for DocstringBody<'_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("DocstringBody")
+ .field("text", &self.as_str())
+ .field("range", &self.range())
+ .finish()
+ }
+}
diff --git a/crates/ruff/src/docstrings/sections.rs b/crates/ruff/src/docstrings/sections.rs
index 1685c7e76181b..f1b4ced28661e 100644
--- a/crates/ruff/src/docstrings/sections.rs
+++ b/crates/ruff/src/docstrings/sections.rs
@@ -1,15 +1,17 @@
-use ruff_python_ast::newlines::{StrExt, UniversalNewlineIterator};
-use ruff_text_size::{TextLen, TextRange, TextSize};
use std::fmt::{Debug, Formatter};
use std::iter::FusedIterator;
+
+use ruff_text_size::{TextLen, TextRange, TextSize};
use strum_macros::EnumIter;
-use crate::docstrings::definition::{Docstring, DocstringBody};
-use crate::docstrings::styles::SectionStyle;
+use ruff_newlines::{StrExt, UniversalNewlineIterator};
use ruff_python_ast::whitespace;
+use crate::docstrings::styles::SectionStyle;
+use crate::docstrings::{Docstring, DocstringBody};
+
#[derive(EnumIter, PartialEq, Eq, Debug, Clone, Copy)]
-pub enum SectionKind {
+pub(crate) enum SectionKind {
Args,
Arguments,
Attention,
@@ -48,7 +50,7 @@ pub enum SectionKind {
}
impl SectionKind {
- pub fn from_str(s: &str) -> Option {
+ pub(crate) fn from_str(s: &str) -> Option {
match s.to_ascii_lowercase().as_str() {
"args" => Some(Self::Args),
"arguments" => Some(Self::Arguments),
@@ -89,7 +91,7 @@ impl SectionKind {
}
}
- pub fn as_str(self) -> &'static str {
+ pub(crate) fn as_str(self) -> &'static str {
match self {
Self::Args => "Args",
Self::Arguments => "Arguments",
@@ -137,7 +139,7 @@ pub(crate) struct SectionContexts<'a> {
impl<'a> SectionContexts<'a> {
/// Extract all `SectionContext` values from a docstring.
- pub fn from_docstring(docstring: &'a Docstring<'a>, style: SectionStyle) -> Self {
+ pub(crate) fn from_docstring(docstring: &'a Docstring<'a>, style: SectionStyle) -> Self {
let contents = docstring.body();
let mut contexts = Vec::new();
@@ -190,11 +192,11 @@ impl<'a> SectionContexts<'a> {
}
}
- pub fn len(&self) -> usize {
+ pub(crate) fn len(&self) -> usize {
self.contexts.len()
}
- pub fn iter(&self) -> SectionContextsIter {
+ pub(crate) fn iter(&self) -> SectionContextsIter {
SectionContextsIter {
docstring_body: self.docstring.body(),
inner: self.contexts.iter(),
@@ -217,7 +219,7 @@ impl Debug for SectionContexts<'_> {
}
}
-pub struct SectionContextsIter<'a> {
+pub(crate) struct SectionContextsIter<'a> {
docstring_body: DocstringBody<'a>,
inner: std::slice::Iter<'a, SectionContextData>,
}
@@ -266,28 +268,24 @@ struct SectionContextData {
summary_full_end: TextSize,
}
-pub struct SectionContext<'a> {
+pub(crate) struct SectionContext<'a> {
data: &'a SectionContextData,
docstring_body: DocstringBody<'a>,
}
impl<'a> SectionContext<'a> {
- pub fn is_last(&self) -> bool {
- self.range().end() == self.docstring_body.end()
- }
-
/// The `kind` of the section, e.g. [`SectionKind::Args`] or [`SectionKind::Returns`].
- pub const fn kind(&self) -> SectionKind {
+ pub(crate) const fn kind(&self) -> SectionKind {
self.data.kind
}
/// The name of the section as it appears in the docstring, e.g. "Args" or "Returns".
- pub fn section_name(&self) -> &'a str {
+ pub(crate) fn section_name(&self) -> &'a str {
&self.docstring_body.as_str()[self.data.name_range]
}
/// Returns the rest of the summary line after the section name.
- pub fn summary_after_section_name(&self) -> &'a str {
+ pub(crate) fn summary_after_section_name(&self) -> &'a str {
&self.summary_line()[usize::from(self.data.name_range.end() - self.data.range.start())..]
}
@@ -296,17 +294,12 @@ impl<'a> SectionContext<'a> {
}
/// The absolute range of the section name
- pub fn section_name_range(&self) -> TextRange {
+ pub(crate) fn section_name_range(&self) -> TextRange {
self.data.name_range + self.offset()
}
- /// Summary range relative to the start of the document. Includes the trailing newline.
- pub fn summary_full_range(&self) -> TextRange {
- self.summary_full_range_relative() + self.offset()
- }
-
/// The absolute range of the summary line, excluding any trailing newline character.
- pub fn summary_range(&self) -> TextRange {
+ pub(crate) fn summary_range(&self) -> TextRange {
TextRange::at(self.range().start(), self.summary_line().text_len())
}
@@ -321,12 +314,12 @@ impl<'a> SectionContext<'a> {
}
/// The absolute range of the full-section.
- pub fn range(&self) -> TextRange {
+ pub(crate) fn range(&self) -> TextRange {
self.range_relative() + self.offset()
}
/// Summary line without the trailing newline characters
- pub fn summary_line(&self) -> &'a str {
+ pub(crate) fn summary_line(&self) -> &'a str {
let full_summary = &self.docstring_body.as_str()[self.summary_full_range_relative()];
let mut bytes = full_summary.bytes().rev();
@@ -347,14 +340,14 @@ impl<'a> SectionContext<'a> {
}
/// Returns the text of the last line of the previous section or an empty string if it is the first section.
- pub fn previous_line(&self) -> Option<&'a str> {
+ pub(crate) fn previous_line(&self) -> Option<&'a str> {
let previous =
&self.docstring_body.as_str()[TextRange::up_to(self.range_relative().start())];
previous.universal_newlines().last().map(|l| l.as_str())
}
/// Returns the lines belonging to this section after the summary line.
- pub fn following_lines(&self) -> UniversalNewlineIterator<'a> {
+ pub(crate) fn following_lines(&self) -> UniversalNewlineIterator<'a> {
let lines = self.following_lines_str();
UniversalNewlineIterator::with_offset(lines, self.offset() + self.data.summary_full_end)
}
@@ -369,7 +362,7 @@ impl<'a> SectionContext<'a> {
}
/// Returns the absolute range of the following lines.
- pub fn following_range(&self) -> TextRange {
+ pub(crate) fn following_range(&self) -> TextRange {
self.following_range_relative() + self.offset()
}
}
diff --git a/crates/ruff/src/flake8_to_ruff/converter.rs b/crates/ruff/src/flake8_to_ruff/converter.rs
index 8e54ae57bcfc7..5bccf6369199d 100644
--- a/crates/ruff/src/flake8_to_ruff/converter.rs
+++ b/crates/ruff/src/flake8_to_ruff/converter.rs
@@ -3,16 +3,14 @@ use std::collections::{HashMap, HashSet};
use anyhow::Result;
use itertools::Itertools;
-use super::external_config::ExternalConfig;
-use super::plugin::Plugin;
-use super::{parser, plugin};
+use crate::line_width::LineLength;
use crate::registry::Linter;
use crate::rule_selector::RuleSelector;
use crate::rules::flake8_pytest_style::types::{
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
};
use crate::rules::flake8_quotes::settings::Quote;
-use crate::rules::flake8_tidy_imports::relative_imports::Strictness;
+use crate::rules::flake8_tidy_imports::settings::Strictness;
use crate::rules::pydocstyle::settings::Convention;
use crate::rules::{
flake8_annotations, flake8_bugbear, flake8_builtins, flake8_errmsg, flake8_pytest_style,
@@ -23,6 +21,10 @@ use crate::settings::pyproject::Pyproject;
use crate::settings::types::PythonVersion;
use crate::warn_user;
+use super::external_config::ExternalConfig;
+use super::plugin::Plugin;
+use super::{parser, plugin};
+
const DEFAULT_SELECTORS: &[RuleSelector] = &[
RuleSelector::Linter(Linter::Pyflakes),
RuleSelector::Linter(Linter::Pycodestyle),
@@ -119,7 +121,9 @@ pub fn convert(
options.builtins = Some(parser::parse_strings(value.as_ref()));
}
"max-line-length" | "max_line_length" => match value.parse::() {
- Ok(line_length) => options.line_length = Some(line_length),
+ Ok(line_length) => {
+ options.line_length = Some(LineLength::from(line_length));
+ }
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
@@ -402,7 +406,7 @@ pub fn convert(
// Extract any settings from the existing `pyproject.toml`.
if let Some(black) = &external_config.black {
if let Some(line_length) = &black.line_length {
- options.line_length = Some(*line_length);
+ options.line_length = Some(LineLength::from(*line_length));
}
if let Some(target_version) = &black.target_version {
@@ -456,11 +460,10 @@ mod tests {
use pep440_rs::VersionSpecifiers;
use pretty_assertions::assert_eq;
- use super::super::plugin::Plugin;
- use super::convert;
use crate::flake8_to_ruff::converter::DEFAULT_SELECTORS;
use crate::flake8_to_ruff::pep621::Project;
use crate::flake8_to_ruff::ExternalConfig;
+ use crate::line_width::LineLength;
use crate::registry::Linter;
use crate::rule_selector::RuleSelector;
use crate::rules::pydocstyle::settings::Convention;
@@ -469,6 +472,9 @@ mod tests {
use crate::settings::pyproject::Pyproject;
use crate::settings::types::PythonVersion;
+ use super::super::plugin::Plugin;
+ use super::convert;
+
fn default_options(plugins: impl IntoIterator<Item = Plugin>) -> Options {
Options {
ignore: Some(vec![]),
@@ -508,7 +514,7 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
- line_length: Some(100),
+ line_length: Some(LineLength::from(100)),
..default_options([])
});
assert_eq!(actual, expected);
@@ -527,7 +533,7 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
- line_length: Some(100),
+ line_length: Some(LineLength::from(100)),
..default_options([])
});
assert_eq!(actual, expected);
diff --git a/crates/ruff/src/flake8_to_ruff/mod.rs b/crates/ruff/src/flake8_to_ruff/mod.rs
index 629b4831c2d9a..1b847b97d42b4 100644
--- a/crates/ruff/src/flake8_to_ruff/mod.rs
+++ b/crates/ruff/src/flake8_to_ruff/mod.rs
@@ -1,3 +1,8 @@
+pub use converter::convert;
+pub use external_config::ExternalConfig;
+pub use plugin::Plugin;
+pub use pyproject::parse;
+
mod black;
mod converter;
mod external_config;
@@ -6,8 +11,3 @@ mod parser;
pub mod pep621;
mod plugin;
mod pyproject;
-
-pub use converter::convert;
-pub use external_config::ExternalConfig;
-pub use plugin::Plugin;
-pub use pyproject::parse;
diff --git a/crates/ruff/src/flake8_to_ruff/parser.rs b/crates/ruff/src/flake8_to_ruff/parser.rs
index 3e44adab5a904..5c305aafcde65 100644
--- a/crates/ruff/src/flake8_to_ruff/parser.rs
+++ b/crates/ruff/src/flake8_to_ruff/parser.rs
@@ -13,7 +13,7 @@ static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").
/// Parse a comma-separated list of `RuleSelector` values (e.g.,
/// "F401,E501").
-pub fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
+pub(crate) fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
let mut codes: Vec<RuleSelector> = vec![];
for code in COMMA_SEPARATED_LIST_RE.split(value) {
let code = code.trim();
@@ -30,7 +30,7 @@ pub fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
}
/// Parse a comma-separated list of strings (e.g., "__init__.py,__main__.py").
-pub fn parse_strings(value: &str) -> Vec<String> {
+pub(crate) fn parse_strings(value: &str) -> Vec<String> {
COMMA_SEPARATED_LIST_RE
.split(value)
.map(str::trim)
@@ -40,7 +40,7 @@ pub fn parse_strings(value: &str) -> Vec<String> {
}
/// Parse a boolean.
-pub fn parse_bool(value: &str) -> Result<bool> {
+pub(crate) fn parse_bool(value: &str) -> Result<bool> {
match value.trim() {
"true" => Ok(true),
"false" => Ok(false),
@@ -138,7 +138,7 @@ fn tokenize_files_to_codes_mapping(value: &str) -> Vec<Token> {
/// Parse a 'files-to-codes' mapping, mimicking Flake8's internal logic.
/// See:
-pub fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
+pub(crate) fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
if value.trim().is_empty() {
return Ok(vec![]);
}
@@ -178,7 +178,7 @@ pub fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
-pub fn collect_per_file_ignores(
+pub(crate) fn collect_per_file_ignores(
pairs: Vec<PatternPrefixPair>,
) -> FxHashMap<String, Vec<RuleSelector>> {
let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
@@ -195,12 +195,13 @@ pub fn collect_per_file_ignores(
mod tests {
use anyhow::Result;
- use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};
use crate::codes;
use crate::registry::Linter;
use crate::rule_selector::RuleSelector;
use crate::settings::types::PatternPrefixPair;
+ use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};
+
#[test]
fn it_parses_prefix_codes() {
let actual = parse_prefix_codes("");
diff --git a/crates/ruff/src/flake8_to_ruff/plugin.rs b/crates/ruff/src/flake8_to_ruff/plugin.rs
index 93e39d6b03b94..77f6645d29a12 100644
--- a/crates/ruff/src/flake8_to_ruff/plugin.rs
+++ b/crates/ruff/src/flake8_to_ruff/plugin.rs
@@ -175,7 +175,7 @@ impl From<&Plugin> for Linter {
///
/// For example, if the user specified a `mypy-init-return` setting, we should
/// infer that `flake8-annotations` is active.
-pub fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
+pub(crate) fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
let mut plugins = BTreeSet::new();
for key in flake8.keys() {
match key.as_str() {
@@ -292,7 +292,7 @@ pub fn infer_plugins_from_options(flake8: &HashMap>) -> V
///
/// For example, if the user ignores `ANN101`, we should infer that
/// `flake8-annotations` is active.
-pub fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec<Plugin> {
+pub(crate) fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec<Plugin> {
// Ignore cases in which we've knowingly changed rule prefixes.
[
Plugin::Flake82020,
diff --git a/crates/ruff/src/importer.rs b/crates/ruff/src/importer.rs
deleted file mode 100644
index fe003d18193d9..0000000000000
--- a/crates/ruff/src/importer.rs
+++ /dev/null
@@ -1,363 +0,0 @@
-//! Add and modify import statements to make module members available during fix execution.
-
-use anyhow::Result;
-use libcst_native::{Codegen, CodegenState, ImportAlias, Name, NameOrAttribute};
-use ruff_text_size::TextSize;
-use rustc_hash::FxHashMap;
-use rustpython_parser::ast::{Stmt, StmtKind, Suite};
-use rustpython_parser::{lexer, Mode, Tok};
-
-use ruff_diagnostics::Edit;
-use ruff_python_ast::helpers::is_docstring_stmt;
-use ruff_python_ast::imports::AnyImport;
-use ruff_python_ast::source_code::{Locator, Stylist};
-
-use crate::cst::matchers::{match_aliases, match_import_from, match_module};
-
-pub struct Importer<'a> {
- python_ast: &'a Suite,
- locator: &'a Locator<'a>,
- stylist: &'a Stylist<'a>,
- /// A map from module name to top-level `StmtKind::ImportFrom` statements.
- import_from_map: FxHashMap<&'a str, &'a Stmt>,
- /// The last top-level import statement.
- trailing_import: Option<&'a Stmt>,
-}
-
-impl<'a> Importer<'a> {
- pub fn new(python_ast: &'a Suite, locator: &'a Locator<'a>, stylist: &'a Stylist<'a>) -> Self {
- Self {
- python_ast,
- locator,
- stylist,
- import_from_map: FxHashMap::default(),
- trailing_import: None,
- }
- }
-
- /// Visit a top-level import statement.
- pub fn visit_import(&mut self, import: &'a Stmt) {
- // Store a reference to the import statement in the appropriate map.
- match &import.node {
- StmtKind::Import { .. } => {
- // Nothing to do here, we don't extend top-level `import` statements at all, so
- // no need to track them.
- }
- StmtKind::ImportFrom { module, level, .. } => {
- // Store a reverse-map from module name to `import ... from` statement.
- if level.map_or(true, |level| level == 0) {
- if let Some(module) = module {
- self.import_from_map.insert(module.as_str(), import);
- }
- }
- }
- _ => {
- panic!("Expected StmtKind::Import | StmtKind::ImportFrom");
- }
- }
-
- // Store a reference to the last top-level import statement.
- self.trailing_import = Some(import);
- }
-
- /// Add an import statement to import the given module.
- ///
- /// If there are no existing imports, the new import will be added at the top
- /// of the file. Otherwise, it will be added after the most recent top-level
- /// import statement.
- pub fn add_import(&self, import: &AnyImport) -> Edit {
- let required_import = import.to_string();
- if let Some(stmt) = self.trailing_import {
- // Insert after the last top-level import.
- let Insertion {
- prefix,
- location,
- suffix,
- } = end_of_statement_insertion(stmt, self.locator, self.stylist);
- let content = format!("{prefix}{required_import}{suffix}");
- Edit::insertion(content, location)
- } else {
- // Insert at the top of the file.
- let Insertion {
- prefix,
- location,
- suffix,
- } = top_of_file_insertion(self.python_ast, self.locator, self.stylist);
- let content = format!("{prefix}{required_import}{suffix}");
- Edit::insertion(content, location)
- }
- }
-
- /// Return the top-level [`Stmt`] that imports the given module using `StmtKind::ImportFrom`.
- /// if it exists.
- pub fn get_import_from(&self, module: &str) -> Option<&Stmt> {
- self.import_from_map.get(module).copied()
- }
-
- /// Add the given member to an existing `StmtKind::ImportFrom` statement.
- pub fn add_member(&self, stmt: &Stmt, member: &str) -> Result<Edit> {
- let mut tree = match_module(self.locator.slice(stmt.range()))?;
- let import_from = match_import_from(&mut tree)?;
- let aliases = match_aliases(import_from)?;
- aliases.push(ImportAlias {
- name: NameOrAttribute::N(Box::new(Name {
- value: member,
- lpar: vec![],
- rpar: vec![],
- })),
- asname: None,
- comma: aliases.last().and_then(|alias| alias.comma.clone()),
- });
- let mut state = CodegenState {
- default_newline: &self.stylist.line_ending(),
- default_indent: self.stylist.indentation(),
- ..CodegenState::default()
- };
- tree.codegen(&mut state);
- Ok(Edit::range_replacement(state.to_string(), stmt.range()))
- }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-struct Insertion {
- /// The content to add before the insertion.
- prefix: &'static str,
- /// The location at which to insert.
- location: TextSize,
- /// The content to add after the insertion.
- suffix: &'static str,
-}
-
-impl Insertion {
- fn new(prefix: &'static str, location: TextSize, suffix: &'static str) -> Self {
- Self {
- prefix,
- location,
- suffix,
- }
- }
-}
-
-/// Find the end of the last docstring.
-fn match_docstring_end(body: &[Stmt]) -> Option<TextSize> {
- let mut iter = body.iter();
- let Some(mut stmt) = iter.next() else {
- return None;
- };
- if !is_docstring_stmt(stmt) {
- return None;
- }
- for next in iter {
- if !is_docstring_stmt(next) {
- break;
- }
- stmt = next;
- }
- Some(stmt.end())
-}
-
-/// Find the location at which an "end-of-statement" import should be inserted,
-/// along with a prefix and suffix to use for the insertion.
-///
-/// For example, given the following code:
-///
-/// ```python
-/// """Hello, world!"""
-///
-/// import os
-/// import math
-///
-///
-/// def foo():
-/// pass
-/// ```
-///
-/// The location returned will be the start of new line after the last
-/// import statement, which in this case is the line after `import math`,
-/// along with a trailing newline suffix.
-fn end_of_statement_insertion(stmt: &Stmt, locator: &Locator, stylist: &Stylist) -> Insertion {
- let location = stmt.end();
- let mut tokens = lexer::lex_located(locator.after(location), Mode::Module, location).flatten();
- if let Some((Tok::Semi, range)) = tokens.next() {
- // If the first token after the docstring is a semicolon, insert after the semicolon as an
- // inline statement;
- Insertion::new(" ", range.end(), ";")
- } else {
- // Otherwise, insert on the next line.
- Insertion::new(
- "",
- locator.full_line_end(location),
- stylist.line_ending().as_str(),
- )
- }
-}
-
-/// Find the location at which a "top-of-file" import should be inserted,
-/// along with a prefix and suffix to use for the insertion.
-///
-/// For example, given the following code:
-///
-/// ```python
-/// """Hello, world!"""
-///
-/// import os
-/// ```
-///
-/// The location returned will be the start of the `import os` statement,
-/// along with a trailing newline suffix.
-fn top_of_file_insertion(body: &[Stmt], locator: &Locator, stylist: &Stylist) -> Insertion {
- // Skip over any docstrings.
- let mut location = if let Some(location) = match_docstring_end(body) {
- // If the first token after the docstring is a semicolon, insert after the semicolon as an
- // inline statement;
- let first_token = lexer::lex_located(locator.after(location), Mode::Module, location)
- .flatten()
- .next();
- if let Some((Tok::Semi, range)) = first_token {
- return Insertion::new(" ", range.end(), ";");
- }
-
- // Otherwise, advance to the next row.
- locator.full_line_end(location)
- } else {
- TextSize::default()
- };
-
- // Skip over any comments and empty lines.
- for (tok, range) in
- lexer::lex_located(locator.after(location), Mode::Module, location).flatten()
- {
- if matches!(tok, Tok::Comment(..) | Tok::Newline) {
- location = locator.full_line_end(range.end());
- } else {
- break;
- }
- }
-
- return Insertion::new("", location, stylist.line_ending().as_str());
-}
-
-#[cfg(test)]
-mod tests {
- use anyhow::Result;
- use ruff_python_ast::newlines::LineEnding;
- use ruff_text_size::TextSize;
- use rustpython_parser as parser;
- use rustpython_parser::lexer::LexResult;
-
- use ruff_python_ast::source_code::{Locator, Stylist};
-
- use crate::importer::{top_of_file_insertion, Insertion};
-
- fn insert(contents: &str) -> Result<Insertion> {
- let program = parser::parse_program(contents, "")?;
- let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
- let locator = Locator::new(contents);
- let stylist = Stylist::from_tokens(&tokens, &locator);
- Ok(top_of_file_insertion(&program, &locator, &stylist))
- }
-
- #[test]
- fn top_of_file_insertions() -> Result<()> {
- let contents = "";
- assert_eq!(
- insert(contents)?,
- Insertion::new("", TextSize::from(0), LineEnding::default().as_str())
- );
-
- let contents = r#"
-"""Hello, world!""""#
- .trim_start();
- assert_eq!(
- insert(contents)?,
- Insertion::new("", TextSize::from(19), LineEnding::default().as_str())
- );
-
- let contents = r#"
-"""Hello, world!"""
-"#
- .trim_start();
- assert_eq!(
- insert(contents)?,
- Insertion::new("", TextSize::from(20), "\n")
- );
-
- let contents = r#"
-"""Hello, world!"""
-"""Hello, world!"""
-"#
- .trim_start();
- assert_eq!(
- insert(contents)?,
- Insertion::new("", TextSize::from(40), "\n")
- );
-
- let contents = r#"
-x = 1
-"#
- .trim_start();
- assert_eq!(
- insert(contents)?,
- Insertion::new("", TextSize::from(0), "\n")
- );
-
- let contents = r#"
-#!/usr/bin/env python3
-"#
- .trim_start();
- assert_eq!(
- insert(contents)?,
- Insertion::new("", TextSize::from(23), "\n")
- );
-
- let contents = r#"
-#!/usr/bin/env python3
-"""Hello, world!"""
-"#
- .trim_start();
- assert_eq!(
- insert(contents)?,
- Insertion::new("", TextSize::from(43), "\n")
- );
-
- let contents = r#"
-"""Hello, world!"""
-#!/usr/bin/env python3
-"#
- .trim_start();
- assert_eq!(
- insert(contents)?,
- Insertion::new("", TextSize::from(43), "\n")
- );
-
- let contents = r#"
-"""%s""" % "Hello, world!"
-"#
- .trim_start();
- assert_eq!(
- insert(contents)?,
- Insertion::new("", TextSize::from(0), "\n")
- );
-
- let contents = r#"
-"""Hello, world!"""; x = 1
-"#
- .trim_start();
- assert_eq!(
- insert(contents)?,
- Insertion::new(" ", TextSize::from(20), ";")
- );
-
- let contents = r#"
-"""Hello, world!"""; x = 1; y = \
- 2
-"#
- .trim_start();
- assert_eq!(
- insert(contents)?,
- Insertion::new(" ", TextSize::from(20), ";")
- );
-
- Ok(())
- }
-}
diff --git a/crates/ruff/src/importer/insertion.rs b/crates/ruff/src/importer/insertion.rs
new file mode 100644
index 0000000000000..713ca8524e2ad
--- /dev/null
+++ b/crates/ruff/src/importer/insertion.rs
@@ -0,0 +1,449 @@
+//! Insert statements into Python code.
+use std::ops::Add;
+
+use ruff_text_size::TextSize;
+use rustpython_parser::ast::{Ranged, Stmt};
+use rustpython_parser::{lexer, Mode, Tok};
+
+use ruff_diagnostics::Edit;
+use ruff_newlines::UniversalNewlineIterator;
+use ruff_python_ast::helpers::is_docstring_stmt;
+use ruff_python_ast::source_code::{Locator, Stylist};
+use ruff_textwrap::indent;
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(super) enum Placement<'a> {
+ /// The content will be inserted inline with the existing code (i.e., within semicolon-delimited
+ /// statements).
+ Inline,
+ /// The content will be inserted on its own line.
+ OwnLine,
+ /// The content will be inserted as an indented block.
+ Indented(&'a str),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(super) struct Insertion<'a> {
+ /// The content to add before the insertion.
+ prefix: &'a str,
+ /// The location at which to insert.
+ location: TextSize,
+ /// The content to add after the insertion.
+ suffix: &'a str,
+ /// The line placement of insertion.
+ placement: Placement<'a>,
+}
+
+impl<'a> Insertion<'a> {
+ /// Create an [`Insertion`] to insert (e.g.) an import statement at the start of a given
+ /// file, along with a prefix and suffix to use for the insertion.
+ ///
+ /// For example, given the following code:
+ ///
+ /// ```python
+ /// """Hello, world!"""
+ ///
+ /// import os
+ /// ```
+ ///
+ /// The insertion returned will begin at the start of the `import os` statement, and will
+ /// include a trailing newline.
+ pub(super) fn start_of_file(
+ body: &[Stmt],
+ locator: &Locator,
+ stylist: &Stylist,
+ ) -> Insertion<'static> {
+ // Skip over any docstrings.
+ let mut location = if let Some(location) = match_docstring_end(body) {
+ // If the first token after the docstring is a semicolon, insert after the semicolon as
+ // an inline statement.
+ if let Some(offset) = match_leading_semicolon(locator.after(location)) {
+ return Insertion::inline(" ", location.add(offset).add(TextSize::of(';')), ";");
+ }
+
+ // Otherwise, advance to the next row.
+ locator.full_line_end(location)
+ } else {
+ TextSize::default()
+ };
+
+ // Skip over commented lines.
+ for line in UniversalNewlineIterator::with_offset(locator.after(location), location) {
+ if line.trim_start().starts_with('#') {
+ location = line.full_end();
+ } else {
+ break;
+ }
+ }
+
+ Insertion::own_line("", location, stylist.line_ending().as_str())
+ }
+
+ /// Create an [`Insertion`] to insert (e.g.) an import after the end of the given
+ /// [`Stmt`], along with a prefix and suffix to use for the insertion.
+ ///
+ /// For example, given the following code:
+ ///
+ /// ```python
+ /// """Hello, world!"""
+ ///
+ /// import os
+ /// import math
+ ///
+ ///
+ /// def foo():
+ /// pass
+ /// ```
+ ///
+ /// The insertion returned will begin after the newline after the last import statement, which
+ /// in this case is the line after `import math`, and will include a trailing newline.
+ ///
+ /// The statement itself is assumed to be at the top-level of the module.
+ pub(super) fn end_of_statement(
+ stmt: &Stmt,
+ locator: &Locator,
+ stylist: &Stylist,
+ ) -> Insertion<'static> {
+ let location = stmt.end();
+ if let Some(offset) = match_leading_semicolon(locator.after(location)) {
+ // If the first token after the statement is a semicolon, insert after the semicolon as
+ // an inline statement.
+ Insertion::inline(" ", location.add(offset).add(TextSize::of(';')), ";")
+ } else {
+ // Otherwise, insert on the next line.
+ Insertion::own_line(
+ "",
+ locator.full_line_end(location),
+ stylist.line_ending().as_str(),
+ )
+ }
+ }
+
+ /// Create an [`Insertion`] to insert (e.g.) an import statement at the start of a given
+ /// block, along with a prefix and suffix to use for the insertion.
+ ///
+ /// For example, given the following code:
+ ///
+ /// ```python
+ /// if TYPE_CHECKING:
+ /// import os
+ /// ```
+ ///
+ /// The insertion returned will begin at the start of the `import os` statement, and will
+ /// include a trailing newline.
+ ///
+ /// The block itself is assumed to be at the top-level of the module.
+ pub(super) fn start_of_block(
+ mut location: TextSize,
+ locator: &Locator<'a>,
+ stylist: &Stylist,
+ ) -> Insertion<'a> {
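+ // A small token state machine: find the colon that opens the block, then its logical newline, then the body's indentation (if any).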
+ enum Awaiting {
+ Colon(u32),
+ Newline,
+ Indent,
+ }
+
+ let mut state = Awaiting::Colon(0);
+ for (tok, range) in
+ lexer::lex_starts_at(locator.after(location), Mode::Module, location).flatten()
+ {
+ match state {
+ // Iterate until we find the colon indicating the start of the block body.
+ Awaiting::Colon(depth) => match tok {
+ Tok::Colon if depth == 0 => {
+ state = Awaiting::Newline;
+ }
+ Tok::Lpar | Tok::Lbrace | Tok::Lsqb => {
+ state = Awaiting::Colon(depth.saturating_add(1));
+ }
+ Tok::Rpar | Tok::Rbrace | Tok::Rsqb => {
+ state = Awaiting::Colon(depth.saturating_sub(1));
+ }
+ _ => {}
+ },
+ // Once we've seen the colon, we're looking for a newline; otherwise, there's no
+ // block body (e.g. `if True: pass`).
+ Awaiting::Newline => match tok {
+ Tok::Comment(..) => {}
+ Tok::Newline => {
+ state = Awaiting::Indent;
+ }
+ _ => {
+ location = range.start();
+ break;
+ }
+ },
+ // Once we've seen the newline, we're looking for the indentation of the block body.
+ Awaiting::Indent => match tok {
+ Tok::Comment(..) => {}
+ Tok::NonLogicalNewline => {}
+ Tok::Indent => {
+ // This is like:
+ // ```py
+ // if True:
+ // pass
+ // ```
+ // Where `range` is the indentation before the `pass` token.
+ return Insertion::indented(
+ "",
+ range.start(),
+ stylist.line_ending().as_str(),
+ locator.slice(range),
+ );
+ }
+ _ => {
+ location = range.start();
+ break;
+ }
+ },
+ }
+ }
+
+ // This is like: `if True: pass`, where `location` is the start of the `pass` token.
+ Insertion::inline("", location, "; ")
+ }
+
+ /// Convert this [`Insertion`] into an [`Edit`] that inserts the given content.
+ pub(super) fn into_edit(self, content: &str) -> Edit {
+ let Insertion {
+ prefix,
+ location,
+ suffix,
+ placement,
+ } = self;
+ let content = format!("{prefix}{content}{suffix}");
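+ // Indent the content if the insertion targets an indented block.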
+ Edit::insertion(
+ match placement {
+ Placement::Indented(indentation) if !indentation.is_empty() => {
+ indent(&content, indentation).to_string()
+ }
+ _ => content,
+ },
+ location,
+ )
+ }
+
+ /// Returns `true` if this [`Insertion`] is inline.
+ pub(super) fn is_inline(&self) -> bool {
+ matches!(self.placement, Placement::Inline)
+ }
+
+ /// Create an [`Insertion`] that inserts content inline (i.e., within semicolon-delimited
+ /// statements).
+ fn inline(prefix: &'a str, location: TextSize, suffix: &'a str) -> Self {
+ Self {
+ prefix,
+ location,
+ suffix,
+ placement: Placement::Inline,
+ }
+ }
+
+ /// Create an [`Insertion`] that starts on its own line.
+ fn own_line(prefix: &'a str, location: TextSize, suffix: &'a str) -> Self {
+ Self {
+ prefix,
+ location,
+ suffix,
+ placement: Placement::OwnLine,
+ }
+ }
+
+ /// Create an [`Insertion`] that starts on its own line, with the given indentation.
+ fn indented(
+ prefix: &'a str,
+ location: TextSize,
+ suffix: &'a str,
+ indentation: &'a str,
+ ) -> Self {
+ Self {
+ prefix,
+ location,
+ suffix,
+ placement: Placement::Indented(indentation),
+ }
+ }
+}
+
+/// Find the end of the last docstring.
+fn match_docstring_end(body: &[Stmt]) -> Option<TextSize> {
+ let mut iter = body.iter();
+ let Some(mut stmt) = iter.next() else {
+ return None;
+ };
+ if !is_docstring_stmt(stmt) {
+ return None;
+ }
+ for next in iter {
+ if !is_docstring_stmt(next) {
+ break;
+ }
+ stmt = next;
+ }
+ Some(stmt.end())
+}
+
+/// If a line starts with a semicolon, return its offset.
+fn match_leading_semicolon(s: &str) -> Option<TextSize> {
+ for (offset, c) in s.char_indices() {
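+ // Skip leading whitespace; any other character before a semicolon means there is none.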
+ match c {
+ ' ' | '\t' => continue,
+ ';' => return Some(TextSize::try_from(offset).unwrap()),
+ _ => break,
+ }
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use anyhow::Result;
+ use ruff_text_size::TextSize;
+ use rustpython_parser::ast::Suite;
+ use rustpython_parser::lexer::LexResult;
+ use rustpython_parser::Parse;
+
+ use ruff_newlines::LineEnding;
+ use ruff_python_ast::source_code::{Locator, Stylist};
+
+ use super::Insertion;
+
+ #[test]
+ fn start_of_file() -> Result<()> {
+ fn insert(contents: &str) -> Result<Insertion> {
+ let program = Suite::parse(contents, "")?;
+ let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
+ let locator = Locator::new(contents);
+ let stylist = Stylist::from_tokens(&tokens, &locator);
+ Ok(Insertion::start_of_file(&program, &locator, &stylist))
+ }
+
+ let contents = "";
+ assert_eq!(
+ insert(contents)?,
+ Insertion::own_line("", TextSize::from(0), LineEnding::default().as_str())
+ );
+
+ let contents = r#"
+"""Hello, world!""""#
+ .trim_start();
+ assert_eq!(
+ insert(contents)?,
+ Insertion::own_line("", TextSize::from(19), LineEnding::default().as_str())
+ );
+
+ let contents = r#"
+"""Hello, world!"""
+"#
+ .trim_start();
+ assert_eq!(
+ insert(contents)?,
+ Insertion::own_line("", TextSize::from(20), "\n")
+ );
+
+ let contents = r#"
+"""Hello, world!"""
+"""Hello, world!"""
+"#
+ .trim_start();
+ assert_eq!(
+ insert(contents)?,
+ Insertion::own_line("", TextSize::from(40), "\n")
+ );
+
+ let contents = r#"
+x = 1
+"#
+ .trim_start();
+ assert_eq!(
+ insert(contents)?,
+ Insertion::own_line("", TextSize::from(0), "\n")
+ );
+
+ let contents = r#"
+#!/usr/bin/env python3
+"#
+ .trim_start();
+ assert_eq!(
+ insert(contents)?,
+ Insertion::own_line("", TextSize::from(23), "\n")
+ );
+
+ let contents = r#"
+#!/usr/bin/env python3
+"""Hello, world!"""
+"#
+ .trim_start();
+ assert_eq!(
+ insert(contents)?,
+ Insertion::own_line("", TextSize::from(43), "\n")
+ );
+
+ let contents = r#"
+"""Hello, world!"""
+#!/usr/bin/env python3
+"#
+ .trim_start();
+ assert_eq!(
+ insert(contents)?,
+ Insertion::own_line("", TextSize::from(43), "\n")
+ );
+
+ let contents = r#"
+"""%s""" % "Hello, world!"
+"#
+ .trim_start();
+ assert_eq!(
+ insert(contents)?,
+ Insertion::own_line("", TextSize::from(0), "\n")
+ );
+
+ let contents = r#"
+"""Hello, world!"""; x = 1
+"#
+ .trim_start();
+ assert_eq!(
+ insert(contents)?,
+ Insertion::inline(" ", TextSize::from(20), ";")
+ );
+
+ let contents = r#"
+"""Hello, world!"""; x = 1; y = \
+ 2
+"#
+ .trim_start();
+ assert_eq!(
+ insert(contents)?,
+ Insertion::inline(" ", TextSize::from(20), ";")
+ );
+
+ Ok(())
+ }
+
+ #[test]
+ fn start_of_block() {
+ fn insert(contents: &str, offset: TextSize) -> Insertion {
+ let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
+ let locator = Locator::new(contents);
+ let stylist = Stylist::from_tokens(&tokens, &locator);
+ Insertion::start_of_block(offset, &locator, &stylist)
+ }
+
+ let contents = "if True: pass";
+ assert_eq!(
+ insert(contents, TextSize::from(0)),
+ Insertion::inline("", TextSize::from(9), "; ")
+ );
+
+ let contents = r#"
+if True:
+ pass
+"#
+ .trim_start();
+ assert_eq!(
+ insert(contents, TextSize::from(0)),
+ Insertion::indented("", TextSize::from(9), "\n", " ")
+ );
+ }
+}
diff --git a/crates/ruff/src/importer/mod.rs b/crates/ruff/src/importer/mod.rs
new file mode 100644
index 0000000000000..aca15cd632d2a
--- /dev/null
+++ b/crates/ruff/src/importer/mod.rs
@@ -0,0 +1,487 @@
+//! Add and modify import statements to make module members available during fix execution.
+
+use std::error::Error;
+
+use anyhow::Result;
+use libcst_native::{ImportAlias, Name, NameOrAttribute};
+use ruff_text_size::TextSize;
+use rustpython_parser::ast::{self, Ranged, Stmt, Suite};
+
+use crate::autofix;
+use crate::autofix::codemods::CodegenStylist;
+use ruff_diagnostics::Edit;
+use ruff_python_ast::imports::{AnyImport, Import, ImportFrom};
+use ruff_python_ast::source_code::{Locator, Stylist};
+use ruff_python_semantic::model::SemanticModel;
+use ruff_textwrap::indent;
+
+use crate::cst::matchers::{match_aliases, match_import_from, match_statement};
+use crate::importer::insertion::Insertion;
+
+mod insertion;
+
+pub(crate) struct Importer<'a> {
+ /// The Python AST to which we are adding imports.
+ python_ast: &'a Suite,
+ /// The [`Locator`] for the Python AST.
+ locator: &'a Locator<'a>,
+ /// The [`Stylist`] for the Python AST.
+ stylist: &'a Stylist<'a>,
+ /// The list of visited, top-level runtime imports in the Python AST.
+ runtime_imports: Vec<&'a Stmt>,
+ /// The list of visited, top-level `if TYPE_CHECKING:` blocks in the Python AST.
+ type_checking_blocks: Vec<&'a Stmt>,
+}
+
+impl<'a> Importer<'a> {
+ pub(crate) fn new(
+ python_ast: &'a Suite,
+ locator: &'a Locator<'a>,
+ stylist: &'a Stylist<'a>,
+ ) -> Self {
+ Self {
+ python_ast,
+ locator,
+ stylist,
+ runtime_imports: Vec::default(),
+ type_checking_blocks: Vec::default(),
+ }
+ }
+
+ /// Visit a top-level import statement.
+ pub(crate) fn visit_import(&mut self, import: &'a Stmt) {
+ self.runtime_imports.push(import);
+ }
+
+ /// Visit a top-level type-checking block.
+ pub(crate) fn visit_type_checking_block(&mut self, type_checking_block: &'a Stmt) {
+ self.type_checking_blocks.push(type_checking_block);
+ }
+
+ /// Add an import statement to import the given module.
+ ///
+ /// If there are no existing imports, the new import will be added at the top
+ /// of the file. Otherwise, it will be added after the most recent top-level
+ /// import statement.
+ pub(crate) fn add_import(&self, import: &AnyImport, at: TextSize) -> Edit {
+ let required_import = import.to_string();
+ if let Some(stmt) = self.preceding_import(at) {
+ // Insert after the last top-level import.
+ Insertion::end_of_statement(stmt, self.locator, self.stylist)
+ .into_edit(&required_import)
+ } else {
+ // Insert at the start of the file.
+ Insertion::start_of_file(self.python_ast, self.locator, self.stylist)
+ .into_edit(&required_import)
+ }
+ }
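+
+ // Hedged usage sketch (not part of the original file; the surrounding plumbing is
+ // assumed): a fix that needs `collections` at runtime could request
+ //
+ //     let edit = importer.add_import(
+ //         &AnyImport::Import(Import::module("collections")),
+ //         TextSize::default(),
+ //     );
+ //
+ // The edit lands after the last preceding top-level import, or at the start of the
+ // file when there is none.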
+
+ /// Move an existing import to the top-level, thereby making it available at runtime.
+ ///
+ /// If there are no existing imports, the new import will be added at the top
+ /// of the file. Otherwise, it will be added after the most recent top-level
+ /// import statement.
+ pub(crate) fn runtime_import_edit(
+ &self,
+ import: &StmtImports,
+ at: TextSize,
+ ) -> Result<RuntimeImportEdit> {
+ // Generate the modified import statement.
+ let content = autofix::codemods::retain_imports(
+ &import.qualified_names,
+ import.stmt,
+ self.locator,
+ self.stylist,
+ )?;
+
+ // Add the import to the top-level.
+ let insertion = if let Some(stmt) = self.preceding_import(at) {
+ // Insert after the last top-level import.
+ Insertion::end_of_statement(stmt, self.locator, self.stylist)
+ } else {
+ // Insert at the start of the file.
+ Insertion::start_of_file(self.python_ast, self.locator, self.stylist)
+ };
+ let add_import_edit = insertion.into_edit(&content);
+
+ Ok(RuntimeImportEdit { add_import_edit })
+ }
+
+ /// Move an existing import into a `TYPE_CHECKING` block.
+ ///
+ /// If there are no existing `TYPE_CHECKING` blocks, a new one will be added at the top
+ /// of the file. Otherwise, it will be added after the most recent top-level
+ /// `TYPE_CHECKING` block.
+ pub(crate) fn typing_import_edit(
+ &self,
+ import: &StmtImports,
+ at: TextSize,
+ semantic_model: &SemanticModel,
+ ) -> Result<TypingImportEdit> {
+ // Generate the modified import statement.
+ let content = autofix::codemods::retain_imports(
+ &import.qualified_names,
+ import.stmt,
+ self.locator,
+ self.stylist,
+ )?;
+
+ // Import the `TYPE_CHECKING` symbol from the typing module.
+ let (type_checking_edit, type_checking) = self.get_or_import_symbol(
+ &ImportRequest::import_from("typing", "TYPE_CHECKING"),
+ at,
+ semantic_model,
+ )?;
+
+ // Add the import to a `TYPE_CHECKING` block.
+ let add_import_edit = if let Some(block) = self.preceding_type_checking_block(at) {
+ // Add the import to the `TYPE_CHECKING` block.
+ self.add_to_type_checking_block(&content, block.start())
+ } else {
+ // Add the import to a new `TYPE_CHECKING` block.
+ self.add_type_checking_block(
+ &format!(
+ "{}if {type_checking}:{}{}",
+ self.stylist.line_ending().as_str(),
+ self.stylist.line_ending().as_str(),
+ indent(&content, self.stylist.indentation())
+ ),
+ at,
+ )?
+ };
+
+ Ok(TypingImportEdit {
+ type_checking_edit,
+ add_import_edit,
+ })
+ }
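+
+ // Rough before/after sketch of the intended transformation (illustrative only):
+ //
+ //     from collections.abc import Sequence      # before: a runtime import
+ //
+ // becomes, once both returned edits are applied:
+ //
+ //     from typing import TYPE_CHECKING
+ //
+ //     if TYPE_CHECKING:
+ //         from collections.abc import Sequence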
+
+ /// Generate an [`Edit`] to reference the given symbol. Returns the [`Edit`] necessary to make
+ /// the symbol available in the current scope along with the bound name of the symbol.
+ ///
+ /// Attempts to reuse existing imports when possible.
+ pub(crate) fn get_or_import_symbol(
+ &self,
+ symbol: &ImportRequest,
+ at: TextSize,
+ semantic_model: &SemanticModel,
+ ) -> Result<(Edit, String), ResolutionError> {
+ match self.get_symbol(symbol, at, semantic_model) {
+ Some(result) => result,
+ None => self.import_symbol(symbol, at, semantic_model),
+ }
+ }
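+
+ // Hedged usage sketch (identifiers outside this module are assumed): a rule that
+ // rewrites `quit()` to `sys.exit()` might call
+ //
+ //     let (edit, binding) = importer.get_or_import_symbol(
+ //         &ImportRequest::import("sys", "exit"),
+ //         call_range.start(),
+ //         semantic_model,
+ //     )?;
+ //
+ // and then replace the call with `format!("{binding}()")`, where `binding` is
+ // `"sys.exit"` when `import sys` had to be added.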
+
+ /// Return an [`Edit`] to reference an existing symbol, if it's present in the given [`SemanticModel`].
+ fn get_symbol(
+ &self,
+ symbol: &ImportRequest,
+ at: TextSize,
+ semantic_model: &SemanticModel,
+ ) -> Option<Result<(Edit, String), ResolutionError>> {
+ // If the symbol is already available in the current scope, use it.
+ let imported_name =
+ semantic_model.resolve_qualified_import_name(symbol.module, symbol.member)?;
+
+ // If the symbol source (i.e., the import statement) comes after the current location,
+ // abort. For example, we could be generating an edit within a function, and the import
+ // could be defined in the module scope, but after the function definition. In this case,
+ // it's unclear whether we can use the symbol (the function could be called between the
+ // import and the current location, and thus the symbol would not be available). It's also
+ // unclear whether should add an import statement at the start of the file, since it could
+ // be shadowed between the import and the current location.
+ if imported_name.range().start() > at {
+ return Some(Err(ResolutionError::ImportAfterUsage));
+ }
+
+ // If the symbol source (i.e., the import statement) is in a typing-only context, but we're
+ // in a runtime context, abort.
+ if imported_name.context().is_typing() && semantic_model.execution_context().is_runtime() {
+ return Some(Err(ResolutionError::IncompatibleContext));
+ }
+
+ // We also add a no-op edit to force conflicts with any other fixes that might try to
+ // remove the import. Consider:
+ //
+ // ```py
+ // import sys
+ //
+ // quit()
+ // ```
+ //
+ // Assume you omit this no-op edit. If you run Ruff with `unused-imports` and
+ // `sys-exit-alias` over this snippet, it will generate two fixes: (1) remove the unused
+ // `sys` import; and (2) replace `quit()` with `sys.exit()`, under the assumption that `sys`
+ // is already imported and available.
+ //
+ // By adding this no-op edit, we force the `unused-imports` fix to conflict with the
+ // `sys-exit-alias` fix, and thus will avoid applying both fixes in the same pass.
+ let import_edit = Edit::range_replacement(
+ self.locator.slice(imported_name.range()).to_string(),
+ imported_name.range(),
+ );
+ Some(Ok((import_edit, imported_name.into_name())))
+ }
+
+ /// Generate an [`Edit`] to reference the given symbol. Returns the [`Edit`] necessary to make
+ /// the symbol available in the current scope along with the bound name of the symbol.
+ ///
+ /// For example, assuming `module` is `"functools"` and `member` is `"lru_cache"`, this function
+ /// could return an [`Edit`] to add `import functools` to the start of the file, along with
+ /// the name on which the `lru_cache` symbol would be made available (`"functools.lru_cache"`).
+ fn import_symbol(
+ &self,
+ symbol: &ImportRequest,
+ at: TextSize,
+ semantic_model: &SemanticModel,
+ ) -> Result<(Edit, String), ResolutionError> {
+ if let Some(stmt) = self.find_import_from(symbol.module, at) {
+ // Case 1: `from functools import lru_cache` is in scope, and we're trying to reference
+ // `functools.cache`; thus, we add `cache` to the import, and return `"cache"` as the
+ // bound name.
+ if semantic_model.is_unbound(symbol.member) {
+ let Ok(import_edit) = self.add_member(stmt, symbol.member) else {
+ return Err(ResolutionError::InvalidEdit);
+ };
+ Ok((import_edit, symbol.member.to_string()))
+ } else {
+ Err(ResolutionError::ConflictingName(symbol.member.to_string()))
+ }
+ } else {
+ match symbol.style {
+ ImportStyle::Import => {
+ // Case 2a: No `functools` import is in scope; thus, we add `import functools`,
+ // and return `"functools.cache"` as the bound name.
+ if semantic_model.is_unbound(symbol.module) {
+ let import_edit =
+ self.add_import(&AnyImport::Import(Import::module(symbol.module)), at);
+ Ok((
+ import_edit,
+ format!(
+ "{module}.{member}",
+ module = symbol.module,
+ member = symbol.member
+ ),
+ ))
+ } else {
+ Err(ResolutionError::ConflictingName(symbol.module.to_string()))
+ }
+ }
+ ImportStyle::ImportFrom => {
+ // Case 2b: No `functools` import is in scope; thus, we add
+ // `from functools import cache`, and return `"cache"` as the bound name.
+ if semantic_model.is_unbound(symbol.member) {
+ let import_edit = self.add_import(
+ &AnyImport::ImportFrom(ImportFrom::member(
+ symbol.module,
+ symbol.member,
+ )),
+ at,
+ );
+ Ok((import_edit, symbol.member.to_string()))
+ } else {
+ Err(ResolutionError::ConflictingName(symbol.member.to_string()))
+ }
+ }
+ }
+ }
+ }
+
+ /// Return the top-level [`Stmt`] that imports the given module using `Stmt::ImportFrom`
+ /// preceding the given position, if any.
+ fn find_import_from(&self, module: &str, at: TextSize) -> Option<&Stmt> {
+ let mut import_from = None;
+ for stmt in &self.runtime_imports {
+ if stmt.start() >= at {
+ break;
+ }
+ if let Stmt::ImportFrom(ast::StmtImportFrom {
+ module: name,
+ level,
+ ..
+ }) = stmt
+ {
+ if level.map_or(true, |level| level.to_u32() == 0)
+ && name.as_ref().map_or(false, |name| name == module)
+ {
+ import_from = Some(*stmt);
+ }
+ }
+ }
+ import_from
+ }
+
+ /// Add the given member to an existing `Stmt::ImportFrom` statement.
+ fn add_member(&self, stmt: &Stmt, member: &str) -> Result<Edit> {
+ let mut statement = match_statement(self.locator.slice(stmt.range()))?;
+ let import_from = match_import_from(&mut statement)?;
+ let aliases = match_aliases(import_from)?;
+ aliases.push(ImportAlias {
+ name: NameOrAttribute::N(Box::new(Name {
+ value: member,
+ lpar: vec![],
+ rpar: vec![],
+ })),
+ asname: None,
+ comma: aliases.last().and_then(|alias| alias.comma.clone()),
+ });
+ Ok(Edit::range_replacement(
+ statement.codegen_stylist(self.stylist),
+ stmt.range(),
+ ))
+ }
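+
+ // For example (illustrative): adding member `"cache"` to the statement
+ // `from functools import lru_cache` regenerates it as
+ // `from functools import lru_cache, cache`, with formatting preserved by the
+ // `Stylist`-aware codegen.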
+
+ /// Add a `TYPE_CHECKING` block to the given module.
+ fn add_type_checking_block(&self, content: &str, at: TextSize) -> Result<Edit> {
+ let insertion = if let Some(stmt) = self.preceding_import(at) {
+ // Insert after the last top-level import.
+ Insertion::end_of_statement(stmt, self.locator, self.stylist)
+ } else {
+ // Insert at the start of the file.
+ Insertion::start_of_file(self.python_ast, self.locator, self.stylist)
+ };
+ if insertion.is_inline() {
+ Err(anyhow::anyhow!(
+ "Cannot insert `TYPE_CHECKING` block inline"
+ ))
+ } else {
+ Ok(insertion.into_edit(content))
+ }
+ }
+
+ /// Add an import statement to an existing `TYPE_CHECKING` block.
+ fn add_to_type_checking_block(&self, content: &str, at: TextSize) -> Edit {
+ Insertion::start_of_block(at, self.locator, self.stylist).into_edit(content)
+ }
+
+ /// Return the import statement that precedes the given position, if any.
+ fn preceding_import(&self, at: TextSize) -> Option<&'a Stmt> {
+ self.runtime_imports
+ .partition_point(|stmt| stmt.start() < at)
+ .checked_sub(1)
+ .map(|idx| self.runtime_imports[idx])
+ }
+
+ /// Return the `TYPE_CHECKING` block that precedes the given position, if any.
+ fn preceding_type_checking_block(&self, at: TextSize) -> Option<&'a Stmt> {
+ let block = self.type_checking_blocks.first()?;
+ if block.start() <= at {
+ Some(block)
+ } else {
+ None
+ }
+ }
+}
+
+/// An edit to the top level of a module, making an import available at runtime.
+#[derive(Debug)]
+pub(crate) struct RuntimeImportEdit {
+ /// The edit to add the import to the top-level of the module.
+ add_import_edit: Edit,
+}
+
+impl RuntimeImportEdit {
+ pub(crate) fn into_edits(self) -> Vec<Edit> {
+ vec![self.add_import_edit]
+ }
+}
+
+/// An edit to move an import into a typing-only context.
+#[derive(Debug)]
+pub(crate) struct TypingImportEdit {
+ /// The edit to add the `TYPE_CHECKING` symbol to the module.
+ type_checking_edit: Edit,
+ /// The edit to add the import to a `TYPE_CHECKING` block.
+ add_import_edit: Edit,
+}
+
+impl TypingImportEdit {
+ pub(crate) fn into_edits(self) -> Vec<Edit> {
+ vec![self.type_checking_edit, self.add_import_edit]
+ }
+}
+
+#[derive(Debug)]
+enum ImportStyle {
+ /// Import the symbol using the `import` statement (e.g. `import foo; foo.bar`).
+ Import,
+ /// Import the symbol using the `from` statement (e.g. `from foo import bar; bar`).
+ ImportFrom,
+}
+
+#[derive(Debug)]
+pub(crate) struct ImportRequest<'a> {
+ /// The module from which the symbol can be imported (e.g., `foo`, in `from foo import bar`).
+ module: &'a str,
+ /// The member to import (e.g., `bar`, in `from foo import bar`).
+ member: &'a str,
+ /// The preferred style to use when importing the symbol (e.g., `import foo` or
+ /// `from foo import bar`), if it's not already in scope.
+ style: ImportStyle,
+}
+
+impl<'a> ImportRequest<'a> {
+ /// Create a new `ImportRequest` from a module and member. If not present in the scope,
+ /// the symbol should be imported using the "import" statement.
+ pub(crate) fn import(module: &'a str, member: &'a str) -> Self {
+ Self {
+ module,
+ member,
+ style: ImportStyle::Import,
+ }
+ }
+
+ /// Create a new `ImportRequest` from a module and member. If not present in the scope,
+ /// the symbol should be imported using the "import from" statement.
+ pub(crate) fn import_from(module: &'a str, member: &'a str) -> Self {
+ Self {
+ module,
+ member,
+ style: ImportStyle::ImportFrom,
+ }
+ }
+}
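+
+// For orientation (not in the original source): `ImportRequest::import("foo", "bar")`
+// prefers binding the symbol as `foo.bar` (adding `import foo` if needed), whereas
+// `ImportRequest::import_from("foo", "bar")` prefers binding it as `bar` (adding
+// `from foo import bar` if needed); see `Importer::import_symbol` above.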
+
+/// An existing list of module or member imports, located within an import statement.
+pub(crate) struct StmtImports<'a> {
+ /// The import statement.
+ pub(crate) stmt: &'a Stmt,
+ /// The "qualified names" of the imported modules or members.
+ pub(crate) qualified_names: Vec<&'a str>,
+}
+
+/// The result of an [`Importer::get_or_import_symbol`] call.
+#[derive(Debug)]
+pub(crate) enum ResolutionError {
+ /// The symbol is imported, but the import came after the current location.
+ ImportAfterUsage,
+ /// The symbol is imported, but in an incompatible context (e.g., in a typing-only context, while
+ /// we're in a runtime context).
+ IncompatibleContext,
+ /// The symbol can't be imported, because another symbol is bound to the same name.
+ ConflictingName(String),
+ /// The symbol can't be imported due to an error in editing an existing import statement.
+ InvalidEdit,
+}
+
+impl std::fmt::Display for ResolutionError {
+ fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ ResolutionError::ImportAfterUsage => {
+ fmt.write_str("Unable to use existing symbol due to late binding")
+ }
+ ResolutionError::IncompatibleContext => {
+ fmt.write_str("Unable to use existing symbol due to incompatible context")
+ }
+ ResolutionError::ConflictingName(binding) => std::write!(
+ fmt,
+ "Unable to insert `{binding}` into scope due to name conflict"
+ ),
+ ResolutionError::InvalidEdit => {
+ fmt.write_str("Unable to modify existing import statement")
+ }
+ }
+ }
+}
+
+impl Error for ResolutionError {}
diff --git a/crates/ruff/src/jupyter/mod.rs b/crates/ruff/src/jupyter/mod.rs
index 92ee1e8a03cfc..ce6b9ef3bcaca 100644
--- a/crates/ruff/src/jupyter/mod.rs
+++ b/crates/ruff/src/jupyter/mod.rs
@@ -1,7 +1,7 @@
//! Utils for reading and writing jupyter notebooks
-mod notebook;
-mod schema;
-
pub use notebook::*;
pub use schema::*;
+
+mod notebook;
+mod schema;
diff --git a/crates/ruff/src/jupyter/notebook.rs b/crates/ruff/src/jupyter/notebook.rs
index 85a97dd6e6b5d..81a75805d1a38 100644
--- a/crates/ruff/src/jupyter/notebook.rs
+++ b/crates/ruff/src/jupyter/notebook.rs
@@ -1,9 +1,9 @@
-use ruff_text_size::TextRange;
use std::fs::File;
use std::io::{BufReader, BufWriter};
use std::iter;
use std::path::Path;
+use ruff_text_size::TextRange;
use serde::Serialize;
use serde_json::error::Category;
diff --git a/crates/ruff/src/lex/docstring_detection.rs b/crates/ruff/src/lex/docstring_detection.rs
index 09956511b47b6..58bdbf4a4c2c3 100644
--- a/crates/ruff/src/lex/docstring_detection.rs
+++ b/crates/ruff/src/lex/docstring_detection.rs
@@ -25,13 +25,13 @@ enum State {
}
#[derive(Default)]
-pub struct StateMachine {
+pub(crate) struct StateMachine {
state: State,
bracket_count: usize,
}
impl StateMachine {
- pub fn consume(&mut self, tok: &Tok) -> bool {
+ pub(crate) fn consume(&mut self, tok: &Tok) -> bool {
match tok {
Tok::NonLogicalNewline
| Tok::Newline
@@ -79,7 +79,7 @@ impl StateMachine {
}
Tok::Lpar | Tok::Lbrace | Tok::Lsqb => {
- self.bracket_count += 1;
+ self.bracket_count = self.bracket_count.saturating_add(1);
if matches!(
self.state,
State::ExpectModuleDocstring
@@ -92,7 +92,7 @@ impl StateMachine {
}
Tok::Rpar | Tok::Rbrace | Tok::Rsqb => {
- self.bracket_count -= 1;
+ self.bracket_count = self.bracket_count.saturating_sub(1);
if matches!(
self.state,
State::ExpectModuleDocstring
diff --git a/crates/ruff/src/lex/mod.rs b/crates/ruff/src/lex/mod.rs
index e2b4c3e5a0441..c6673c215d638 100644
--- a/crates/ruff/src/lex/mod.rs
+++ b/crates/ruff/src/lex/mod.rs
@@ -1 +1 @@
-pub mod docstring_detection;
+pub(crate) mod docstring_detection;
diff --git a/crates/ruff/src/lib.rs b/crates/ruff/src/lib.rs
index e359ffdbd0d09..fc73e6e40223a 100644
--- a/crates/ruff/src/lib.rs
+++ b/crates/ruff/src/lib.rs
@@ -21,11 +21,13 @@ pub mod fs;
mod importer;
pub mod jupyter;
mod lex;
+pub mod line_width;
pub mod linter;
pub mod logging;
pub mod message;
mod noqa;
pub mod packaging;
+pub mod pyproject_toml;
pub mod registry;
pub mod resolver;
mod rule_redirects;
@@ -33,5 +35,5 @@ mod rule_selector;
pub mod rules;
pub mod settings;
-#[cfg(test)]
-mod test;
+#[cfg(any(test, fuzzing))]
+pub mod test;
diff --git a/crates/ruff/src/line_width.rs b/crates/ruff/src/line_width.rs
new file mode 100644
index 0000000000000..8619b42aa36ac
--- /dev/null
+++ b/crates/ruff/src/line_width.rs
@@ -0,0 +1,165 @@
+use serde::{Deserialize, Serialize};
+use unicode_width::UnicodeWidthChar;
+
+use ruff_macros::CacheKey;
+
+/// The length of a line of text that is considered too long.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, CacheKey)]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+pub struct LineLength(usize);
+
+impl Default for LineLength {
+ /// The default line length.
+ fn default() -> Self {
+ Self(88)
+ }
+}
+
+impl LineLength {
+ pub const fn get(&self) -> usize {
+ self.0
+ }
+}
+
+impl From<usize> for LineLength {
+ fn from(value: usize) -> Self {
+ Self(value)
+ }
+}
+
+/// A measure of the width of a line of text.
+///
+/// This is used to determine if a line is too long.
+/// It should be compared to a [`LineLength`].
+#[derive(Clone, Copy, Debug)]
+pub struct LineWidth {
+ /// The width of the line.
+ width: usize,
+ /// The column of the line.
+ /// This is used to calculate the width of tabs.
+ column: usize,
+ /// The tab size to use when calculating the width of tabs.
+ tab_size: TabSize,
+}
+
+impl Default for LineWidth {
+ fn default() -> Self {
+ Self::new(TabSize::default())
+ }
+}
+
+impl PartialEq for LineWidth {
+ fn eq(&self, other: &Self) -> bool {
+ self.width == other.width
+ }
+}
+
+impl Eq for LineWidth {}
+
+impl PartialOrd for LineWidth {
+ fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+ self.width.partial_cmp(&other.width)
+ }
+}
+
+impl Ord for LineWidth {
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ self.width.cmp(&other.width)
+ }
+}
+
+impl LineWidth {
+ pub fn get(&self) -> usize {
+ self.width
+ }
+
+ /// Creates a new `LineWidth` with the given tab size.
+ pub fn new(tab_size: TabSize) -> Self {
+ LineWidth {
+ width: 0,
+ column: 0,
+ tab_size,
+ }
+ }
+
+ fn update(mut self, chars: impl Iterator<Item = char>) -> Self {
+ let tab_size: usize = self.tab_size.into();
+ for c in chars {
+ match c {
+ '\t' => {
+ let tab_offset = tab_size - (self.column % tab_size);
+ self.width += tab_offset;
+ self.column += tab_offset;
+ }
+ '\n' | '\r' => {
+ self.width = 0;
+ self.column = 0;
+ }
+ _ => {
+ self.width += c.width().unwrap_or(0);
+ self.column += 1;
+ }
+ }
+ }
+ self
+ }
+
+ /// Adds the given text to the line width.
+ #[must_use]
+ pub fn add_str(self, text: &str) -> Self {
+ self.update(text.chars())
+ }
+
+ /// Adds the given character to the line width.
+ #[must_use]
+ pub fn add_char(self, c: char) -> Self {
+ self.update(std::iter::once(c))
+ }
+
+ /// Adds the given width to the line width.
+ /// Also adds the given width to the column.
+ /// It is generally better to use [`LineWidth::add_str`] or [`LineWidth::add_char`].
+ /// The width and column should be the same for the corresponding text.
+ /// Currently, this is only used to add spaces.
+ #[must_use]
+ pub fn add_width(mut self, width: usize) -> Self {
+ self.width += width;
+ self.column += width;
+ self
+ }
+}
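+
+// Hedged usage sketch (not in the original source): comparing a measured width
+// against a configured limit, using only the types defined in this file.
+//
+//     let limit = LineLength::from(88);
+//     let width = LineWidth::new(TabSize::default()).add_str("x = [1,\t2]");
+//     assert!(width < limit);
+//
+// A tab advances the column to the next multiple of the tab size, so the `\t`
+// above (entered at column 7) contributes a single column under the default
+// `TabSize(4)`.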
+
+impl PartialEq<LineLength> for LineWidth {
+ fn eq(&self, other: &LineLength) -> bool {
+ self.width == other.0
+ }
+}
+
+impl PartialOrd<LineLength> for LineWidth {
+ fn partial_cmp(&self, other: &LineLength) -> Option<std::cmp::Ordering> {
+ self.width.partial_cmp(&other.0)
+ }
+}
+
+/// The size of a tab.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, CacheKey)]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+pub struct TabSize(pub u8);
+
+impl Default for TabSize {
+ fn default() -> Self {
+ Self(4)
+ }
+}
+
+impl From<u8> for TabSize {
+ fn from(tab_size: u8) -> Self {
+ Self(tab_size)
+ }
+}
+
+impl From<TabSize> for usize {
+ fn from(tab_size: TabSize) -> Self {
+ tab_size.0 as usize
+ }
+}
diff --git a/crates/ruff/src/linter.rs b/crates/ruff/src/linter.rs
index a9810042c511c..bd8f713d87f92 100644
--- a/crates/ruff/src/linter.rs
+++ b/crates/ruff/src/linter.rs
@@ -77,7 +77,6 @@ pub fn check_path(
directives: &Directives,
settings: &Settings,
noqa: flags::Noqa,
- autofix: flags::Autofix,
) -> LinterResult<(Vec<Diagnostic>, Option<ImportMap>)> {
// Aggregate all diagnostics.
let mut diagnostics = vec![];
@@ -89,7 +88,7 @@ pub fn check_path(
let use_doc_lines = settings.rules.enabled(Rule::DocLineTooLong);
let mut doc_lines = vec![];
if use_doc_lines {
- doc_lines.extend(doc_lines_from_tokens(&tokens, locator));
+ doc_lines.extend(doc_lines_from_tokens(&tokens));
}
// Run the token-based rules.
@@ -99,7 +98,7 @@ pub fn check_path(
.any(|rule_code| rule_code.lint_source().is_tokens())
{
let is_stub = is_python_stub_file(path);
- diagnostics.extend(check_tokens(locator, &tokens, settings, autofix, is_stub));
+ diagnostics.extend(check_tokens(locator, indexer, &tokens, settings, is_stub));
}
// Run the filesystem-based rules.
@@ -117,13 +116,8 @@ pub fn check_path(
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_logical_lines())
{
- #[cfg(feature = "logical_lines")]
diagnostics.extend(crate::checkers::logical_lines::check_logical_lines(
- &tokens,
- locator,
- stylist,
- settings,
- flags::Autofix::Enabled,
+ &tokens, locator, stylist, settings,
));
}
@@ -148,7 +142,6 @@ pub fn check_path(
indexer,
&directives.noqa_line_for,
settings,
- autofix,
noqa,
path,
package,
@@ -162,7 +155,6 @@ pub fn check_path(
&directives.isort,
settings,
stylist,
- autofix,
path,
package,
);
@@ -170,7 +162,7 @@ pub fn check_path(
diagnostics.extend(import_diagnostics);
}
if use_doc_lines {
- doc_lines.extend(doc_lines_from_ast(&python_ast));
+ doc_lines.extend(doc_lines_from_ast(&python_ast, locator));
}
}
Err(parse_error) => {
@@ -198,7 +190,7 @@ pub fn check_path(
.any(|rule_code| rule_code.lint_source().is_physical_lines())
{
diagnostics.extend(check_physical_lines(
- path, locator, stylist, indexer, &doc_lines, settings, autofix,
+ path, locator, stylist, indexer, &doc_lines, settings,
));
}
@@ -222,8 +214,8 @@ pub fn check_path(
locator,
indexer.comment_ranges(),
&directives.noqa_line_for,
+ error.is_none(),
settings,
- error.as_ref().map_or(autofix, |_| flags::Autofix::Disabled),
);
if noqa.into() {
for index in ignored.iter().rev() {
@@ -293,7 +285,6 @@ pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings
&directives,
settings,
flags::Noqa::Disabled,
- flags::Autofix::Disabled,
);
// Log any parse errors.
@@ -320,7 +311,6 @@ pub fn lint_only(
package: Option<&Path>,
settings: &Settings,
noqa: flags::Noqa,
- autofix: flags::Autofix,
) -> LinterResult<(Vec<Diagnostic>, Option<ImportMap>)> {
// Tokenize once.
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
@@ -353,7 +343,6 @@ pub fn lint_only(
&directives,
settings,
noqa,
- autofix,
);
result.map(|(diagnostics, imports)| {
@@ -444,7 +433,6 @@ pub fn lint_fix<'a>(
&directives,
settings,
noqa,
- flags::Autofix::Enabled,
);
if iterations == 0 {
@@ -509,8 +497,8 @@ fn collect_rule_codes(rules: impl IntoIterator<Item = Rule>) -> String {
#[allow(clippy::print_stderr)]
fn report_failed_to_converge_error(path: &Path, transformed: &str, diagnostics: &[Diagnostic]) {
+ let codes = collect_rule_codes(diagnostics.iter().map(|diagnostic| diagnostic.kind.rule()));
if cfg!(debug_assertions) {
- let codes = collect_rule_codes(diagnostics.iter().map(|diagnostic| diagnostic.kind.rule()));
eprintln!(
"{}: Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",
"debug error".red().bold(),
@@ -528,13 +516,14 @@ This indicates a bug in `{}`. If you could open an issue at:
{}/issues/new?title=%5BInfinite%20loop%5D
-...quoting the contents of `{}`, along with the `pyproject.toml` settings and executed command, we'd be very appreciative!
+...quoting the contents of `{}`, the rule codes {}, along with the `pyproject.toml` settings and executed command, we'd be very appreciative!
"#,
"error".red().bold(),
MAX_ITERATIONS,
CARGO_PKG_NAME,
CARGO_PKG_REPOSITORY,
fs::relativize_path(path),
+ codes
);
}
}
@@ -546,8 +535,8 @@ fn report_autofix_syntax_error(
error: &ParseError,
rules: impl IntoIterator<Item = Rule>,
) {
+ let codes = collect_rule_codes(rules);
if cfg!(debug_assertions) {
- let codes = collect_rule_codes(rules);
eprintln!(
"{}: Autofix introduced a syntax error in `{}` with rule codes {}: {}\n---\n{}\n---",
"error".red().bold(),
@@ -565,12 +554,13 @@ This indicates a bug in `{}`. If you could open an issue at:
{}/issues/new?title=%5BAutofix%20error%5D
-...quoting the contents of `{}`, along with the `pyproject.toml` settings and executed command, we'd be very appreciative!
+...quoting the contents of `{}`, the rule codes {}, along with the `pyproject.toml` settings and executed command, we'd be very appreciative!
"#,
"error".red().bold(),
CARGO_PKG_NAME,
CARGO_PKG_REPOSITORY,
fs::relativize_path(path),
+ codes,
);
}
}
diff --git a/crates/ruff/src/logging.rs b/crates/ruff/src/logging.rs
index 1607508ead203..ede7c8f43bc0d 100644
--- a/crates/ruff/src/logging.rs
+++ b/crates/ruff/src/logging.rs
@@ -1,15 +1,17 @@
-use std::fmt::{Display, Formatter};
+use std::fmt::{Display, Formatter, Write};
use std::path::Path;
use std::sync::Mutex;
-use crate::fs;
use anyhow::Result;
use colored::Colorize;
use fern;
use log::Level;
use once_cell::sync::Lazy;
+use rustpython_parser::{ParseError, ParseErrorType};
+
use ruff_python_ast::source_code::SourceCode;
-use rustpython_parser::ParseError;
+
+use crate::fs;
pub(crate) static WARNINGS: Lazy<Mutex<Vec<String>>> = Lazy::new(Mutex::default);
@@ -145,17 +147,89 @@ impl<'a> DisplayParseError<'a> {
impl Display for DisplayParseError<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
- let source_location = self.source_code.source_location(self.error.location);
+ let source_location = self.source_code.source_location(self.error.offset);
write!(
f,
"{header} {path}{colon}{row}{colon}{column}{colon} {inner}",
- header = "Failed to parse ".bold(),
+ header = "Failed to parse".bold(),
path = fs::relativize_path(Path::new(&self.error.source_path)).bold(),
row = source_location.row,
column = source_location.column,
colon = ":".cyan(),
- inner = &self.error.error
+ inner = &DisplayParseErrorType(&self.error.error)
+ )
+ }
+}
+
+pub(crate) struct DisplayParseErrorType<'a>(&'a ParseErrorType);
+
+impl<'a> DisplayParseErrorType<'a> {
+ pub(crate) fn new(error: &'a ParseErrorType) -> Self {
+ Self(error)
+ }
+}
+
+impl Display for DisplayParseErrorType<'_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ match self.0 {
+ ParseErrorType::Eof => write!(f, "Expected token but reached end of file."),
+ ParseErrorType::ExtraToken(ref tok) => write!(
+ f,
+ "Got extraneous token: {tok}",
+ tok = TruncateAtNewline(&tok)
+ ),
+ ParseErrorType::InvalidToken => write!(f, "Got invalid token"),
+ ParseErrorType::UnrecognizedToken(ref tok, ref expected) => {
+ if let Some(expected) = expected.as_ref() {
+ write!(
+ f,
+ "expected '{expected}', but got {tok}",
+ tok = TruncateAtNewline(&tok)
+ )
+ } else {
+ write!(f, "unexpected token {tok}", tok = TruncateAtNewline(&tok))
+ }
+ }
+ ParseErrorType::Lexical(ref error) => write!(f, "{error}"),
+ }
+ }
+}
+
+/// Truncates the display text before the first newline character to avoid line breaks.
+struct TruncateAtNewline<'a>(&'a dyn Display);
+
+impl Display for TruncateAtNewline<'_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ struct TruncateAdapter<'a> {
+ inner: &'a mut dyn Write,
+ after_new_line: bool,
+ }
+
+ impl Write for TruncateAdapter<'_> {
+ fn write_str(&mut self, s: &str) -> std::fmt::Result {
+ if self.after_new_line {
+ Ok(())
+ } else {
+ if let Some(end) = s.find(['\n', '\r']) {
+ self.inner.write_str(&s[..end])?;
+ self.inner.write_str("\u{23ce}...")?;
+ self.after_new_line = true;
+ Ok(())
+ } else {
+ self.inner.write_str(s)
+ }
+ }
+ }
+ }
+
+ write!(
+ TruncateAdapter {
+ inner: f,
+ after_new_line: false,
+ },
+ "{}",
+ self.0
)
}
}
diff --git a/crates/ruff/src/message/azure.rs b/crates/ruff/src/message/azure.rs
index f29a89ce9cbc6..d5119faca03aa 100644
--- a/crates/ruff/src/message/azure.rs
+++ b/crates/ruff/src/message/azure.rs
@@ -1,7 +1,9 @@
+use std::io::Write;
+
+use ruff_python_ast::source_code::SourceLocation;
+
use crate::message::{Emitter, EmitterContext, Message};
use crate::registry::AsRule;
-use ruff_python_ast::source_code::{OneIndexed, SourceLocation};
-use std::io::Write;
/// Generate error logging commands for Azure Pipelines format.
/// See [documentation](https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning)
@@ -19,10 +21,7 @@ impl Emitter for AzureEmitter {
let location = if context.is_jupyter_notebook(message.filename()) {
// We can't give a reasonable location for the structured formats,
// so we show one that's clearly a fallback
- SourceLocation {
- row: OneIndexed::from_zero_indexed(0),
- column: OneIndexed::from_zero_indexed(0),
- }
+ SourceLocation::default()
} else {
message.compute_start_location()
};
@@ -45,9 +44,10 @@ impl Emitter for AzureEmitter {
#[cfg(test)]
mod tests {
+ use insta::assert_snapshot;
+
use crate::message::tests::{capture_emitter_output, create_messages};
use crate::message::AzureEmitter;
- use insta::assert_snapshot;
#[test]
fn output() {
diff --git a/crates/ruff/src/message/diff.rs b/crates/ruff/src/message/diff.rs
index 76b9ef2901ace..8bda1c0534d51 100644
--- a/crates/ruff/src/message/diff.rs
+++ b/crates/ruff/src/message/diff.rs
@@ -1,11 +1,15 @@
-use crate::message::Message;
+use std::fmt::{Display, Formatter};
+use std::num::NonZeroUsize;
+
use colored::{Color, ColoredString, Colorize, Styles};
-use ruff_diagnostics::Fix;
-use ruff_python_ast::source_code::{OneIndexed, SourceFile};
+use itertools::Itertools;
use ruff_text_size::{TextRange, TextSize};
use similar::{ChangeTag, TextDiff};
-use std::fmt::{Display, Formatter};
-use std::num::NonZeroUsize;
+
+use ruff_diagnostics::{Applicability, Fix};
+use ruff_python_ast::source_code::{OneIndexed, SourceFile};
+
+use crate::message::Message;
/// Renders a diff that shows the code fixes.
///
@@ -21,15 +25,11 @@ pub(super) struct Diff<'a> {
}
impl<'a> Diff<'a> {
- pub fn from_message(message: &'a Message) -> Option<Diff> {
- if message.fix.is_empty() {
- None
- } else {
- Some(Diff {
- source_code: &message.file,
- fix: &message.fix,
- })
- }
+ pub(crate) fn from_message(message: &'a Message) -> Option<Diff> {
+ message.fix.as_ref().map(|fix| Diff {
+ source_code: &message.file,
+ fix,
+ })
}
}
@@ -38,7 +38,12 @@ impl Display for Diff<'_> {
let mut output = String::with_capacity(self.source_code.source_text().len());
let mut last_end = TextSize::default();
- for edit in self.fix.edits() {
+ for edit in self
+ .fix
+ .edits()
+ .iter()
+ .sorted_unstable_by_key(|edit| edit.start())
+ {
output.push_str(
self.source_code
.slice(TextRange::new(last_end, edit.start())),
@@ -51,7 +56,13 @@ impl Display for Diff<'_> {
let diff = TextDiff::from_lines(self.source_code.source_text(), &output);
- writeln!(f, "{}", "ℹ Suggested fix".blue())?;
+ let message = match self.fix.applicability() {
+ Applicability::Automatic => "Fix",
+ Applicability::Suggested => "Suggested fix",
+ Applicability::Manual => "Possible fix",
+ Applicability::Unspecified => "Suggested fix", // For backwards compatibility, unspecified fixes are 'suggested'
+ };
+ writeln!(f, "ℹ {}", message.blue())?;
let (largest_old, largest_new) = diff
.ops()
diff --git a/crates/ruff/src/message/github.rs b/crates/ruff/src/message/github.rs
index 5b3552473dacc..23ddae5d6701d 100644
--- a/crates/ruff/src/message/github.rs
+++ b/crates/ruff/src/message/github.rs
@@ -1,8 +1,10 @@
+use std::io::Write;
+
+use ruff_python_ast::source_code::SourceLocation;
+
use crate::fs::relativize_path;
use crate::message::{Emitter, EmitterContext, Message};
use crate::registry::AsRule;
-use ruff_python_ast::source_code::{OneIndexed, SourceLocation};
-use std::io::Write;
/// Generate error workflow command in GitHub Actions format.
/// See: [GitHub documentation](https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message)
@@ -21,10 +23,7 @@ impl Emitter for GithubEmitter {
let location = if context.is_jupyter_notebook(message.filename()) {
// We can't give a reasonable location for the structured formats,
// so we show one that's clearly a fallback
- SourceLocation {
- row: OneIndexed::from_zero_indexed(0),
- column: OneIndexed::from_zero_indexed(0),
- }
+ SourceLocation::default()
} else {
source_location.clone()
};
@@ -60,9 +59,10 @@ impl Emitter for GithubEmitter {
#[cfg(test)]
mod tests {
+ use insta::assert_snapshot;
+
use crate::message::tests::{capture_emitter_output, create_messages};
use crate::message::GithubEmitter;
- use insta::assert_snapshot;
#[test]
fn output() {
diff --git a/crates/ruff/src/message/gitlab.rs b/crates/ruff/src/message/gitlab.rs
index 355bdf0dbdea2..8538245b391c8 100644
--- a/crates/ruff/src/message/gitlab.rs
+++ b/crates/ruff/src/message/gitlab.rs
@@ -1,14 +1,17 @@
-use crate::fs::{relativize_path, relativize_path_to};
-use crate::message::{Emitter, EmitterContext, Message};
-use crate::registry::AsRule;
-use ruff_python_ast::source_code::SourceLocation;
-use serde::ser::SerializeSeq;
-use serde::{Serialize, Serializer};
-use serde_json::json;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::io::Write;
+use serde::ser::SerializeSeq;
+use serde::{Serialize, Serializer};
+use serde_json::json;
+
+use ruff_python_ast::source_code::SourceLocation;
+
+use crate::fs::{relativize_path, relativize_path_to};
+use crate::message::{Emitter, EmitterContext, Message};
+use crate::registry::AsRule;
+
/// Generate JSON with violations in GitLab CI format
// https://docs.gitlab.com/ee/ci/testing/code_quality.html#implement-a-custom-tool
pub struct GitlabEmitter {
@@ -122,9 +125,10 @@ fn fingerprint(
#[cfg(test)]
mod tests {
+ use insta::assert_snapshot;
+
use crate::message::tests::{capture_emitter_output, create_messages};
use crate::message::GitlabEmitter;
- use insta::assert_snapshot;
#[test]
fn output() {
diff --git a/crates/ruff/src/message/grouped.rs b/crates/ruff/src/message/grouped.rs
index 4502f2e20c334..5f8bfa411dbf5 100644
--- a/crates/ruff/src/message/grouped.rs
+++ b/crates/ruff/src/message/grouped.rs
@@ -1,3 +1,11 @@
+use std::fmt::{Display, Formatter};
+use std::io::Write;
+use std::num::NonZeroUsize;
+
+use colored::Colorize;
+
+use ruff_python_ast::source_code::OneIndexed;
+
use crate::fs::relativize_path;
use crate::jupyter::JupyterIndex;
use crate::message::diff::calculate_print_width;
@@ -5,11 +13,6 @@ use crate::message::text::{MessageCodeFrame, RuleCodeAndBody};
use crate::message::{
group_messages_by_filename, Emitter, EmitterContext, Message, MessageWithLocation,
};
-use colored::Colorize;
-use ruff_python_ast::source_code::OneIndexed;
-use std::fmt::{Display, Formatter};
-use std::io::Write;
-use std::num::NonZeroUsize;
#[derive(Default)]
pub struct GroupedEmitter {
@@ -71,7 +74,12 @@ impl Emitter for GroupedEmitter {
}
)?;
}
- writeln!(writer)?;
+
+ // Print a blank line between files, unless we're showing the source, in which case
+ // we'll have already printed a blank line between messages.
+ if !self.show_source {
+ writeln!(writer)?;
+ }
}
Ok(())
@@ -125,7 +133,7 @@ impl Display for DisplayGroupedMessage<'_> {
self.column_length.get() - calculate_print_width(start_location.column).get()
),
code_and_body = RuleCodeAndBody {
- message_kind: &message.kind,
+ message,
show_fix_status: self.show_fix_status
},
)?;
@@ -133,11 +141,9 @@ impl Display for DisplayGroupedMessage<'_> {
if self.show_source {
use std::fmt::Write;
let mut padded = PadAdapter::new(f);
- write!(padded, "{}", MessageCodeFrame { message })?;
+ writeln!(padded, "{}", MessageCodeFrame { message })?;
}
- writeln!(f)?;
-
Ok(())
}
}
@@ -175,12 +181,21 @@ impl std::fmt::Write for PadAdapter<'_> {
#[cfg(test)]
mod tests {
+ use insta::assert_snapshot;
+
use crate::message::tests::{capture_emitter_output, create_messages};
use crate::message::GroupedEmitter;
- use insta::assert_snapshot;
#[test]
fn default() {
+ let mut emitter = GroupedEmitter::default();
+ let content = capture_emitter_output(&mut emitter, &create_messages());
+
+ assert_snapshot!(content);
+ }
+
+ #[test]
+ fn show_source() {
let mut emitter = GroupedEmitter::default().with_show_source(true);
let content = capture_emitter_output(&mut emitter, &create_messages());
diff --git a/crates/ruff/src/message/json.rs b/crates/ruff/src/message/json.rs
index 1e980f78b59f0..c3adda51ba3d8 100644
--- a/crates/ruff/src/message/json.rs
+++ b/crates/ruff/src/message/json.rs
@@ -1,11 +1,14 @@
-use crate::message::{Emitter, EmitterContext, Message};
-use crate::registry::AsRule;
-use ruff_diagnostics::Edit;
-use ruff_python_ast::source_code::{SourceCode, SourceLocation};
+use std::io::Write;
+
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
-use serde_json::{json, Value};
-use std::io::Write;
+use serde_json::json;
+
+use ruff_diagnostics::Edit;
+use ruff_python_ast::source_code::SourceCode;
+
+use crate::message::{Emitter, EmitterContext, Message};
+use crate::registry::AsRule;
#[derive(Default)]
pub struct JsonEmitter;
@@ -37,14 +40,13 @@ impl Serialize for ExpandedMessages<'_> {
for message in self.messages {
let source_code = message.file.to_source_code();
- let fix = if message.fix.is_empty() {
- None
- } else {
- Some(json!({
+ let fix = message.fix.as_ref().map(|fix| {
+ json!({
+ "applicability": fix.applicability(),
"message": message.kind.suggestion.as_deref(),
- "edits": &ExpandedEdits { edits: message.fix.edits(), source_code: &source_code },
- }))
- };
+ "edits": &ExpandedEdits { edits: fix.edits(), source_code: &source_code },
+ })
+ });
let start_location = source_code.source_location(message.start());
let end_location = source_code.source_location(message.end());
@@ -80,12 +82,10 @@ impl Serialize for ExpandedEdits<'_> {
let mut s = serializer.serialize_seq(Some(self.edits.len()))?;
for edit in self.edits {
- let start_location = self.source_code.source_location(edit.start());
- let end_location = self.source_code.source_location(edit.end());
let value = json!({
"content": edit.content().unwrap_or_default(),
- "location": to_zero_indexed_column(&start_location),
- "end_location": to_zero_indexed_column(&end_location)
+ "location": self.source_code.source_location(edit.start()),
+ "end_location": self.source_code.source_location(edit.end())
});
s.serialize_element(&value)?;
@@ -95,18 +95,12 @@ impl Serialize for ExpandedEdits<'_> {
}
}
-fn to_zero_indexed_column(location: &SourceLocation) -> Value {
- json!({
- "row": location.row,
- "column": location.column.to_zero_indexed()
- })
-}
-
#[cfg(test)]
mod tests {
+ use insta::assert_snapshot;
+
use crate::message::tests::{capture_emitter_output, create_messages};
use crate::message::JsonEmitter;
- use insta::assert_snapshot;
#[test]
fn output() {
diff --git a/crates/ruff/src/message/junit.rs b/crates/ruff/src/message/junit.rs
index 2ed8e6dd7f007..f910b7e6ed4b0 100644
--- a/crates/ruff/src/message/junit.rs
+++ b/crates/ruff/src/message/junit.rs
@@ -1,11 +1,14 @@
+use std::io::Write;
+use std::path::Path;
+
+use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};
+
+use ruff_python_ast::source_code::SourceLocation;
+
use crate::message::{
group_messages_by_filename, Emitter, EmitterContext, Message, MessageWithLocation,
};
use crate::registry::AsRule;
-use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};
-use ruff_python_ast::source_code::{OneIndexed, SourceLocation};
-use std::io::Write;
-use std::path::Path;
#[derive(Default)]
pub struct JunitEmitter;
@@ -19,52 +22,60 @@ impl Emitter for JunitEmitter {
) -> anyhow::Result<()> {
let mut report = Report::new("ruff");
- for (filename, messages) in group_messages_by_filename(messages) {
- let mut test_suite = TestSuite::new(filename);
+ if messages.is_empty() {
+ let mut test_suite = TestSuite::new("ruff");
test_suite
.extra
.insert("package".to_string(), "org.ruff".to_string());
+ let mut case = TestCase::new("No errors found", TestCaseStatus::success());
+ case.set_classname("ruff");
+ test_suite.add_test_case(case);
+ report.add_test_suite(test_suite);
+ } else {
+ for (filename, messages) in group_messages_by_filename(messages) {
+ let mut test_suite = TestSuite::new(filename);
+ test_suite
+ .extra
+ .insert("package".to_string(), "org.ruff".to_string());
- for message in messages {
- let MessageWithLocation {
- message,
- start_location,
- } = message;
- let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
- status.set_message(message.kind.body.clone());
- let location = if context.is_jupyter_notebook(message.filename()) {
- // We can't give a reasonable location for the structured formats,
- // so we show one that's clearly a fallback
- SourceLocation {
- row: OneIndexed::from_zero_indexed(0),
- column: OneIndexed::from_zero_indexed(0),
- }
- } else {
- start_location
- };
+ for message in messages {
+ let MessageWithLocation {
+ message,
+ start_location,
+ } = message;
+ let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
+ status.set_message(message.kind.body.clone());
+ let location = if context.is_jupyter_notebook(message.filename()) {
+ // We can't give a reasonable location for the structured formats,
+ // so we show one that's clearly a fallback
+ SourceLocation::default()
+ } else {
+ start_location
+ };
- status.set_description(format!(
- "line {row}, col {col}, {body}",
- row = location.row,
- col = location.column,
- body = message.kind.body
- ));
- let mut case = TestCase::new(
- format!("org.ruff.{}", message.kind.rule().noqa_code()),
- status,
- );
- let file_path = Path::new(filename);
- let file_stem = file_path.file_stem().unwrap().to_str().unwrap();
- let classname = file_path.parent().unwrap().join(file_stem);
- case.set_classname(classname.to_str().unwrap());
- case.extra
- .insert("line".to_string(), location.row.to_string());
- case.extra
- .insert("column".to_string(), location.column.to_string());
+ status.set_description(format!(
+ "line {row}, col {col}, {body}",
+ row = location.row,
+ col = location.column,
+ body = message.kind.body
+ ));
+ let mut case = TestCase::new(
+ format!("org.ruff.{}", message.kind.rule().noqa_code()),
+ status,
+ );
+ let file_path = Path::new(filename);
+ let file_stem = file_path.file_stem().unwrap().to_str().unwrap();
+ let classname = file_path.parent().unwrap().join(file_stem);
+ case.set_classname(classname.to_str().unwrap());
+ case.extra
+ .insert("line".to_string(), location.row.to_string());
+ case.extra
+ .insert("column".to_string(), location.column.to_string());
- test_suite.add_test_case(case);
+ test_suite.add_test_case(case);
+ }
+ report.add_test_suite(test_suite);
}
- report.add_test_suite(test_suite);
}
report.serialize(writer)?;
@@ -75,9 +86,10 @@ impl Emitter for JunitEmitter {
#[cfg(test)]
mod tests {
+ use insta::assert_snapshot;
+
use crate::message::tests::{capture_emitter_output, create_messages};
use crate::message::JunitEmitter;
- use insta::assert_snapshot;
#[test]
fn output() {
diff --git a/crates/ruff/src/message/mod.rs b/crates/ruff/src/message/mod.rs
index 41c1776a884ba..072bf79fae918 100644
--- a/crates/ruff/src/message/mod.rs
+++ b/crates/ruff/src/message/mod.rs
@@ -1,20 +1,11 @@
-mod azure;
-mod diff;
-mod github;
-mod gitlab;
-mod grouped;
-mod json;
-mod junit;
-mod pylint;
-mod text;
-
-use ruff_text_size::{TextRange, TextSize};
-use rustc_hash::FxHashMap;
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::io::Write;
use std::ops::Deref;
+use ruff_text_size::{TextRange, TextSize};
+use rustc_hash::FxHashMap;
+
pub use azure::AzureEmitter;
pub use github::GithubEmitter;
pub use gitlab::GitlabEmitter;
@@ -22,18 +13,27 @@ pub use grouped::GroupedEmitter;
pub use json::JsonEmitter;
pub use junit::JunitEmitter;
pub use pylint::PylintEmitter;
+use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix};
+use ruff_python_ast::source_code::{SourceFile, SourceLocation};
pub use text::TextEmitter;
use crate::jupyter::JupyterIndex;
-use crate::registry::AsRule;
-use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix};
-use ruff_python_ast::source_code::{SourceFile, SourceLocation};
+
+mod azure;
+mod diff;
+mod github;
+mod gitlab;
+mod grouped;
+mod json;
+mod junit;
+mod pylint;
+mod text;
#[derive(Debug, PartialEq, Eq)]
pub struct Message {
pub kind: DiagnosticKind,
pub range: TextRange,
- pub fix: Fix,
+ pub fix: Option<Fix>,
pub file: SourceFile,
pub noqa_offset: TextSize,
}
@@ -76,11 +76,7 @@ impl Message {
impl Ord for Message {
fn cmp(&self, other: &Self) -> Ordering {
- (self.filename(), self.start(), self.kind.rule()).cmp(&(
- other.filename(),
- other.start(),
- other.kind.rule(),
- ))
+ (&self.file, self.start()).cmp(&(&other.file, other.start()))
}
}
@@ -152,13 +148,14 @@ impl<'a> EmitterContext<'a> {
#[cfg(test)]
mod tests {
- use crate::message::{Emitter, EmitterContext, Message};
- use crate::rules::pyflakes::rules::{UndefinedName, UnusedImport, UnusedVariable};
- use ruff_diagnostics::{Diagnostic, Edit, Fix};
- use ruff_python_ast::source_code::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use rustc_hash::FxHashMap;
+ use ruff_diagnostics::{Diagnostic, DiagnosticKind, Edit, Fix};
+ use ruff_python_ast::source_code::SourceFileBuilder;
+
+ use crate::message::{Emitter, EmitterContext, Message};
+
+ pub(super) fn create_messages() -> Vec<Message> {
let fib = r#"import os
@@ -175,32 +172,40 @@ def fibonacci(n):
"#;
let unused_import = Diagnostic::new(
- UnusedImport {
- name: "os".to_string(),
- context: None,
- multiple: false,
+ DiagnosticKind {
+ name: "UnusedImport".to_string(),
+ body: "`os` imported but unused".to_string(),
+ suggestion: Some("Remove unused import: `os`".to_string()),
},
TextRange::new(TextSize::from(7), TextSize::from(9)),
- );
+ )
+ .with_fix(Fix::suggested(Edit::range_deletion(TextRange::new(
+ TextSize::from(0),
+ TextSize::from(10),
+ ))));
let fib_source = SourceFileBuilder::new("fib.py", fib).finish();
let unused_variable = Diagnostic::new(
- UnusedVariable {
- name: "x".to_string(),
+ DiagnosticKind {
+ name: "UnusedVariable".to_string(),
+ body: "Local variable `x` is assigned to but never used".to_string(),
+ suggestion: Some("Remove assignment to unused variable `x`".to_string()),
},
TextRange::new(TextSize::from(94), TextSize::from(95)),
)
- .with_fix(Fix::new(vec![Edit::deletion(
+ .with_fix(Fix::suggested(Edit::deletion(
TextSize::from(94),
TextSize::from(99),
- )]));
+ )));
let file_2 = r#"if a == 1: pass"#;
let undefined_name = Diagnostic::new(
- UndefinedName {
- name: "a".to_string(),
+ DiagnosticKind {
+ name: "UndefinedName".to_string(),
+ body: "Undefined name `a`".to_string(),
+ suggestion: None,
},
TextRange::new(TextSize::from(3), TextSize::from(4)),
);
diff --git a/crates/ruff/src/message/pylint.rs b/crates/ruff/src/message/pylint.rs
index d456fa6e1083c..edede90422f16 100644
--- a/crates/ruff/src/message/pylint.rs
+++ b/crates/ruff/src/message/pylint.rs
@@ -1,8 +1,10 @@
+use std::io::Write;
+
+use ruff_python_ast::source_code::OneIndexed;
+
use crate::fs::relativize_path;
use crate::message::{Emitter, EmitterContext, Message};
use crate::registry::AsRule;
-use ruff_python_ast::source_code::OneIndexed;
-use std::io::Write;
/// Generate violations in Pylint format.
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
@@ -40,9 +42,10 @@ impl Emitter for PylintEmitter {
#[cfg(test)]
mod tests {
+ use insta::assert_snapshot;
+
use crate::message::tests::{capture_emitter_output, create_messages};
use crate::message::PylintEmitter;
- use insta::assert_snapshot;
#[test]
fn output() {
diff --git a/crates/ruff/src/message/snapshots/ruff__message__grouped__tests__default.snap b/crates/ruff/src/message/snapshots/ruff__message__grouped__tests__default.snap
index b8f57564005dd..00d89c9ceb6b0 100644
--- a/crates/ruff/src/message/snapshots/ruff__message__grouped__tests__default.snap
+++ b/crates/ruff/src/message/snapshots/ruff__message__grouped__tests__default.snap
@@ -4,30 +4,9 @@ expression: content
---
fib.py:
1:8 F401 `os` imported but unused
- |
- 1 | import os
- | ^^ F401
- |
- = help: Remove unused import: `os`
-
6:5 F841 Local variable `x` is assigned to but never used
- |
- 6 | def fibonacci(n):
- 7 | """Compute the nth number in the Fibonacci sequence."""
- 8 | x = 1
- | ^ F841
- 9 | if n == 0:
- 10 | return 0
- |
- = help: Remove assignment to unused variable `x`
-
undef.py:
1:4 F821 Undefined name `a`
- |
- 1 | if a == 1: pass
- | ^ F821
- |
-
diff --git a/crates/ruff/src/message/snapshots/ruff__message__grouped__tests__fix_status.snap b/crates/ruff/src/message/snapshots/ruff__message__grouped__tests__fix_status.snap
index ebea79e6400b7..5da6d7cfaa447 100644
--- a/crates/ruff/src/message/snapshots/ruff__message__grouped__tests__fix_status.snap
+++ b/crates/ruff/src/message/snapshots/ruff__message__grouped__tests__fix_status.snap
@@ -9,7 +9,7 @@ fib.py:
| ^^ F401
|
= help: Remove unused import: `os`
-
+
6:5 F841 [*] Local variable `x` is assigned to but never used
|
6 | def fibonacci(n):
@@ -20,14 +20,12 @@ fib.py:
10 | return 0
|
= help: Remove assignment to unused variable `x`
-
-
+
undef.py:
1:4 F821 Undefined name `a`
|
1 | if a == 1: pass
| ^ F821
|
-
-
+
diff --git a/crates/ruff/src/message/snapshots/ruff__message__grouped__tests__show_source.snap b/crates/ruff/src/message/snapshots/ruff__message__grouped__tests__show_source.snap
new file mode 100644
index 0000000000000..957c3d877b37d
--- /dev/null
+++ b/crates/ruff/src/message/snapshots/ruff__message__grouped__tests__show_source.snap
@@ -0,0 +1,31 @@
+---
+source: crates/ruff/src/message/grouped.rs
+expression: content
+---
+fib.py:
+ 1:8 F401 `os` imported but unused
+ |
+ 1 | import os
+ | ^^ F401
+ |
+ = help: Remove unused import: `os`
+
+ 6:5 F841 Local variable `x` is assigned to but never used
+ |
+ 6 | def fibonacci(n):
+ 7 | """Compute the nth number in the Fibonacci sequence."""
+ 8 | x = 1
+ | ^ F841
+ 9 | if n == 0:
+ 10 | return 0
+ |
+ = help: Remove assignment to unused variable `x`
+
+undef.py:
+ 1:4 F821 Undefined name `a`
+ |
+ 1 | if a == 1: pass
+ | ^ F821
+ |
+
+
diff --git a/crates/ruff/src/message/snapshots/ruff__message__json__tests__output.snap b/crates/ruff/src/message/snapshots/ruff__message__json__tests__output.snap
index 2aa4807b1033f..e9272931d6a33 100644
--- a/crates/ruff/src/message/snapshots/ruff__message__json__tests__output.snap
+++ b/crates/ruff/src/message/snapshots/ruff__message__json__tests__output.snap
@@ -6,7 +6,23 @@ expression: content
{
"code": "F401",
"message": "`os` imported but unused",
- "fix": null,
+ "fix": {
+ "applicability": "Suggested",
+ "message": "Remove unused import: `os`",
+ "edits": [
+ {
+ "content": "",
+ "location": {
+ "row": 1,
+ "column": 1
+ },
+ "end_location": {
+ "row": 2,
+ "column": 1
+ }
+ }
+ ]
+ },
"location": {
"row": 1,
"column": 8
@@ -22,17 +38,18 @@ expression: content
"code": "F841",
"message": "Local variable `x` is assigned to but never used",
"fix": {
+ "applicability": "Suggested",
"message": "Remove assignment to unused variable `x`",
"edits": [
{
"content": "",
"location": {
"row": 6,
- "column": 4
+ "column": 5
},
"end_location": {
"row": 6,
- "column": 9
+ "column": 10
}
}
]
diff --git a/crates/ruff/src/message/text.rs b/crates/ruff/src/message/text.rs
index 547cb64e77a3c..d467926a67d61 100644
--- a/crates/ruff/src/message/text.rs
+++ b/crates/ruff/src/message/text.rs
@@ -1,23 +1,29 @@
-use crate::fs::relativize_path;
-use crate::message::diff::Diff;
-use crate::message::{Emitter, EmitterContext, Message};
-use crate::registry::AsRule;
+use std::borrow::Cow;
+use std::fmt::{Display, Formatter};
+use std::io::Write;
+
use annotate_snippets::display_list::{DisplayList, FormatOptions};
use annotate_snippets::snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation};
use bitflags::bitflags;
use colored::Colorize;
-use ruff_diagnostics::DiagnosticKind;
-use ruff_python_ast::source_code::{OneIndexed, SourceLocation};
use ruff_text_size::{TextRange, TextSize};
-use std::borrow::Cow;
-use std::fmt::{Display, Formatter};
-use std::io::Write;
+
+use ruff_python_ast::source_code::{OneIndexed, SourceLocation};
+
+use crate::fs::relativize_path;
+use crate::line_width::{LineWidth, TabSize};
+use crate::message::diff::Diff;
+use crate::message::{Emitter, EmitterContext, Message};
+use crate::registry::AsRule;
bitflags! {
#[derive(Default)]
struct EmitterFlags: u8 {
+ /// Whether to show the fix status of a diagnostic.
const SHOW_FIX_STATUS = 0b0000_0001;
- const SHOW_FIX = 0b0000_0010;
+ /// Whether to show the diff of a fix, for diagnostics that have a fix.
+ const SHOW_FIX_DIFF = 0b0000_0010;
+ /// Whether to show the source code of a diagnostic.
const SHOW_SOURCE = 0b0000_0100;
}
}
@@ -36,8 +42,8 @@ impl TextEmitter {
}
#[must_use]
- pub fn with_show_fix(mut self, show_fix: bool) -> Self {
- self.flags.set(EmitterFlags::SHOW_FIX, show_fix);
+ pub fn with_show_fix_diff(mut self, show_fix_diff: bool) -> Self {
+ self.flags.set(EmitterFlags::SHOW_FIX_DIFF, show_fix_diff);
self
}
@@ -93,7 +99,7 @@ impl Emitter for TextEmitter {
col = diagnostic_location.column,
sep = ":".cyan(),
code_and_body = RuleCodeAndBody {
- message_kind: &message.kind,
+ message,
show_fix_status: self.flags.contains(EmitterFlags::SHOW_FIX_STATUS)
}
)?;
@@ -102,7 +108,7 @@ impl Emitter for TextEmitter {
writeln!(writer, "{}", MessageCodeFrame { message })?;
}
- if self.flags.contains(EmitterFlags::SHOW_FIX) {
+ if self.flags.contains(EmitterFlags::SHOW_FIX_DIFF) {
if let Some(diff) = Diff::from_message(message) {
writeln!(writer, "{diff}")?;
}
@@ -114,45 +120,35 @@ impl Emitter for TextEmitter {
}
pub(super) struct RuleCodeAndBody<'a> {
- pub message_kind: &'a DiagnosticKind,
- pub show_fix_status: bool,
+ pub(crate) message: &'a Message,
+ pub(crate) show_fix_status: bool,
}
impl Display for RuleCodeAndBody<'_> {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- if self.show_fix_status && self.message_kind.fixable {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ let kind = &self.message.kind;
+
+ if self.show_fix_status && self.message.fix.is_some() {
write!(
f,
"{code} {autofix}{body}",
- code = self
- .message_kind
- .rule()
- .noqa_code()
- .to_string()
- .red()
- .bold(),
+ code = kind.rule().noqa_code().to_string().red().bold(),
autofix = format_args!("[{}] ", "*".cyan()),
- body = self.message_kind.body,
+ body = kind.body,
)
} else {
write!(
f,
"{code} {body}",
- code = self
- .message_kind
- .rule()
- .noqa_code()
- .to_string()
- .red()
- .bold(),
- body = self.message_kind.body,
+ code = kind.rule().noqa_code().to_string().red().bold(),
+ body = kind.body,
)
}
}
}
pub(super) struct MessageCodeFrame<'a> {
- pub message: &'a Message,
+ pub(crate) message: &'a Message,
}
impl Display for MessageCodeFrame<'_> {
@@ -245,36 +241,35 @@ impl Display for MessageCodeFrame<'_> {
}
fn replace_whitespace(source: &str, annotation_range: TextRange) -> SourceCode {
- static TAB_SIZE: TextSize = TextSize::new(4);
+ static TAB_SIZE: TabSize = TabSize(4); // TODO(jonathan): use `tab-size`
let mut result = String::new();
let mut last_end = 0;
let mut range = annotation_range;
- let mut column = 0;
+ let mut line_width = LineWidth::new(TAB_SIZE);
- for (index, m) in source.match_indices(['\t', '\n', '\r']) {
- match m {
- "\t" => {
- let tab_width = TAB_SIZE - TextSize::new(column % 4);
+ for (index, c) in source.char_indices() {
+ let old_width = line_width.get();
+ line_width = line_width.add_char(c);
- if index < usize::from(annotation_range.start()) {
- range += tab_width - TextSize::new(1);
- } else if index < usize::from(annotation_range.end()) {
- range = range.add_end(tab_width - TextSize::new(1));
- }
+ if matches!(c, '\t') {
+ // SAFETY: The difference is a value in the range [1..TAB_SIZE], which is guaranteed to fit in a `u32`.
+ #[allow(clippy::cast_possible_truncation)]
+ let tab_width = (line_width.get() - old_width) as u32;
- result.push_str(&source[last_end..index]);
+ if index < usize::from(annotation_range.start()) {
+ range += TextSize::new(tab_width - 1);
+ } else if index < usize::from(annotation_range.end()) {
+ range = range.add_end(TextSize::new(tab_width - 1));
+ }
- for _ in 0..u32::from(tab_width) {
- result.push(' ');
- }
+ result.push_str(&source[last_end..index]);
- last_end = index + 1;
+ for _ in 0..tab_width {
+ result.push(' ');
}
- "\n" | "\r" => {
- column = 0;
- }
- _ => unreachable!(),
+
+ last_end = index + 1;
}
}
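
The rewritten `replace_whitespace` above expands tabs to spaces while shifting or widening the annotation range so the caret still points at the right columns. A simplified standalone sketch of the same idea follows; it uses plain `usize` offsets and a fixed tab size in place of `TextSize`, `TextRange`, and `LineWidth`, and treats every non-tab character as one column wide.

```rust
// Simplified sketch: expand tabs to spaces while keeping an annotation range
// (here plain byte offsets) aligned with the expanded text. Not the ruff
// implementation; it ignores `LineWidth` details such as wide characters.
fn expand_tabs(source: &str, mut start: usize, mut end: usize) -> (String, usize, usize) {
    const TAB_SIZE: usize = 4;
    let mut result = String::new();
    let mut column = 0;

    for (index, c) in source.char_indices() {
        if c == '\t' {
            // A tab advances to the next multiple of TAB_SIZE.
            let tab_width = TAB_SIZE - (column % TAB_SIZE);
            // Shift the whole range if the tab is before it, or widen its end
            // if the tab falls inside it.
            if index < start {
                start += tab_width - 1;
                end += tab_width - 1;
            } else if index < end {
                end += tab_width - 1;
            }
            result.extend(std::iter::repeat(' ').take(tab_width));
            column += tab_width;
        } else {
            result.push(c);
            column = if matches!(c, '\n' | '\r') { 0 } else { column + 1 };
        }
    }

    (result, start, end)
}

fn main() {
    // Annotate the `x` in "\tx = 1" (byte range 1..2).
    let (expanded, start, end) = expand_tabs("\tx = 1", 1, 2);
    assert_eq!(expanded, "    x = 1");
    assert_eq!((start, end), (4, 5));
}
```
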
@@ -300,9 +295,10 @@ struct SourceCode<'a> {
#[cfg(test)]
mod tests {
+ use insta::assert_snapshot;
+
use crate::message::tests::{capture_emitter_output, create_messages};
use crate::message::TextEmitter;
- use insta::assert_snapshot;
#[test]
fn default() {
diff --git a/crates/ruff/src/noqa.rs b/crates/ruff/src/noqa.rs
index 4434837f01873..62fcdf4b77142 100644
--- a/crates/ruff/src/noqa.rs
+++ b/crates/ruff/src/noqa.rs
@@ -11,7 +11,7 @@ use regex::Regex;
use ruff_text_size::{TextLen, TextRange, TextSize};
use ruff_diagnostics::Diagnostic;
-use ruff_python_ast::newlines::LineEnding;
+use ruff_newlines::LineEnding;
use ruff_python_ast::source_code::Locator;
use crate::codes::NoqaCode;
@@ -24,10 +24,9 @@ static NOQA_LINE_REGEX: Lazy<Regex> = Lazy::new(|| {
)
.unwrap()
});
-static SPLIT_COMMA_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
#[derive(Debug)]
-pub enum Directive<'a> {
+pub(crate) enum Directive<'a> {
None,
// (leading spaces, noqa_range, trailing_spaces)
All(TextSize, TextRange, TextSize),
@@ -36,7 +35,7 @@ pub enum Directive<'a> {
}
/// Extract the noqa `Directive` from a line of Python source code.
-pub fn extract_noqa_directive<'a>(range: TextRange, locator: &'a Locator) -> Directive<'a> {
+pub(crate) fn extract_noqa_directive<'a>(range: TextRange, locator: &'a Locator) -> Directive<'a> {
let text = &locator.contents()[range];
match NOQA_LINE_REGEX.captures(text) {
Some(caps) => match (
@@ -46,12 +45,12 @@ pub fn extract_noqa_directive<'a>(range: TextRange, locator: &'a Locator) -> Dir
caps.name("trailing_spaces"),
) {
(Some(leading_spaces), Some(noqa), Some(codes), Some(trailing_spaces)) => {
- let codes: Vec<&str> = SPLIT_COMMA_REGEX
- .split(codes.as_str().trim())
+ let codes = codes
+ .as_str()
+ .split(|c: char| c.is_whitespace() || c == ',')
.map(str::trim)
.filter(|code| !code.is_empty())
- .collect();
-
+ .collect_vec();
let start = range.start() + TextSize::try_from(noqa.start()).unwrap();
if codes.is_empty() {
#[allow(deprecated)]
@@ -105,11 +104,11 @@ fn parse_file_exemption(line: &str) -> ParsedExemption {
if remainder.is_empty() {
return ParsedExemption::All;
} else if let Some(codes) = remainder.strip_prefix(':') {
- let codes: Vec<&str> = SPLIT_COMMA_REGEX
- .split(codes.trim())
+ let codes = codes
+ .split(|c: char| c.is_whitespace() || c == ',')
.map(str::trim)
.filter(|code| !code.is_empty())
- .collect();
+ .collect_vec();
if codes.is_empty() {
warn!("Expected rule codes on `noqa` directive: \"{line}\"");
}
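
Both hunks above replace the `SPLIT_COMMA_REGEX`-based split with a plain `str::split` over a character closure. A minimal standalone version of that parsing step is sketched below; `split_noqa_codes` is an illustrative name, not the ruff API.

```rust
// Illustrative only: mirrors the closure-based split used above for the code
// list of a `# noqa: ...` directive; the function name is made up.
fn split_noqa_codes(codes: &str) -> Vec<&str> {
    codes
        .split(|c: char| c.is_whitespace() || c == ',')
        .map(str::trim)
        .filter(|code| !code.is_empty())
        .collect()
}

fn main() {
    assert_eq!(
        split_noqa_codes("F401, E501  W291"),
        vec!["F401", "E501", "W291"]
    );
    assert!(split_noqa_codes("  , ").is_empty());
}
```
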
@@ -123,7 +122,7 @@ fn parse_file_exemption(line: &str) -> ParsedExemption {
/// Returns `true` if the string list of `codes` includes `code` (or an alias
/// thereof).
-pub fn includes(needle: Rule, haystack: &[&str]) -> bool {
+pub(crate) fn includes(needle: Rule, haystack: &[&str]) -> bool {
let needle = needle.noqa_code();
haystack
.iter()
@@ -131,7 +130,7 @@ pub fn includes(needle: Rule, haystack: &[&str]) -> bool {
}
/// Returns `true` if the given [`Rule`] is ignored at the specified `lineno`.
-pub fn rule_is_ignored(
+pub(crate) fn rule_is_ignored(
code: Rule,
offset: TextSize,
noqa_line_for: &NoqaMapping,
@@ -146,7 +145,7 @@ pub fn rule_is_ignored(
}
}
-pub enum FileExemption {
+pub(crate) enum FileExemption {
None,
All,
Codes(Vec<NoqaCode>),
@@ -154,7 +153,7 @@ pub enum FileExemption {
/// Extract the [`FileExemption`] for a given Python source file, enumerating any rules that are
/// globally ignored within the file.
-pub fn file_exemption(contents: &str, comment_ranges: &[TextRange]) -> FileExemption {
+pub(crate) fn file_exemption(contents: &str, comment_ranges: &[TextRange]) -> FileExemption {
let mut exempt_codes: Vec<NoqaCode> = vec![];
for range in comment_ranges {
@@ -184,7 +183,7 @@ pub fn file_exemption(contents: &str, comment_ranges: &[TextRange]) -> FileExemp
}
/// Adds noqa comments to suppress all diagnostics of a file.
-pub fn add_noqa(
+pub(crate) fn add_noqa(
path: &Path,
diagnostics: &[Diagnostic],
locator: &Locator,
@@ -368,9 +367,9 @@ fn push_codes<I: Display>(str: &mut String, codes: impl Iterator<Item = I>) {
#[derive(Debug)]
pub(crate) struct NoqaDirectiveLine<'a> {
// The range of the text line for which the noqa directive applies.
- pub range: TextRange,
- pub directive: Directive<'a>,
- pub matches: Vec<NoqaCode>,
+ pub(crate) range: TextRange,
+ pub(crate) directive: Directive<'a>,
+ pub(crate) matches: Vec<NoqaCode>,
}
#[derive(Debug, Default)]
@@ -379,7 +378,10 @@ pub(crate) struct NoqaDirectives<'a> {
}
impl<'a> NoqaDirectives<'a> {
- pub fn from_commented_ranges(comment_ranges: &[TextRange], locator: &'a Locator<'a>) -> Self {
+ pub(crate) fn from_commented_ranges(
+ comment_ranges: &[TextRange],
+ locator: &'a Locator<'a>,
+ ) -> Self {
let mut directives = Vec::new();
for comment_range in comment_ranges {
@@ -409,11 +411,11 @@ impl<'a> NoqaDirectives<'a> {
Self { inner: directives }
}
- pub fn find_line_with_directive(&self, offset: TextSize) -> Option<&NoqaDirectiveLine> {
+ pub(crate) fn find_line_with_directive(&self, offset: TextSize) -> Option<&NoqaDirectiveLine> {
self.find_line_index(offset).map(|index| &self.inner[index])
}
- pub fn find_line_with_directive_mut(
+ pub(crate) fn find_line_with_directive_mut(
&mut self,
offset: TextSize,
) -> Option<&mut NoqaDirectiveLine<'a>> {
@@ -438,7 +440,7 @@ impl<'a> NoqaDirectives<'a> {
.ok()
}
- pub fn lines(&self) -> &[NoqaDirectiveLine] {
+ pub(crate) fn lines(&self) -> &[NoqaDirectiveLine] {
&self.inner
}
}
@@ -512,7 +514,7 @@ mod tests {
use ruff_text_size::{TextRange, TextSize};
use ruff_diagnostics::Diagnostic;
- use ruff_python_ast::newlines::LineEnding;
+ use ruff_newlines::LineEnding;
use ruff_python_ast::source_code::Locator;
use crate::noqa::{add_noqa_inner, NoqaMapping, NOQA_LINE_REGEX};
diff --git a/crates/ruff/src/pyproject_toml.rs b/crates/ruff/src/pyproject_toml.rs
new file mode 100644
index 0000000000000..21d3c04d1afe7
--- /dev/null
+++ b/crates/ruff/src/pyproject_toml.rs
@@ -0,0 +1,62 @@
+use anyhow::Result;
+use pyproject_toml::{BuildSystem, Project};
+use ruff_text_size::{TextRange, TextSize};
+use serde::{Deserialize, Serialize};
+
+use ruff_diagnostics::Diagnostic;
+use ruff_python_ast::source_code::SourceFile;
+
+use crate::message::Message;
+use crate::rules::ruff::rules::InvalidPyprojectToml;
+use crate::IOError;
+
+/// Unlike [`pyproject_toml::PyProjectToml`], in our case `build_system` is also optional
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
+#[serde(rename_all = "kebab-case")]
+struct PyProjectToml {
+ /// Build-related data
+ build_system: Option<BuildSystem>,
+ /// Project metadata
+ project: Option<Project>,
+}
+
+pub fn lint_pyproject_toml(source_file: SourceFile) -> Result<Vec<Message>> {
+ let err = match toml::from_str::<PyProjectToml>(source_file.source_text()) {
+ Ok(_) => return Ok(Vec::default()),
+ Err(err) => err,
+ };
+
+ let range = match err.span() {
+ // This is bad but sometimes toml and/or serde just don't give us spans
+ // TODO(konstin,micha): https://github.com/charliermarsh/ruff/issues/4571
+ None => TextRange::default(),
+ Some(range) => {
+ let Ok(end) = TextSize::try_from(range.end) else {
+ let diagnostic = Diagnostic::new(
+ IOError {
+ message: "pyproject.toml is larger than 4GB".to_string(),
+ },
+ TextRange::default(),
+ );
+ return Ok(vec![Message::from_diagnostic(
+ diagnostic,
+ source_file,
+ TextSize::default(),
+ )]);
+ };
+ TextRange::new(
+ // start <= end, so if end < 4GB follows start < 4GB
+ TextSize::try_from(range.start).unwrap(),
+ end,
+ )
+ }
+ };
+
+ let toml_err = err.message().to_string();
+ let diagnostic = Diagnostic::new(InvalidPyprojectToml { message: toml_err }, range);
+ Ok(vec![Message::from_diagnostic(
+ diagnostic,
+ source_file,
+ TextSize::default(),
+ )])
+}
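
The new `lint_pyproject_toml` converts a TOML error span (given as `usize` byte offsets) into `u32`-backed offsets, and only the `end` conversion needs checking: since `start <= end`, a failed conversion of `end` already means the file exceeds the 4GB addressable range. A standalone sketch of that guard, with a plain `(u32, u32)` tuple standing in for `TextRange`:

```rust
// Sketch of the span guard: only `end` needs a checked conversion, because
// `start <= end` implies `start` fits whenever `end` does. `(u32, u32)` is a
// stand-in for ruff's `TextRange`; `None` corresponds to the IOError path.
fn span_to_u32(span: std::ops::Range<usize>) -> Option<(u32, u32)> {
    let end = u32::try_from(span.end).ok()?;
    let start = u32::try_from(span.start).expect("start <= end, so start also fits in u32");
    Some((start, end))
}

fn main() {
    assert_eq!(span_to_u32(3..10), Some((3, 10)));
    // On 64-bit targets, a span past u32::MAX is rejected.
    assert_eq!(span_to_u32(0..usize::MAX), None);
}
```
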
diff --git a/crates/ruff/src/registry.rs b/crates/ruff/src/registry.rs
index cc342b9d0eab7..c57390c0598d6 100644
--- a/crates/ruff/src/registry.rs
+++ b/crates/ruff/src/registry.rs
@@ -1,675 +1,15 @@
-//! Registry of all [`Rule`] implementations.
+//! Remnant of the registry of all [`Rule`] implementations; it now re-exports from `codes.rs`
+//! along with some helper symbols.
-mod rule_set;
-
-use strum_macros::{AsRefStr, EnumIter};
+use strum_macros::EnumIter;
-use ruff_diagnostics::Violation;
+pub use codes::Rule;
use ruff_macros::RuleNamespace;
+pub use rule_set::{RuleSet, RuleSetIterator};
use crate::codes::{self, RuleCodePrefix};
-use crate::rules;
-pub use rule_set::{RuleSet, RuleSetIterator};
-ruff_macros::register_rules!(
- // pycodestyle errors
- rules::pycodestyle::rules::MixedSpacesAndTabs,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::IndentationWithInvalidMultiple,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::NoIndentedBlock,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::UnexpectedIndentation,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::IndentationWithInvalidMultipleComment,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::NoIndentedBlockComment,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::UnexpectedIndentationComment,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::OverIndented,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::WhitespaceAfterOpenBracket,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::WhitespaceBeforeCloseBracket,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::WhitespaceBeforePunctuation,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MultipleSpacesBeforeOperator,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MultipleSpacesAfterOperator,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::TabBeforeOperator,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::TabAfterOperator,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::TooFewSpacesBeforeInlineComment,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::NoSpaceAfterInlineComment,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::NoSpaceAfterBlockComment,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MultipleLeadingHashesForBlockComment,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MultipleSpacesAfterKeyword,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MissingWhitespace,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MissingWhitespaceAfterKeyword,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MultipleSpacesBeforeKeyword,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MissingWhitespaceAroundOperator,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MissingWhitespaceAroundArithmeticOperator,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MissingWhitespaceAroundBitwiseOrShiftOperator,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MissingWhitespaceAroundModuloOperator,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::TabAfterKeyword,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::UnexpectedSpacesAroundKeywordParameterEquals,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::MissingWhitespaceAroundParameterEquals,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::WhitespaceBeforeParameters,
- #[cfg(feature = "logical_lines")]
- rules::pycodestyle::rules::logical_lines::TabBeforeKeyword,
- rules::pycodestyle::rules::MultipleImportsOnOneLine,
- rules::pycodestyle::rules::ModuleImportNotAtTopOfFile,
- rules::pycodestyle::rules::LineTooLong,
- rules::pycodestyle::rules::MultipleStatementsOnOneLineColon,
- rules::pycodestyle::rules::MultipleStatementsOnOneLineSemicolon,
- rules::pycodestyle::rules::UselessSemicolon,
- rules::pycodestyle::rules::NoneComparison,
- rules::pycodestyle::rules::TrueFalseComparison,
- rules::pycodestyle::rules::NotInTest,
- rules::pycodestyle::rules::NotIsTest,
- rules::pycodestyle::rules::TypeComparison,
- rules::pycodestyle::rules::BareExcept,
- rules::pycodestyle::rules::LambdaAssignment,
- rules::pycodestyle::rules::AmbiguousVariableName,
- rules::pycodestyle::rules::AmbiguousClassName,
- rules::pycodestyle::rules::AmbiguousFunctionName,
- rules::pycodestyle::rules::IOError,
- rules::pycodestyle::rules::SyntaxError,
- // pycodestyle warnings
- rules::pycodestyle::rules::TabIndentation,
- rules::pycodestyle::rules::TrailingWhitespace,
- rules::pycodestyle::rules::MissingNewlineAtEndOfFile,
- rules::pycodestyle::rules::BlankLineWithWhitespace,
- rules::pycodestyle::rules::DocLineTooLong,
- rules::pycodestyle::rules::InvalidEscapeSequence,
- // pyflakes
- rules::pyflakes::rules::UnusedImport,
- rules::pyflakes::rules::ImportShadowedByLoopVar,
- rules::pyflakes::rules::UndefinedLocalWithImportStar,
- rules::pyflakes::rules::LateFutureImport,
- rules::pyflakes::rules::UndefinedLocalWithImportStarUsage,
- rules::pyflakes::rules::UndefinedLocalWithNestedImportStarUsage,
- rules::pyflakes::rules::FutureFeatureNotDefined,
- rules::pyflakes::rules::PercentFormatInvalidFormat,
- rules::pyflakes::rules::PercentFormatExpectedMapping,
- rules::pyflakes::rules::PercentFormatExpectedSequence,
- rules::pyflakes::rules::PercentFormatExtraNamedArguments,
- rules::pyflakes::rules::PercentFormatMissingArgument,
- rules::pyflakes::rules::PercentFormatMixedPositionalAndNamed,
- rules::pyflakes::rules::PercentFormatPositionalCountMismatch,
- rules::pyflakes::rules::PercentFormatStarRequiresSequence,
- rules::pyflakes::rules::PercentFormatUnsupportedFormatCharacter,
- rules::pyflakes::rules::StringDotFormatInvalidFormat,
- rules::pyflakes::rules::StringDotFormatExtraNamedArguments,
- rules::pyflakes::rules::StringDotFormatExtraPositionalArguments,
- rules::pyflakes::rules::StringDotFormatMissingArguments,
- rules::pyflakes::rules::StringDotFormatMixingAutomatic,
- rules::pyflakes::rules::FStringMissingPlaceholders,
- rules::pyflakes::rules::MultiValueRepeatedKeyLiteral,
- rules::pyflakes::rules::MultiValueRepeatedKeyVariable,
- rules::pyflakes::rules::ExpressionsInStarAssignment,
- rules::pyflakes::rules::MultipleStarredExpressions,
- rules::pyflakes::rules::AssertTuple,
- rules::pyflakes::rules::IsLiteral,
- rules::pyflakes::rules::InvalidPrintSyntax,
- rules::pyflakes::rules::IfTuple,
- rules::pyflakes::rules::BreakOutsideLoop,
- rules::pyflakes::rules::ContinueOutsideLoop,
- rules::pyflakes::rules::YieldOutsideFunction,
- rules::pyflakes::rules::ReturnOutsideFunction,
- rules::pyflakes::rules::DefaultExceptNotLast,
- rules::pyflakes::rules::ForwardAnnotationSyntaxError,
- rules::pyflakes::rules::RedefinedWhileUnused,
- rules::pyflakes::rules::UndefinedName,
- rules::pyflakes::rules::UndefinedExport,
- rules::pyflakes::rules::UndefinedLocal,
- rules::pyflakes::rules::UnusedVariable,
- rules::pyflakes::rules::UnusedAnnotation,
- rules::pyflakes::rules::RaiseNotImplemented,
- // pylint
- rules::pylint::rules::AssertOnStringLiteral,
- rules::pylint::rules::UselessReturn,
- rules::pylint::rules::YieldInInit,
- rules::pylint::rules::InvalidAllObject,
- rules::pylint::rules::InvalidAllFormat,
- rules::pylint::rules::InvalidEnvvarDefault,
- rules::pylint::rules::InvalidEnvvarValue,
- rules::pylint::rules::BadStringFormatType,
- rules::pylint::rules::BidirectionalUnicode,
- rules::pylint::rules::BinaryOpException,
- rules::pylint::rules::ImportSelf,
- rules::pylint::rules::InvalidCharacterBackspace,
- rules::pylint::rules::InvalidCharacterSub,
- rules::pylint::rules::InvalidCharacterEsc,
- rules::pylint::rules::InvalidCharacterNul,
- rules::pylint::rules::InvalidCharacterZeroWidthSpace,
- rules::pylint::rules::BadStrStripCall,
- rules::pylint::rules::CollapsibleElseIf,
- rules::pylint::rules::ContinueInFinally,
- rules::pylint::rules::UselessImportAlias,
- rules::pylint::rules::UnnecessaryDirectLambdaCall,
- rules::pylint::rules::NonlocalWithoutBinding,
- rules::pylint::rules::LoadBeforeGlobalDeclaration,
- rules::pylint::rules::AwaitOutsideAsync,
- rules::pylint::rules::PropertyWithParameters,
- rules::pylint::rules::ReturnInInit,
- rules::pylint::rules::ManualFromImport,
- rules::pylint::rules::CompareToEmptyString,
- rules::pylint::rules::ComparisonOfConstant,
- rules::pylint::rules::RepeatedIsinstanceCalls,
- rules::pylint::rules::SysExitAlias,
- rules::pylint::rules::MagicValueComparison,
- rules::pylint::rules::UselessElseOnLoop,
- rules::pylint::rules::GlobalStatement,
- rules::pylint::rules::GlobalVariableNotAssigned,
- rules::pylint::rules::TooManyReturnStatements,
- rules::pylint::rules::TooManyArguments,
- rules::pylint::rules::TooManyBranches,
- rules::pylint::rules::TooManyStatements,
- rules::pylint::rules::RedefinedLoopName,
- rules::pylint::rules::LoggingTooFewArgs,
- rules::pylint::rules::LoggingTooManyArgs,
- rules::pylint::rules::UnexpectedSpecialMethodSignature,
- // flake8-builtins
- rules::flake8_builtins::rules::BuiltinVariableShadowing,
- rules::flake8_builtins::rules::BuiltinArgumentShadowing,
- rules::flake8_builtins::rules::BuiltinAttributeShadowing,
- // flake8-bugbear
- rules::flake8_bugbear::rules::UnaryPrefixIncrement,
- rules::flake8_bugbear::rules::AssignmentToOsEnviron,
- rules::flake8_bugbear::rules::UnreliableCallableCheck,
- rules::flake8_bugbear::rules::StripWithMultiCharacters,
- rules::flake8_bugbear::rules::MutableArgumentDefault,
- rules::flake8_bugbear::rules::NoExplicitStacklevel,
- rules::flake8_bugbear::rules::UnusedLoopControlVariable,
- rules::flake8_bugbear::rules::FunctionCallInDefaultArgument,
- rules::flake8_bugbear::rules::GetAttrWithConstant,
- rules::flake8_bugbear::rules::SetAttrWithConstant,
- rules::flake8_bugbear::rules::AssertFalse,
- rules::flake8_bugbear::rules::JumpStatementInFinally,
- rules::flake8_bugbear::rules::RedundantTupleInExceptionHandler,
- rules::flake8_bugbear::rules::DuplicateHandlerException,
- rules::flake8_bugbear::rules::UselessComparison,
- rules::flake8_bugbear::rules::CannotRaiseLiteral,
- rules::flake8_bugbear::rules::AssertRaisesException,
- rules::flake8_bugbear::rules::UselessExpression,
- rules::flake8_bugbear::rules::CachedInstanceMethod,
- rules::flake8_bugbear::rules::LoopVariableOverridesIterator,
- rules::flake8_bugbear::rules::FStringDocstring,
- rules::flake8_bugbear::rules::UselessContextlibSuppress,
- rules::flake8_bugbear::rules::FunctionUsesLoopVariable,
- rules::flake8_bugbear::rules::AbstractBaseClassWithoutAbstractMethod,
- rules::flake8_bugbear::rules::DuplicateTryBlockException,
- rules::flake8_bugbear::rules::StarArgUnpackingAfterKeywordArg,
- rules::flake8_bugbear::rules::EmptyMethodWithoutAbstractDecorator,
- rules::flake8_bugbear::rules::RaiseWithoutFromInsideExcept,
- rules::flake8_bugbear::rules::ZipWithoutExplicitStrict,
- rules::flake8_bugbear::rules::ExceptWithEmptyTuple,
- rules::flake8_bugbear::rules::ExceptWithNonExceptionClasses,
- rules::flake8_bugbear::rules::ReuseOfGroupbyGenerator,
- rules::flake8_bugbear::rules::UnintentionalTypeAnnotation,
- // flake8-blind-except
- rules::flake8_blind_except::rules::BlindExcept,
- // flake8-comprehensions
- rules::flake8_comprehensions::rules::UnnecessaryCallAroundSorted,
- rules::flake8_comprehensions::rules::UnnecessaryCollectionCall,
- rules::flake8_comprehensions::rules::UnnecessaryComprehension,
- rules::flake8_comprehensions::rules::UnnecessaryComprehensionAnyAll,
- rules::flake8_comprehensions::rules::UnnecessaryDoubleCastOrProcess,
- rules::flake8_comprehensions::rules::UnnecessaryGeneratorDict,
- rules::flake8_comprehensions::rules::UnnecessaryGeneratorList,
- rules::flake8_comprehensions::rules::UnnecessaryGeneratorSet,
- rules::flake8_comprehensions::rules::UnnecessaryListCall,
- rules::flake8_comprehensions::rules::UnnecessaryListComprehensionDict,
- rules::flake8_comprehensions::rules::UnnecessaryListComprehensionSet,
- rules::flake8_comprehensions::rules::UnnecessaryLiteralDict,
- rules::flake8_comprehensions::rules::UnnecessaryLiteralSet,
- rules::flake8_comprehensions::rules::UnnecessaryLiteralWithinDictCall,
- rules::flake8_comprehensions::rules::UnnecessaryLiteralWithinListCall,
- rules::flake8_comprehensions::rules::UnnecessaryLiteralWithinTupleCall,
- rules::flake8_comprehensions::rules::UnnecessaryMap,
- rules::flake8_comprehensions::rules::UnnecessarySubscriptReversal,
- // flake8-debugger
- rules::flake8_debugger::rules::Debugger,
- // mccabe
- rules::mccabe::rules::ComplexStructure,
- // flake8-tidy-imports
- rules::flake8_tidy_imports::banned_api::BannedApi,
- rules::flake8_tidy_imports::relative_imports::RelativeImports,
- // flake8-return
- rules::flake8_return::rules::UnnecessaryReturnNone,
- rules::flake8_return::rules::ImplicitReturnValue,
- rules::flake8_return::rules::ImplicitReturn,
- rules::flake8_return::rules::UnnecessaryAssign,
- rules::flake8_return::rules::SuperfluousElseReturn,
- rules::flake8_return::rules::SuperfluousElseRaise,
- rules::flake8_return::rules::SuperfluousElseContinue,
- rules::flake8_return::rules::SuperfluousElseBreak,
- // flake8-implicit-str-concat
- rules::flake8_implicit_str_concat::rules::SingleLineImplicitStringConcatenation,
- rules::flake8_implicit_str_concat::rules::MultiLineImplicitStringConcatenation,
- rules::flake8_implicit_str_concat::rules::ExplicitStringConcatenation,
- // flake8-print
- rules::flake8_print::rules::Print,
- rules::flake8_print::rules::PPrint,
- // flake8-quotes
- rules::flake8_quotes::rules::BadQuotesInlineString,
- rules::flake8_quotes::rules::BadQuotesMultilineString,
- rules::flake8_quotes::rules::BadQuotesDocstring,
- rules::flake8_quotes::rules::AvoidableEscapedQuote,
- // flake8-annotations
- rules::flake8_annotations::rules::MissingTypeFunctionArgument,
- rules::flake8_annotations::rules::MissingTypeArgs,
- rules::flake8_annotations::rules::MissingTypeKwargs,
- rules::flake8_annotations::rules::MissingTypeSelf,
- rules::flake8_annotations::rules::MissingTypeCls,
- rules::flake8_annotations::rules::MissingReturnTypeUndocumentedPublicFunction,
- rules::flake8_annotations::rules::MissingReturnTypePrivateFunction,
- rules::flake8_annotations::rules::MissingReturnTypeSpecialMethod,
- rules::flake8_annotations::rules::MissingReturnTypeStaticMethod,
- rules::flake8_annotations::rules::MissingReturnTypeClassMethod,
- rules::flake8_annotations::rules::AnyType,
- // flake8-2020
- rules::flake8_2020::rules::SysVersionSlice3,
- rules::flake8_2020::rules::SysVersion2,
- rules::flake8_2020::rules::SysVersionCmpStr3,
- rules::flake8_2020::rules::SysVersionInfo0Eq3,
- rules::flake8_2020::rules::SixPY3,
- rules::flake8_2020::rules::SysVersionInfo1CmpInt,
- rules::flake8_2020::rules::SysVersionInfoMinorCmpInt,
- rules::flake8_2020::rules::SysVersion0,
- rules::flake8_2020::rules::SysVersionCmpStr10,
- rules::flake8_2020::rules::SysVersionSlice1,
- // flake8-simplify
- rules::flake8_simplify::rules::IfElseBlockInsteadOfDictLookup,
- rules::flake8_simplify::rules::DuplicateIsinstanceCall,
- rules::flake8_simplify::rules::CollapsibleIf,
- rules::flake8_simplify::rules::NeedlessBool,
- rules::flake8_simplify::rules::SuppressibleException,
- rules::flake8_simplify::rules::ReturnInTryExceptFinally,
- rules::flake8_simplify::rules::IfElseBlockInsteadOfIfExp,
- rules::flake8_simplify::rules::CompareWithTuple,
- rules::flake8_simplify::rules::ReimplementedBuiltin,
- rules::flake8_simplify::rules::UncapitalizedEnvironmentVariables,
- rules::flake8_simplify::rules::IfWithSameArms,
- rules::flake8_simplify::rules::OpenFileWithContextHandler,
- rules::flake8_simplify::rules::MultipleWithStatements,
- rules::flake8_simplify::rules::InDictKeys,
- rules::flake8_simplify::rules::NegateEqualOp,
- rules::flake8_simplify::rules::NegateNotEqualOp,
- rules::flake8_simplify::rules::DoubleNegation,
- rules::flake8_simplify::rules::IfExprWithTrueFalse,
- rules::flake8_simplify::rules::IfExprWithFalseTrue,
- rules::flake8_simplify::rules::IfExprWithTwistedArms,
- rules::flake8_simplify::rules::ExprAndNotExpr,
- rules::flake8_simplify::rules::ExprOrNotExpr,
- rules::flake8_simplify::rules::ExprOrTrue,
- rules::flake8_simplify::rules::ExprAndFalse,
- rules::flake8_simplify::rules::YodaConditions,
- rules::flake8_simplify::rules::IfElseBlockInsteadOfDictGet,
- rules::flake8_simplify::rules::DictGetWithNoneDefault,
- // pyupgrade
- rules::pyupgrade::rules::UselessMetaclassType,
- rules::pyupgrade::rules::TypeOfPrimitive,
- rules::pyupgrade::rules::UselessObjectInheritance,
- rules::pyupgrade::rules::DeprecatedUnittestAlias,
- rules::pyupgrade::rules::NonPEP585Annotation,
- rules::pyupgrade::rules::NonPEP604Annotation,
- rules::pyupgrade::rules::SuperCallWithParameters,
- rules::pyupgrade::rules::UTF8EncodingDeclaration,
- rules::pyupgrade::rules::UnnecessaryFutureImport,
- rules::pyupgrade::rules::LRUCacheWithoutParameters,
- rules::pyupgrade::rules::UnnecessaryEncodeUTF8,
- rules::pyupgrade::rules::ConvertTypedDictFunctionalToClass,
- rules::pyupgrade::rules::ConvertNamedTupleFunctionalToClass,
- rules::pyupgrade::rules::RedundantOpenModes,
- rules::pyupgrade::rules::DatetimeTimezoneUTC,
- rules::pyupgrade::rules::NativeLiterals,
- rules::pyupgrade::rules::TypingTextStrAlias,
- rules::pyupgrade::rules::OpenAlias,
- rules::pyupgrade::rules::ReplaceUniversalNewlines,
- rules::pyupgrade::rules::ReplaceStdoutStderr,
- rules::pyupgrade::rules::DeprecatedCElementTree,
- rules::pyupgrade::rules::OSErrorAlias,
- rules::pyupgrade::rules::UnicodeKindPrefix,
- rules::pyupgrade::rules::DeprecatedMockImport,
- rules::pyupgrade::rules::UnpackedListComprehension,
- rules::pyupgrade::rules::YieldInForLoop,
- rules::pyupgrade::rules::UnnecessaryBuiltinImport,
- rules::pyupgrade::rules::FormatLiterals,
- rules::pyupgrade::rules::PrintfStringFormatting,
- rules::pyupgrade::rules::FString,
- rules::pyupgrade::rules::LRUCacheWithMaxsizeNone,
- rules::pyupgrade::rules::ExtraneousParentheses,
- rules::pyupgrade::rules::DeprecatedImport,
- rules::pyupgrade::rules::OutdatedVersionBlock,
- rules::pyupgrade::rules::QuotedAnnotation,
- rules::pyupgrade::rules::NonPEP604Isinstance,
- // pydocstyle
- rules::pydocstyle::rules::UndocumentedPublicModule,
- rules::pydocstyle::rules::UndocumentedPublicClass,
- rules::pydocstyle::rules::UndocumentedPublicMethod,
- rules::pydocstyle::rules::UndocumentedPublicFunction,
- rules::pydocstyle::rules::UndocumentedPublicPackage,
- rules::pydocstyle::rules::UndocumentedMagicMethod,
- rules::pydocstyle::rules::UndocumentedPublicNestedClass,
- rules::pydocstyle::rules::UndocumentedPublicInit,
- rules::pydocstyle::rules::FitsOnOneLine,
- rules::pydocstyle::rules::NoBlankLineBeforeFunction,
- rules::pydocstyle::rules::NoBlankLineAfterFunction,
- rules::pydocstyle::rules::OneBlankLineBeforeClass,
- rules::pydocstyle::rules::OneBlankLineAfterClass,
- rules::pydocstyle::rules::BlankLineAfterSummary,
- rules::pydocstyle::rules::IndentWithSpaces,
- rules::pydocstyle::rules::UnderIndentation,
- rules::pydocstyle::rules::OverIndentation,
- rules::pydocstyle::rules::NewLineAfterLastParagraph,
- rules::pydocstyle::rules::SurroundingWhitespace,
- rules::pydocstyle::rules::BlankLineBeforeClass,
- rules::pydocstyle::rules::MultiLineSummaryFirstLine,
- rules::pydocstyle::rules::MultiLineSummarySecondLine,
- rules::pydocstyle::rules::SectionNotOverIndented,
- rules::pydocstyle::rules::SectionUnderlineNotOverIndented,
- rules::pydocstyle::rules::TripleSingleQuotes,
- rules::pydocstyle::rules::EscapeSequenceInDocstring,
- rules::pydocstyle::rules::EndsInPeriod,
- rules::pydocstyle::rules::NonImperativeMood,
- rules::pydocstyle::rules::NoSignature,
- rules::pydocstyle::rules::FirstLineCapitalized,
- rules::pydocstyle::rules::DocstringStartsWithThis,
- rules::pydocstyle::rules::CapitalizeSectionName,
- rules::pydocstyle::rules::NewLineAfterSectionName,
- rules::pydocstyle::rules::DashedUnderlineAfterSection,
- rules::pydocstyle::rules::SectionUnderlineAfterName,
- rules::pydocstyle::rules::SectionUnderlineMatchesSectionLength,
- rules::pydocstyle::rules::NoBlankLineAfterSection,
- rules::pydocstyle::rules::NoBlankLineBeforeSection,
- rules::pydocstyle::rules::BlankLinesBetweenHeaderAndContent,
- rules::pydocstyle::rules::BlankLineAfterLastSection,
- rules::pydocstyle::rules::EmptyDocstringSection,
- rules::pydocstyle::rules::EndsInPunctuation,
- rules::pydocstyle::rules::SectionNameEndsInColon,
- rules::pydocstyle::rules::UndocumentedParam,
- rules::pydocstyle::rules::OverloadWithDocstring,
- rules::pydocstyle::rules::EmptyDocstring,
- // pep8-naming
- rules::pep8_naming::rules::InvalidClassName,
- rules::pep8_naming::rules::InvalidFunctionName,
- rules::pep8_naming::rules::InvalidArgumentName,
- rules::pep8_naming::rules::InvalidFirstArgumentNameForClassMethod,
- rules::pep8_naming::rules::InvalidFirstArgumentNameForMethod,
- rules::pep8_naming::rules::NonLowercaseVariableInFunction,
- rules::pep8_naming::rules::DunderFunctionName,
- rules::pep8_naming::rules::ConstantImportedAsNonConstant,
- rules::pep8_naming::rules::LowercaseImportedAsNonLowercase,
- rules::pep8_naming::rules::CamelcaseImportedAsLowercase,
- rules::pep8_naming::rules::CamelcaseImportedAsConstant,
- rules::pep8_naming::rules::MixedCaseVariableInClassScope,
- rules::pep8_naming::rules::MixedCaseVariableInGlobalScope,
- rules::pep8_naming::rules::CamelcaseImportedAsAcronym,
- rules::pep8_naming::rules::ErrorSuffixOnExceptionName,
- rules::pep8_naming::rules::InvalidModuleName,
- // isort
- rules::isort::rules::UnsortedImports,
- rules::isort::rules::MissingRequiredImport,
- // eradicate
- rules::eradicate::rules::CommentedOutCode,
- // flake8-bandit
- rules::flake8_bandit::rules::Assert,
- rules::flake8_bandit::rules::BadFilePermissions,
- rules::flake8_bandit::rules::ExecBuiltin,
- rules::flake8_bandit::rules::HardcodedBindAllInterfaces,
- rules::flake8_bandit::rules::HardcodedPasswordDefault,
- rules::flake8_bandit::rules::HardcodedPasswordFuncArg,
- rules::flake8_bandit::rules::HardcodedPasswordString,
- rules::flake8_bandit::rules::HardcodedSQLExpression,
- rules::flake8_bandit::rules::HardcodedTempFile,
- rules::flake8_bandit::rules::HashlibInsecureHashFunction,
- rules::flake8_bandit::rules::Jinja2AutoescapeFalse,
- rules::flake8_bandit::rules::LoggingConfigInsecureListen,
- rules::flake8_bandit::rules::RequestWithNoCertValidation,
- rules::flake8_bandit::rules::RequestWithoutTimeout,
- rules::flake8_bandit::rules::SnmpInsecureVersion,
- rules::flake8_bandit::rules::SnmpWeakCryptography,
- rules::flake8_bandit::rules::SubprocessPopenWithShellEqualsTrue,
- rules::flake8_bandit::rules::SubprocessWithoutShellEqualsTrue,
- rules::flake8_bandit::rules::CallWithShellEqualsTrue,
- rules::flake8_bandit::rules::StartProcessWithAShell,
- rules::flake8_bandit::rules::StartProcessWithNoShell,
- rules::flake8_bandit::rules::StartProcessWithPartialPath,
- rules::flake8_bandit::rules::SuspiciousEvalUsage,
- rules::flake8_bandit::rules::SuspiciousFTPLibUsage,
- rules::flake8_bandit::rules::SuspiciousInsecureCipherUsage,
- rules::flake8_bandit::rules::SuspiciousInsecureCipherModeUsage,
- rules::flake8_bandit::rules::SuspiciousInsecureHashUsage,
- rules::flake8_bandit::rules::SuspiciousMarkSafeUsage,
- rules::flake8_bandit::rules::SuspiciousMarshalUsage,
- rules::flake8_bandit::rules::SuspiciousMktempUsage,
- rules::flake8_bandit::rules::SuspiciousNonCryptographicRandomUsage,
- rules::flake8_bandit::rules::SuspiciousPickleUsage,
- rules::flake8_bandit::rules::SuspiciousTelnetUsage,
- rules::flake8_bandit::rules::SuspiciousURLOpenUsage,
- rules::flake8_bandit::rules::SuspiciousUnverifiedContextUsage,
- rules::flake8_bandit::rules::SuspiciousXMLCElementTreeUsage,
- rules::flake8_bandit::rules::SuspiciousXMLETreeUsage,
- rules::flake8_bandit::rules::SuspiciousXMLElementTreeUsage,
- rules::flake8_bandit::rules::SuspiciousXMLExpatBuilderUsage,
- rules::flake8_bandit::rules::SuspiciousXMLExpatReaderUsage,
- rules::flake8_bandit::rules::SuspiciousXMLMiniDOMUsage,
- rules::flake8_bandit::rules::SuspiciousXMLPullDOMUsage,
- rules::flake8_bandit::rules::SuspiciousXMLSaxUsage,
- rules::flake8_bandit::rules::TryExceptContinue,
- rules::flake8_bandit::rules::TryExceptPass,
- rules::flake8_bandit::rules::UnsafeYAMLLoad,
- // flake8-boolean-trap
- rules::flake8_boolean_trap::rules::BooleanPositionalArgInFunctionDefinition,
- rules::flake8_boolean_trap::rules::BooleanDefaultValueInFunctionDefinition,
- rules::flake8_boolean_trap::rules::BooleanPositionalValueInFunctionCall,
- // flake8-unused-arguments
- rules::flake8_unused_arguments::rules::UnusedFunctionArgument,
- rules::flake8_unused_arguments::rules::UnusedMethodArgument,
- rules::flake8_unused_arguments::rules::UnusedClassMethodArgument,
- rules::flake8_unused_arguments::rules::UnusedStaticMethodArgument,
- rules::flake8_unused_arguments::rules::UnusedLambdaArgument,
- // flake8-import-conventions
- rules::flake8_import_conventions::rules::UnconventionalImportAlias,
- rules::flake8_import_conventions::rules::BannedImportAlias,
- rules::flake8_import_conventions::rules::BannedImportFrom,
- // flake8-datetimez
- rules::flake8_datetimez::rules::CallDatetimeWithoutTzinfo,
- rules::flake8_datetimez::rules::CallDatetimeToday,
- rules::flake8_datetimez::rules::CallDatetimeUtcnow,
- rules::flake8_datetimez::rules::CallDatetimeUtcfromtimestamp,
- rules::flake8_datetimez::rules::CallDatetimeNowWithoutTzinfo,
- rules::flake8_datetimez::rules::CallDatetimeFromtimestamp,
- rules::flake8_datetimez::rules::CallDatetimeStrptimeWithoutZone,
- rules::flake8_datetimez::rules::CallDateToday,
- rules::flake8_datetimez::rules::CallDateFromtimestamp,
- // pygrep-hooks
- rules::pygrep_hooks::rules::Eval,
- rules::pygrep_hooks::rules::DeprecatedLogWarn,
- rules::pygrep_hooks::rules::BlanketTypeIgnore,
- rules::pygrep_hooks::rules::BlanketNOQA,
- // pandas-vet
- rules::pandas_vet::rules::PandasUseOfInplaceArgument,
- rules::pandas_vet::rules::PandasUseOfDotIsNull,
- rules::pandas_vet::rules::PandasUseOfDotNotNull,
- rules::pandas_vet::rules::PandasUseOfDotIx,
- rules::pandas_vet::rules::PandasUseOfDotAt,
- rules::pandas_vet::rules::PandasUseOfDotIat,
- rules::pandas_vet::rules::PandasUseOfDotPivotOrUnstack,
- rules::pandas_vet::rules::PandasUseOfDotValues,
- rules::pandas_vet::rules::PandasUseOfDotReadTable,
- rules::pandas_vet::rules::PandasUseOfDotStack,
- rules::pandas_vet::rules::PandasUseOfPdMerge,
- rules::pandas_vet::rules::PandasDfVariableName,
- // flake8-errmsg
- rules::flake8_errmsg::rules::RawStringInException,
- rules::flake8_errmsg::rules::FStringInException,
- rules::flake8_errmsg::rules::DotFormatInException,
- // flake8-pyi
- rules::flake8_pyi::rules::ArgumentDefaultInStub,
- rules::flake8_pyi::rules::AssignmentDefaultInStub,
- rules::flake8_pyi::rules::BadVersionInfoComparison,
- rules::flake8_pyi::rules::DocstringInStub,
- rules::flake8_pyi::rules::NonEmptyStubBody,
- rules::flake8_pyi::rules::PassStatementStubBody,
- rules::flake8_pyi::rules::TypeCommentInStub,
- rules::flake8_pyi::rules::TypedArgumentDefaultInStub,
- rules::flake8_pyi::rules::UnprefixedTypeParam,
- rules::flake8_pyi::rules::UnrecognizedPlatformCheck,
- rules::flake8_pyi::rules::UnrecognizedPlatformName,
- rules::flake8_pyi::rules::PassInClassBody,
- rules::flake8_pyi::rules::DuplicateUnionMember,
- rules::flake8_pyi::rules::QuotedAnnotationInStub,
- rules::flake8_pyi::rules::SnakeCaseTypeAlias,
- rules::flake8_pyi::rules::TSuffixedTypeAlias,
- // flake8-pytest-style
- rules::flake8_pytest_style::rules::PytestFixtureIncorrectParenthesesStyle,
- rules::flake8_pytest_style::rules::PytestFixturePositionalArgs,
- rules::flake8_pytest_style::rules::PytestExtraneousScopeFunction,
- rules::flake8_pytest_style::rules::PytestMissingFixtureNameUnderscore,
- rules::flake8_pytest_style::rules::PytestIncorrectFixtureNameUnderscore,
- rules::flake8_pytest_style::rules::PytestParametrizeNamesWrongType,
- rules::flake8_pytest_style::rules::PytestParametrizeValuesWrongType,
- rules::flake8_pytest_style::rules::PytestPatchWithLambda,
- rules::flake8_pytest_style::rules::PytestUnittestAssertion,
- rules::flake8_pytest_style::rules::PytestRaisesWithoutException,
- rules::flake8_pytest_style::rules::PytestRaisesTooBroad,
- rules::flake8_pytest_style::rules::PytestRaisesWithMultipleStatements,
- rules::flake8_pytest_style::rules::PytestIncorrectPytestImport,
- rules::flake8_pytest_style::rules::PytestAssertAlwaysFalse,
- rules::flake8_pytest_style::rules::PytestFailWithoutMessage,
- rules::flake8_pytest_style::rules::PytestAssertInExcept,
- rules::flake8_pytest_style::rules::PytestCompositeAssertion,
- rules::flake8_pytest_style::rules::PytestFixtureParamWithoutValue,
- rules::flake8_pytest_style::rules::PytestDeprecatedYieldFixture,
- rules::flake8_pytest_style::rules::PytestFixtureFinalizerCallback,
- rules::flake8_pytest_style::rules::PytestUselessYieldFixture,
- rules::flake8_pytest_style::rules::PytestIncorrectMarkParenthesesStyle,
- rules::flake8_pytest_style::rules::PytestUnnecessaryAsyncioMarkOnFixture,
- rules::flake8_pytest_style::rules::PytestErroneousUseFixturesOnFixture,
- rules::flake8_pytest_style::rules::PytestUseFixturesWithoutParameters,
- // flake8-pie
- rules::flake8_pie::rules::UnnecessaryPass,
- rules::flake8_pie::rules::DuplicateClassFieldDefinition,
- rules::flake8_pie::rules::NonUniqueEnums,
- rules::flake8_pie::rules::UnnecessarySpread,
- rules::flake8_pie::rules::UnnecessaryDictKwargs,
- rules::flake8_pie::rules::ReimplementedListBuiltin,
- rules::flake8_pie::rules::MultipleStartsEndsWith,
- // flake8-commas
- rules::flake8_commas::rules::MissingTrailingComma,
- rules::flake8_commas::rules::TrailingCommaOnBareTuple,
- rules::flake8_commas::rules::ProhibitedTrailingComma,
- // flake8-no-pep420
- rules::flake8_no_pep420::rules::ImplicitNamespacePackage,
- // flake8-executable
- rules::flake8_executable::rules::ShebangNotExecutable,
- rules::flake8_executable::rules::ShebangMissingExecutableFile,
- rules::flake8_executable::rules::ShebangMissingPython,
- rules::flake8_executable::rules::ShebangLeadingWhitespace,
- rules::flake8_executable::rules::ShebangNotFirstLine,
- // flake8-type-checking
- rules::flake8_type_checking::rules::TypingOnlyFirstPartyImport,
- rules::flake8_type_checking::rules::TypingOnlyThirdPartyImport,
- rules::flake8_type_checking::rules::TypingOnlyStandardLibraryImport,
- rules::flake8_type_checking::rules::RuntimeImportInTypeCheckingBlock,
- rules::flake8_type_checking::rules::EmptyTypeCheckingBlock,
- // tryceratops
- rules::tryceratops::rules::RaiseVanillaClass,
- rules::tryceratops::rules::RaiseVanillaArgs,
- rules::tryceratops::rules::TypeCheckWithoutTypeError,
- rules::tryceratops::rules::ReraiseNoCause,
- rules::tryceratops::rules::VerboseRaise,
- rules::tryceratops::rules::TryConsiderElse,
- rules::tryceratops::rules::RaiseWithinTry,
- rules::tryceratops::rules::ErrorInsteadOfException,
- rules::tryceratops::rules::VerboseLogMessage,
- // flake8-use-pathlib
- rules::flake8_use_pathlib::violations::OsPathAbspath,
- rules::flake8_use_pathlib::violations::OsChmod,
- rules::flake8_use_pathlib::violations::OsMkdir,
- rules::flake8_use_pathlib::violations::OsMakedirs,
- rules::flake8_use_pathlib::violations::OsRename,
- rules::flake8_use_pathlib::violations::PathlibReplace,
- rules::flake8_use_pathlib::violations::OsRmdir,
- rules::flake8_use_pathlib::violations::OsRemove,
- rules::flake8_use_pathlib::violations::OsUnlink,
- rules::flake8_use_pathlib::violations::OsGetcwd,
- rules::flake8_use_pathlib::violations::OsPathExists,
- rules::flake8_use_pathlib::violations::OsPathExpanduser,
- rules::flake8_use_pathlib::violations::OsPathIsdir,
- rules::flake8_use_pathlib::violations::OsPathIsfile,
- rules::flake8_use_pathlib::violations::OsPathIslink,
- rules::flake8_use_pathlib::violations::OsReadlink,
- rules::flake8_use_pathlib::violations::OsStat,
- rules::flake8_use_pathlib::violations::OsPathIsabs,
- rules::flake8_use_pathlib::violations::OsPathJoin,
- rules::flake8_use_pathlib::violations::OsPathBasename,
- rules::flake8_use_pathlib::violations::OsPathDirname,
- rules::flake8_use_pathlib::violations::OsPathSamefile,
- rules::flake8_use_pathlib::violations::OsPathSplitext,
- rules::flake8_use_pathlib::violations::BuiltinOpen,
- rules::flake8_use_pathlib::violations::PyPath,
- // flake8-logging-format
- rules::flake8_logging_format::violations::LoggingStringFormat,
- rules::flake8_logging_format::violations::LoggingPercentFormat,
- rules::flake8_logging_format::violations::LoggingStringConcat,
- rules::flake8_logging_format::violations::LoggingFString,
- rules::flake8_logging_format::violations::LoggingWarn,
- rules::flake8_logging_format::violations::LoggingExtraAttrClash,
- rules::flake8_logging_format::violations::LoggingExcInfo,
- rules::flake8_logging_format::violations::LoggingRedundantExcInfo,
- // flake8-raise
- rules::flake8_raise::rules::UnnecessaryParenOnRaiseException,
- // flake8-self
- rules::flake8_self::rules::PrivateMemberAccess,
- // flake8-gettext
- rules::flake8_gettext::rules::FStringInGetTextFuncCall,
- rules::flake8_gettext::rules::FormatInGetTextFuncCall,
- rules::flake8_gettext::rules::PrintfInGetTextFuncCall,
- // numpy
- rules::numpy::rules::NumpyDeprecatedTypeAlias,
- rules::numpy::rules::NumpyLegacyRandom,
- // ruff
- rules::ruff::rules::AmbiguousUnicodeCharacterString,
- rules::ruff::rules::AmbiguousUnicodeCharacterDocstring,
- rules::ruff::rules::AmbiguousUnicodeCharacterComment,
- rules::ruff::rules::CollectionLiteralConcatenation,
- rules::ruff::rules::AsyncioDanglingTask,
- rules::ruff::rules::UnusedNOQA,
- rules::ruff::rules::PairwiseOverZipped,
- rules::ruff::rules::MutableDataclassDefault,
- rules::ruff::rules::FunctionCallInDataclassDefaultArgument,
- // flake8-django
- rules::flake8_django::rules::DjangoNullableModelStringField,
- rules::flake8_django::rules::DjangoLocalsInRenderFunction,
- rules::flake8_django::rules::DjangoExcludeWithModelForm,
- rules::flake8_django::rules::DjangoAllWithModelForm,
- rules::flake8_django::rules::DjangoModelWithoutDunderStr,
- rules::flake8_django::rules::DjangoUnorderedBodyContentInModel,
- rules::flake8_django::rules::DjangoNonLeadingReceiverDecorator,
-);
+mod rule_set;
pub trait AsRule {
fn rule(&self) -> Rule;
@@ -719,6 +59,9 @@ pub enum Linter {
/// [flake8-annotations](https://pypi.org/project/flake8-annotations/)
#[prefix = "ANN"]
Flake8Annotations,
+ /// [flake8-async](https://pypi.org/project/flake8-async/)
+ #[prefix = "ASYNC"]
+ Flake8Async,
/// [flake8-bandit](https://pypi.org/project/flake8-bandit/)
#[prefix = "S"]
Flake8Bandit,
@@ -755,6 +98,9 @@ pub enum Linter {
/// [flake8-executable](https://pypi.org/project/flake8-executable/)
#[prefix = "EXE"]
Flake8Executable,
+ /// [flake8-future-annotations](https://pypi.org/project/flake8-future-annotations/)
+ #[prefix = "FA"]
+ Flake8FutureAnnotations,
/// [flake8-implicit-str-concat](https://pypi.org/project/flake8-implicit-str-concat/)
#[prefix = "ISC"]
Flake8ImplicitStrConcat,
@@ -809,6 +155,12 @@ pub enum Linter {
/// [flake8-use-pathlib](https://pypi.org/project/flake8-use-pathlib/)
#[prefix = "PTH"]
Flake8UsePathlib,
+ /// [flake8-todos](https://github.com/orsinium-labs/flake8-todos/)
+ #[prefix = "TD"]
+ Flake8Todos,
+ /// [flake8-fixme](https://github.com/tommilligan/flake8-fixme)
+ #[prefix = "FIX"]
+ Flake8Fixme,
/// [eradicate](https://pypi.org/project/eradicate/)
#[prefix = "ERA"]
Eradicate,
@@ -824,9 +176,15 @@ pub enum Linter {
/// [tryceratops](https://pypi.org/project/tryceratops/1.1.0/)
#[prefix = "TRY"]
Tryceratops,
+ /// [flynt](https://pypi.org/project/flynt/)
+ #[prefix = "FLY"]
+ Flynt,
/// NumPy-specific rules
#[prefix = "NPY"]
Numpy,
+ /// [Airflow](https://pypi.org/project/apache-airflow/)
+ #[prefix = "AIR"]
+ Airflow,
/// Ruff-specific rules
#[prefix = "RUF"]
Ruff,
@@ -930,11 +288,21 @@ impl Rule {
| Rule::UselessSemicolon
| Rule::MultipleStatementsOnOneLineSemicolon
| Rule::ProhibitedTrailingComma
- | Rule::TypeCommentInStub => LintSource::Tokens,
+ | Rule::TypeCommentInStub
+ | Rule::InvalidTodoTag
+ | Rule::MissingTodoAuthor
+ | Rule::MissingTodoLink
+ | Rule::MissingTodoColon
+ | Rule::MissingTodoDescription
+ | Rule::InvalidTodoCapitalization
+ | Rule::MissingSpaceAfterTodoColon
+ | Rule::LineContainsFixme
+ | Rule::LineContainsHack
+ | Rule::LineContainsTodo
+ | Rule::LineContainsXxx => LintSource::Tokens,
Rule::IOError => LintSource::Io,
Rule::UnsortedImports | Rule::MissingRequiredImport => LintSource::Imports,
Rule::ImplicitNamespacePackage | Rule::InvalidModuleName => LintSource::Filesystem,
- #[cfg(feature = "logical_lines")]
Rule::IndentationWithInvalidMultiple
| Rule::IndentationWithInvalidMultipleComment
| Rule::MissingWhitespace
@@ -990,6 +358,7 @@ pub const INCOMPATIBLE_CODES: &[(Rule, Rule, &str); 2] = &[
#[cfg(test)]
mod tests {
use std::mem::size_of;
+
use strum::IntoEnumIterator;
use super::{Linter, Rule, RuleNamespace};
diff --git a/crates/ruff/src/registry/rule_set.rs b/crates/ruff/src/registry/rule_set.rs
index 4cb6df76b423a..7fdbf8b19d970 100644
--- a/crates/ruff/src/registry/rule_set.rs
+++ b/crates/ruff/src/registry/rule_set.rs
@@ -3,14 +3,16 @@ use ruff_macros::CacheKey;
use std::fmt::{Debug, Formatter};
use std::iter::FusedIterator;
+const RULESET_SIZE: usize = 10;
+
/// A set of [`Rule`]s.
///
/// Uses a bitset where a bit of one signals that the Rule with that [u16] is in this set.
#[derive(Clone, Default, CacheKey, PartialEq, Eq)]
-pub struct RuleSet([u64; 10]);
+pub struct RuleSet([u64; RULESET_SIZE]);
impl RuleSet {
- const EMPTY: [u64; 10] = [0; 10];
+ const EMPTY: [u64; RULESET_SIZE] = [0; RULESET_SIZE];
// 64 fits into a u16 without truncation
#[allow(clippy::cast_possible_truncation)]
@@ -252,7 +254,7 @@ impl RuleSet {
///
/// let iter: Vec<_> = set.iter().collect();
///
- /// assert_eq!(iter, vec![Rule::AmbiguousFunctionName, Rule::AnyType]);
+ /// assert_eq!(iter, vec![Rule::AnyType, Rule::AmbiguousFunctionName]);
/// ```
pub fn iter(&self) -> RuleSetIterator {
RuleSetIterator {
diff --git a/crates/ruff/src/rule_redirects.rs b/crates/ruff/src/rule_redirects.rs
index 4749b64ccacb4..a9fd305027db2 100644
--- a/crates/ruff/src/rule_redirects.rs
+++ b/crates/ruff/src/rule_redirects.rs
@@ -93,5 +93,10 @@ static REDIRECTS: Lazy<HashMap<&'static str, &'static str>> = Lazy::new(|| {
// TODO(charlie): Remove by 2023-06-01.
("RUF004", "B026"),
("PIE802", "C419"),
+ ("PLW0130", "B033"),
+ ("T001", "FIX001"),
+ ("T002", "FIX002"),
+ ("T003", "FIX003"),
+ ("T004", "FIX004"),
])
});
diff --git a/crates/ruff/src/rule_selector.rs b/crates/ruff/src/rule_selector.rs
index b3ce3df5bebae..6985c1be3cf40 100644
--- a/crates/ruff/src/rule_selector.rs
+++ b/crates/ruff/src/rule_selector.rs
@@ -1,22 +1,21 @@
use std::str::FromStr;
-use itertools::Itertools;
-use schemars::_serde_json::Value;
-use schemars::schema::{InstanceType, Schema, SchemaObject};
-use schemars::JsonSchema;
use serde::de::{self, Visitor};
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use strum_macros::EnumIter;
use crate::codes::RuleCodePrefix;
-use crate::registry::{Linter, Rule, RuleIter, RuleNamespace};
+use crate::codes::RuleIter;
+use crate::registry::{Linter, Rule, RuleNamespace};
use crate::rule_redirects::get_redirect;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum RuleSelector {
- /// Select all rules.
+ /// Select all stable rules.
All,
+ /// Select all nursery rules.
+ Nursery,
/// Legacy category to select both the `mccabe` and `flake8-comprehensions` linters
/// via a single selector.
C,
@@ -42,30 +41,30 @@ impl FromStr for RuleSelector {
type Err = ParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
- if s == "ALL" {
- Ok(Self::All)
- } else if s == "C" {
- Ok(Self::C)
- } else if s == "T" {
- Ok(Self::T)
- } else {
- let (s, redirected_from) = match get_redirect(s) {
- Some((from, target)) => (target, Some(from)),
- None => (s, None),
- };
-
- let (linter, code) =
- Linter::parse_code(s).ok_or_else(|| ParseError::Unknown(s.to_string()))?;
-
- if code.is_empty() {
- return Ok(Self::Linter(linter));
- }
+ match s {
+ "ALL" => Ok(Self::All),
+ "NURSERY" => Ok(Self::Nursery),
+ "C" => Ok(Self::C),
+ "T" => Ok(Self::T),
+ _ => {
+ let (s, redirected_from) = match get_redirect(s) {
+ Some((from, target)) => (target, Some(from)),
+ None => (s, None),
+ };
+
+ let (linter, code) =
+ Linter::parse_code(s).ok_or_else(|| ParseError::Unknown(s.to_string()))?;
+
+ if code.is_empty() {
+ return Ok(Self::Linter(linter));
+ }
- Ok(Self::Prefix {
- prefix: RuleCodePrefix::parse(&linter, code)
- .map_err(|_| ParseError::Unknown(s.to_string()))?,
- redirected_from,
- })
+ Ok(Self::Prefix {
+ prefix: RuleCodePrefix::parse(&linter, code)
+ .map_err(|_| ParseError::Unknown(s.to_string()))?,
+ redirected_from,
+ })
+ }
}
}
}
@@ -82,6 +81,7 @@ impl RuleSelector {
pub fn prefix_and_code(&self) -> (&'static str, &'static str) {
match self {
RuleSelector::All => ("", "ALL"),
+ RuleSelector::Nursery => ("", "NURSERY"),
RuleSelector::C => ("", "C"),
RuleSelector::T => ("", "T"),
RuleSelector::Prefix { prefix, .. } => {
@@ -123,8 +123,7 @@ impl Visitor<'_> for SelectorVisitor {
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str(
- "expected a string code identifying a linter or specific rule, or a partial rule code \
- or ALL to refer to all rules",
+ "expected a string code identifying a linter or specific rule, or a partial rule code or ALL to refer to all rules",
)
}
@@ -146,12 +145,17 @@ impl From<RuleCodePrefix> for RuleSelector {
}
impl IntoIterator for &RuleSelector {
- type IntoIter = RuleSelectorIter;
type Item = Rule;
+ type IntoIter = RuleSelectorIter;
fn into_iter(self) -> Self::IntoIter {
match self {
- RuleSelector::All => RuleSelectorIter::All(Rule::iter()),
+ RuleSelector::All => {
+ RuleSelectorIter::All(Rule::iter().filter(|rule| !rule.is_nursery()))
+ }
+ RuleSelector::Nursery => {
+ RuleSelectorIter::Nursery(Rule::iter().filter(Rule::is_nursery))
+ }
RuleSelector::C => RuleSelectorIter::Chain(
Linter::Flake8Comprehensions
.into_iter()
@@ -169,7 +173,8 @@ impl IntoIterator for &RuleSelector {
}
pub enum RuleSelectorIter {
- All(RuleIter),
+ All(std::iter::Filter<RuleIter, fn(&Rule) -> bool>),
+ Nursery(std::iter::Filter<RuleIter, fn(&Rule) -> bool>),
Chain(std::iter::Chain<std::vec::IntoIter<Rule>, std::vec::IntoIter<Rule>>),
Vec(std::vec::IntoIter<Rule>),
}
@@ -180,6 +185,7 @@ impl Iterator for RuleSelectorIter {
fn next(&mut self) -> Option<Self::Item> {
match self {
RuleSelectorIter::All(iter) => iter.next(),
+ RuleSelectorIter::Nursery(iter) => iter.next(),
RuleSelectorIter::Chain(iter) => iter.next(),
RuleSelectorIter::Vec(iter) => iter.next(),
}
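
The `All` and `Nursery` arms above can share the `Filter<RuleIter, fn(&Rule) -> bool>` iterator type because non-capturing closures coerce to `fn` pointers. A standalone sketch of that pattern follows; `Color`, `select`, and `ColorFilter` are illustrative stand-ins, not ruff APIs.

```rust
// Illustrative stand-ins: `Color` plays the role of `Rule`, and `ColorFilter`
// mirrors `Filter<RuleIter, fn(&Rule) -> bool>` from the diff above.
#[derive(Clone, Copy)]
enum Color {
    Stable,
    Nursery,
}

impl Color {
    fn is_nursery(&self) -> bool {
        matches!(self, Color::Nursery)
    }
}

type ColorFilter = std::iter::Filter<std::vec::IntoIter<Color>, fn(&Color) -> bool>;

fn select(all: Vec<Color>, nursery_only: bool) -> ColorFilter {
    if nursery_only {
        // Non-capturing closures and plain methods coerce to `fn` pointers,
        // so both arms can share the single `ColorFilter` type.
        all.into_iter().filter(Color::is_nursery as fn(&Color) -> bool)
    } else {
        all.into_iter()
            .filter((|color: &Color| !color.is_nursery()) as fn(&Color) -> bool)
    }
}

fn main() {
    let colors = vec![Color::Stable, Color::Nursery, Color::Stable];
    assert_eq!(select(colors.clone(), false).count(), 2); // stable only
    assert_eq!(select(colors, true).count(), 1); // nursery only
}
```
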
@@ -198,59 +204,58 @@ pub(crate) const fn prefix_to_selector(prefix: RuleCodePrefix) -> RuleSelector {
}
}
-impl JsonSchema for RuleSelector {
- fn schema_name() -> String {
- "RuleSelector".to_string()
- }
+#[cfg(feature = "schemars")]
+mod schema {
+ use itertools::Itertools;
+ use schemars::JsonSchema;
+ use schemars::_serde_json::Value;
+ use schemars::schema::{InstanceType, Schema, SchemaObject};
+ use strum::IntoEnumIterator;
- fn json_schema(_gen: &mut schemars::gen::SchemaGenerator) -> Schema {
- Schema::Object(SchemaObject {
- instance_type: Some(InstanceType::String.into()),
- enum_values: Some(
- [
- // Include the non-standard "ALL" selector.
- "ALL".to_string(),
- // Include the legacy "C" and "T" selectors.
- "C".to_string(),
- "T".to_string(),
- // Include some common redirect targets for those legacy selectors.
- "C9".to_string(),
- "T1".to_string(),
- "T2".to_string(),
- ]
- .into_iter()
- .chain(
- RuleCodePrefix::iter()
- .filter(|p| {
- // Once logical lines are active by default, please remove this.
- // This is here because generate-all output otherwise depends on
- // the feature sets which makes the test running with
- // `--all-features` fail
- !Rule::from_code(&format!(
- "{}{}",
- p.linter().common_prefix(),
- p.short_code()
- ))
- .unwrap()
- .lint_source()
- .is_logical_lines()
- })
- .map(|p| {
- let prefix = p.linter().common_prefix();
- let code = p.short_code();
- format!("{prefix}{code}")
- })
- .chain(Linter::iter().filter_map(|l| {
- let prefix = l.common_prefix();
- (!prefix.is_empty()).then(|| prefix.to_string())
- })),
- )
- .sorted()
- .map(Value::String)
- .collect(),
- ),
- ..SchemaObject::default()
- })
+ use crate::registry::RuleNamespace;
+ use crate::rule_selector::{Linter, RuleCodePrefix};
+ use crate::RuleSelector;
+
+ impl JsonSchema for RuleSelector {
+ fn schema_name() -> String {
+ "RuleSelector".to_string()
+ }
+
+ fn json_schema(_gen: &mut schemars::gen::SchemaGenerator) -> Schema {
+ Schema::Object(SchemaObject {
+ instance_type: Some(InstanceType::String.into()),
+ enum_values: Some(
+ [
+ // Include the non-standard "ALL" selector.
+ "ALL".to_string(),
+ // Include the legacy "C" and "T" selectors.
+ "C".to_string(),
+ "T".to_string(),
+ // Include some common redirect targets for those legacy selectors.
+ "C9".to_string(),
+ "T1".to_string(),
+ "T2".to_string(),
+ ]
+ .into_iter()
+ .chain(
+ RuleCodePrefix::iter()
+ .map(|p| {
+ let prefix = p.linter().common_prefix();
+ let code = p.short_code();
+ format!("{prefix}{code}")
+ })
+ .chain(Linter::iter().filter_map(|l| {
+ let prefix = l.common_prefix();
+ (!prefix.is_empty()).then(|| prefix.to_string())
+ })),
+ )
+ .sorted()
+ .map(Value::String)
+ .collect(),
+ ),
+ ..SchemaObject::default()
+ })
+ }
}
}
@@ -258,6 +263,7 @@ impl RuleSelector {
pub(crate) fn specificity(&self) -> Specificity {
match self {
RuleSelector::All => Specificity::All,
+ RuleSelector::Nursery => Specificity::All,
RuleSelector::T => Specificity::LinterGroup,
RuleSelector::C => Specificity::LinterGroup,
RuleSelector::Linter(..) => Specificity::Linter,
diff --git a/crates/ruff/src/rules/airflow/mod.rs b/crates/ruff/src/rules/airflow/mod.rs
new file mode 100644
index 0000000000000..12871a42aabc6
--- /dev/null
+++ b/crates/ruff/src/rules/airflow/mod.rs
@@ -0,0 +1,25 @@
+//! Airflow-specific rules.
+pub(crate) mod rules;
+
+#[cfg(test)]
+mod tests {
+ use std::path::Path;
+
+ use anyhow::Result;
+ use test_case::test_case;
+
+ use crate::registry::Rule;
+ use crate::test::test_path;
+ use crate::{assert_messages, settings};
+
+ #[test_case(Rule::AirflowVariableNameTaskIdMismatch, Path::new("AIR001.py"))]
+ fn rules(rule_code: Rule, path: &Path) -> Result<()> {
+ let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
+ let diagnostics = test_path(
+ Path::new("airflow").join(path).as_path(),
+ &settings::Settings::for_rule(rule_code),
+ )?;
+ assert_messages!(snapshot, diagnostics);
+ Ok(())
+ }
+}
diff --git a/crates/ruff/src/rules/airflow/rules/mod.rs b/crates/ruff/src/rules/airflow/rules/mod.rs
new file mode 100644
index 0000000000000..0dbd1cf914ea6
--- /dev/null
+++ b/crates/ruff/src/rules/airflow/rules/mod.rs
@@ -0,0 +1,3 @@
+mod task_variable_name;
+
+pub(crate) use task_variable_name::{variable_name_task_id, AirflowVariableNameTaskIdMismatch};
diff --git a/crates/ruff/src/rules/airflow/rules/task_variable_name.rs b/crates/ruff/src/rules/airflow/rules/task_variable_name.rs
new file mode 100644
index 0000000000000..eae2490138f39
--- /dev/null
+++ b/crates/ruff/src/rules/airflow/rules/task_variable_name.rs
@@ -0,0 +1,102 @@
+use rustpython_parser::ast;
+use rustpython_parser::ast::{Expr, Ranged};
+
+use ruff_diagnostics::{Diagnostic, Violation};
+use ruff_macros::{derive_message_formats, violation};
+use ruff_python_ast::prelude::Constant;
+
+use crate::checkers::ast::Checker;
+
+/// ## What it does
+/// Checks that the task variable name matches the `task_id` value for
+/// Airflow Operators.
+///
+/// ## Why is this bad?
+/// When initializing an Airflow Operator, for consistency, the variable
+/// name should match the `task_id` value. This makes it easier to
+/// follow the flow of the DAG.
+///
+/// ## Example
+/// ```python
+/// from airflow.operators import PythonOperator
+///
+///
+/// incorrect_name = PythonOperator(task_id="my_task")
+/// ```
+///
+/// Use instead:
+/// ```python
+/// from airflow.operators import PythonOperator
+///
+///
+/// my_task = PythonOperator(task_id="my_task")
+/// ```
+#[violation]
+pub struct AirflowVariableNameTaskIdMismatch {
+ task_id: String,
+}
+
+impl Violation for AirflowVariableNameTaskIdMismatch {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ let AirflowVariableNameTaskIdMismatch { task_id } = self;
+ format!("Task variable name should match the `task_id`: \"{task_id}\"")
+ }
+}
+
+/// AIR001
+pub(crate) fn variable_name_task_id(
+ checker: &mut Checker,
+ targets: &[Expr],
+ value: &Expr,
+) -> Option<Diagnostic> {
+ // If we have more than one target, we can't do anything.
+ if targets.len() != 1 {
+ return None;
+ }
+
+ let target = &targets[0];
+ let Expr::Name(ast::ExprName { id, .. }) = target else {
+ return None;
+ };
+
+ // If the value is not a call, we can't do anything.
+ let Expr::Call(ast::ExprCall { func, keywords, .. }) = value else {
+ return None;
+ };
+
+ // If the function doesn't come from Airflow, we can't do anything.
+ if !checker
+ .semantic_model()
+ .resolve_call_path(func)
+ .map_or(false, |call_path| matches!(call_path[0], "airflow"))
+ {
+ return None;
+ }
+
+ // If the call doesn't have a `task_id` keyword argument, we can't do anything.
+ let keyword = keywords
+ .iter()
+ .find(|keyword| keyword.arg.as_ref().map_or(false, |arg| arg == "task_id"))?;
+
+ // If the keyword argument is not a string, we can't do anything.
+ let task_id = match &keyword.value {
+ Expr::Constant(constant) => match &constant.value {
+ Constant::Str(value) => value,
+ _ => return None,
+ },
+ _ => return None,
+ };
+
+ // If the target name is the same as the task_id, no violation.
+ if id == task_id {
+ return None;
+ }
+
+ Some(Diagnostic::new(
+ AirflowVariableNameTaskIdMismatch {
+ task_id: task_id.to_string(),
+ },
+ target.range(),
+ ))
+}
diff --git a/crates/ruff/src/rules/airflow/snapshots/ruff__rules__airflow__tests__AIR001_AIR001.py.snap b/crates/ruff/src/rules/airflow/snapshots/ruff__rules__airflow__tests__AIR001_AIR001.py.snap
new file mode 100644
index 0000000000000..be7e9d7af654a
--- /dev/null
+++ b/crates/ruff/src/rules/airflow/snapshots/ruff__rules__airflow__tests__AIR001_AIR001.py.snap
@@ -0,0 +1,22 @@
+---
+source: crates/ruff/src/rules/airflow/mod.rs
+---
+AIR001.py:11:1: AIR001 Task variable name should match the `task_id`: "my_task"
+ |
+11 | my_task_2 = PythonOperator(callable=my_callable, task_id="my_task_2")
+12 |
+13 | incorrect_name = PythonOperator(task_id="my_task")
+ | ^^^^^^^^^^^^^^ AIR001
+14 | incorrect_name_2 = PythonOperator(callable=my_callable, task_id="my_task_2")
+ |
+
+AIR001.py:12:1: AIR001 Task variable name should match the `task_id`: "my_task_2"
+ |
+12 | incorrect_name = PythonOperator(task_id="my_task")
+13 | incorrect_name_2 = PythonOperator(callable=my_callable, task_id="my_task_2")
+ | ^^^^^^^^^^^^^^^^ AIR001
+14 |
+15 | from my_module import MyClass
+ |
+
+
diff --git a/crates/ruff/src/rules/eradicate/detection.rs b/crates/ruff/src/rules/eradicate/detection.rs
index 2cff42cd6466e..4f3a77f15a924 100644
--- a/crates/ruff/src/rules/eradicate/detection.rs
+++ b/crates/ruff/src/rules/eradicate/detection.rs
@@ -1,11 +1,12 @@
/// See: [eradicate.py](https://github.com/myint/eradicate/blob/98f199940979c94447a461d50d27862b118b282d/eradicate.py)
use once_cell::sync::Lazy;
use regex::Regex;
-use rustpython_parser as parser;
+use rustpython_parser::ast::Suite;
+use rustpython_parser::Parse;
static ALLOWLIST_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(
- r"^(?i)(?:pylint|pyright|noqa|nosec|type:\s*ignore|fmt:\s*(on|off)|isort:\s*(on|off|skip|skip_file|split|dont-add-imports(:\s*\[.*?])?)|mypy:|SPDX-License-Identifier:)"
+ r"^(?i)(?:pylint|pyright|noqa|nosec|region|endregion|type:\s*ignore|fmt:\s*(on|off)|isort:\s*(on|off|skip|skip_file|split|dont-add-imports(:\s*\[.*?])?)|mypy:|SPDX-License-Identifier:)"
).unwrap()
});
static BRACKET_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^[()\[\]{}\s]+$").unwrap());
@@ -31,7 +32,7 @@ static PARTIAL_DICTIONARY_REGEX: Lazy<Regex> =
static PRINT_RETURN_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^(print|return)\b\s*").unwrap());
/// Returns `true` if a comment contains Python code.
-pub fn comment_contains_code(line: &str, task_tags: &[String]) -> bool {
+pub(crate) fn comment_contains_code(line: &str, task_tags: &[String]) -> bool {
let line = if let Some(line) = line.trim().strip_prefix('#') {
line.trim_start_matches([' ', '#'])
} else {
@@ -78,7 +79,7 @@ pub fn comment_contains_code(line: &str, task_tags: &[String]) -> bool {
}
// Finally, compile the source code.
- parser::parse_program(&line, "<filename>").is_ok()
+ Suite::parse(&line, "<filename>").is_ok()
}
/// Returns `true` if a line is probably part of some multiline code.
@@ -224,6 +225,11 @@ mod tests {
assert!(!comment_contains_code("# noqa: A123", &[]));
assert!(!comment_contains_code("# noqa:A123", &[]));
assert!(!comment_contains_code("# nosec", &[]));
+ assert!(!comment_contains_code("# region", &[]));
+ assert!(!comment_contains_code("# endregion", &[]));
+ assert!(!comment_contains_code("# region.name", &[]));
+ assert!(!comment_contains_code("# region name", &[]));
+ assert!(!comment_contains_code("# region: name", &[]));
assert!(!comment_contains_code("# fmt: on", &[]));
assert!(!comment_contains_code("# fmt: off", &[]));
assert!(!comment_contains_code("# fmt:on", &[]));
diff --git a/crates/ruff/src/rules/eradicate/mod.rs b/crates/ruff/src/rules/eradicate/mod.rs
index b355e2bda32ce..a363458154a5e 100644
--- a/crates/ruff/src/rules/eradicate/mod.rs
+++ b/crates/ruff/src/rules/eradicate/mod.rs
@@ -7,14 +7,13 @@ mod tests {
use std::path::Path;
use anyhow::Result;
-
use test_case::test_case;
use crate::registry::Rule;
use crate::test::test_path;
use crate::{assert_messages, settings};
- #[test_case(Rule::CommentedOutCode, Path::new("ERA001.py"); "ERA001")]
+ #[test_case(Rule::CommentedOutCode, Path::new("ERA001.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
let diagnostics = test_path(
diff --git a/crates/ruff/src/rules/eradicate/rules.rs b/crates/ruff/src/rules/eradicate/rules/commented_out_code.rs
similarity index 52%
rename from crates/ruff/src/rules/eradicate/rules.rs
rename to crates/ruff/src/rules/eradicate/rules/commented_out_code.rs
index 2a67f4ec9ea5e..3d1a5d22901f3 100644
--- a/crates/ruff/src/rules/eradicate/rules.rs
+++ b/crates/ruff/src/rules/eradicate/rules/commented_out_code.rs
@@ -1,13 +1,11 @@
-use ruff_text_size::TextRange;
-
-use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit};
+use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::source_code::Locator;
+use ruff_python_ast::source_code::{Indexer, Locator};
use crate::registry::Rule;
-use crate::settings::{flags, Settings};
+use crate::settings::Settings;
-use super::detection::comment_contains_code;
+use super::super::detection::comment_contains_code;
/// ## What it does
/// Checks for commented-out Python code.
@@ -16,6 +14,9 @@ use super::detection::comment_contains_code;
/// Commented-out code is dead code, and is often included inadvertently.
/// It should be removed.
///
+/// ## Options
+/// - `task-tags`
+///
/// ## Example
/// ```python
/// # print('foo')
@@ -46,22 +47,29 @@ fn is_standalone_comment(line: &str) -> bool {
}
/// ERA001
-pub fn commented_out_code(
+pub(crate) fn commented_out_code(
+ indexer: &Indexer,
locator: &Locator,
- range: TextRange,
settings: &Settings,
- autofix: flags::Autofix,
-) -> Option<Diagnostic> {
- let line = locator.full_lines(range);
+) -> Vec<Diagnostic> {
+ let mut diagnostics = vec![];
+
+ for range in indexer.comment_ranges() {
+ let line = locator.full_lines(*range);
- // Verify that the comment is on its own line, and that it contains code.
- if is_standalone_comment(line) && comment_contains_code(line, &settings.task_tags[..]) {
- let mut diagnostic = Diagnostic::new(CommentedOutCode, range);
- if autofix.into() && settings.rules.should_fix(Rule::CommentedOutCode) {
- diagnostic.set_fix(Edit::range_deletion(locator.full_lines_range(range)));
+ // Verify that the comment is on its own line, and that it contains code.
+ if is_standalone_comment(line) && comment_contains_code(line, &settings.task_tags[..]) {
+ let mut diagnostic = Diagnostic::new(CommentedOutCode, *range);
+
+ if settings.rules.should_fix(Rule::CommentedOutCode) {
+ #[allow(deprecated)]
+ diagnostic.set_fix(Fix::unspecified(Edit::range_deletion(
+ locator.full_lines_range(*range),
+ )));
+ }
+ diagnostics.push(diagnostic);
}
- Some(diagnostic)
- } else {
- None
}
+
+ diagnostics
}
diff --git a/crates/ruff/src/rules/eradicate/rules/mod.rs b/crates/ruff/src/rules/eradicate/rules/mod.rs
new file mode 100644
index 0000000000000..8ec37813d9f35
--- /dev/null
+++ b/crates/ruff/src/rules/eradicate/rules/mod.rs
@@ -0,0 +1,3 @@
+pub(crate) use commented_out_code::{commented_out_code, CommentedOutCode};
+
+mod commented_out_code;
diff --git a/crates/ruff/src/rules/flake8_2020/helpers.rs b/crates/ruff/src/rules/flake8_2020/helpers.rs
new file mode 100644
index 0000000000000..e5bb8fd4b60c5
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_2020/helpers.rs
@@ -0,0 +1,8 @@
+use ruff_python_semantic::model::SemanticModel;
+use rustpython_parser::ast::Expr;
+
+pub(super) fn is_sys(model: &SemanticModel, expr: &Expr, target: &str) -> bool {
+ model
+ .resolve_call_path(expr)
+ .map_or(false, |call_path| call_path.as_slice() == ["sys", target])
+}
diff --git a/crates/ruff/src/rules/flake8_2020/mod.rs b/crates/ruff/src/rules/flake8_2020/mod.rs
index b34b124e70938..ac20ee0b38816 100644
--- a/crates/ruff/src/rules/flake8_2020/mod.rs
+++ b/crates/ruff/src/rules/flake8_2020/mod.rs
@@ -1,4 +1,5 @@
//! Rules from [flake8-2020](https://pypi.org/project/flake8-2020/).
+mod helpers;
pub(crate) mod rules;
#[cfg(test)]
@@ -6,23 +7,22 @@ mod tests {
use std::path::Path;
use anyhow::Result;
-
use test_case::test_case;
use crate::registry::Rule;
use crate::test::test_path;
use crate::{assert_messages, settings};
- #[test_case(Rule::SysVersionSlice3, Path::new("YTT101.py"); "YTT101")]
- #[test_case(Rule::SysVersion2, Path::new("YTT102.py"); "YTT102")]
- #[test_case(Rule::SysVersionCmpStr3, Path::new("YTT103.py"); "YTT103")]
- #[test_case(Rule::SysVersionInfo0Eq3, Path::new("YTT201.py"); "YTT201")]
- #[test_case(Rule::SixPY3, Path::new("YTT202.py"); "YTT202")]
- #[test_case(Rule::SysVersionInfo1CmpInt, Path::new("YTT203.py"); "YTT203")]
- #[test_case(Rule::SysVersionInfoMinorCmpInt, Path::new("YTT204.py"); "YTT204")]
- #[test_case(Rule::SysVersion0, Path::new("YTT301.py"); "YTT301")]
- #[test_case(Rule::SysVersionCmpStr10, Path::new("YTT302.py"); "YTT302")]
- #[test_case(Rule::SysVersionSlice1, Path::new("YTT303.py"); "YTT303")]
+ #[test_case(Rule::SysVersionSlice3, Path::new("YTT101.py"))]
+ #[test_case(Rule::SysVersion2, Path::new("YTT102.py"))]
+ #[test_case(Rule::SysVersionCmpStr3, Path::new("YTT103.py"))]
+ #[test_case(Rule::SysVersionInfo0Eq3, Path::new("YTT201.py"))]
+ #[test_case(Rule::SixPY3, Path::new("YTT202.py"))]
+ #[test_case(Rule::SysVersionInfo1CmpInt, Path::new("YTT203.py"))]
+ #[test_case(Rule::SysVersionInfoMinorCmpInt, Path::new("YTT204.py"))]
+ #[test_case(Rule::SysVersion0, Path::new("YTT301.py"))]
+ #[test_case(Rule::SysVersionCmpStr10, Path::new("YTT302.py"))]
+ #[test_case(Rule::SysVersionSlice1, Path::new("YTT303.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
let diagnostics = test_path(
diff --git a/crates/ruff/src/rules/flake8_2020/rules.rs b/crates/ruff/src/rules/flake8_2020/rules.rs
deleted file mode 100644
index a4d7bd3898164..0000000000000
--- a/crates/ruff/src/rules/flake8_2020/rules.rs
+++ /dev/null
@@ -1,297 +0,0 @@
-use num_bigint::BigInt;
-use rustpython_parser::ast::{Cmpop, Constant, Expr, ExprKind, Located};
-
-use ruff_diagnostics::{Diagnostic, Violation};
-use ruff_macros::{derive_message_formats, violation};
-
-use crate::checkers::ast::Checker;
-use crate::registry::Rule;
-
-#[violation]
-pub struct SysVersionSlice3;
-
-impl Violation for SysVersionSlice3 {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!("`sys.version[:3]` referenced (python3.10), use `sys.version_info`")
- }
-}
-
-#[violation]
-pub struct SysVersion2;
-
-impl Violation for SysVersion2 {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!("`sys.version[2]` referenced (python3.10), use `sys.version_info`")
- }
-}
-
-#[violation]
-pub struct SysVersionCmpStr3;
-
-impl Violation for SysVersionCmpStr3 {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!("`sys.version` compared to string (python3.10), use `sys.version_info`")
- }
-}
-
-#[violation]
-pub struct SysVersionInfo0Eq3;
-
-impl Violation for SysVersionInfo0Eq3 {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!("`sys.version_info[0] == 3` referenced (python4), use `>=`")
- }
-}
-
-#[violation]
-pub struct SixPY3;
-
-impl Violation for SixPY3 {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!("`six.PY3` referenced (python4), use `not six.PY2`")
- }
-}
-
-#[violation]
-pub struct SysVersionInfo1CmpInt;
-
-impl Violation for SysVersionInfo1CmpInt {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!(
- "`sys.version_info[1]` compared to integer (python4), compare `sys.version_info` to \
- tuple"
- )
- }
-}
-
-#[violation]
-pub struct SysVersionInfoMinorCmpInt;
-
-impl Violation for SysVersionInfoMinorCmpInt {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!(
- "`sys.version_info.minor` compared to integer (python4), compare `sys.version_info` \
- to tuple"
- )
- }
-}
-
-#[violation]
-pub struct SysVersion0;
-
-impl Violation for SysVersion0 {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!("`sys.version[0]` referenced (python10), use `sys.version_info`")
- }
-}
-
-#[violation]
-pub struct SysVersionCmpStr10;
-
-impl Violation for SysVersionCmpStr10 {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!("`sys.version` compared to string (python10), use `sys.version_info`")
- }
-}
-
-#[violation]
-pub struct SysVersionSlice1;
-
-impl Violation for SysVersionSlice1 {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!("`sys.version[:1]` referenced (python10), use `sys.version_info`")
- }
-}
-
-fn is_sys(checker: &Checker, expr: &Expr, target: &str) -> bool {
- checker
- .ctx
- .resolve_call_path(expr)
- .map_or(false, |call_path| call_path.as_slice() == ["sys", target])
-}
-
-/// YTT101, YTT102, YTT301, YTT303
-pub fn subscript(checker: &mut Checker, value: &Expr, slice: &Expr) {
- if is_sys(checker, value, "version") {
- match &slice.node {
- ExprKind::Slice {
- lower: None,
- upper: Some(upper),
- step: None,
- ..
- } => {
- if let ExprKind::Constant {
- value: Constant::Int(i),
- ..
- } = &upper.node
- {
- if *i == BigInt::from(1)
- && checker.settings.rules.enabled(Rule::SysVersionSlice1)
- {
- checker
- .diagnostics
- .push(Diagnostic::new(SysVersionSlice1, value.range()));
- } else if *i == BigInt::from(3)
- && checker.settings.rules.enabled(Rule::SysVersionSlice3)
- {
- checker
- .diagnostics
- .push(Diagnostic::new(SysVersionSlice3, value.range()));
- }
- }
- }
-
- ExprKind::Constant {
- value: Constant::Int(i),
- ..
- } => {
- if *i == BigInt::from(2) && checker.settings.rules.enabled(Rule::SysVersion2) {
- checker
- .diagnostics
- .push(Diagnostic::new(SysVersion2, value.range()));
- } else if *i == BigInt::from(0) && checker.settings.rules.enabled(Rule::SysVersion0)
- {
- checker
- .diagnostics
- .push(Diagnostic::new(SysVersion0, value.range()));
- }
- }
-
- _ => {}
- }
- }
-}
-
-/// YTT103, YTT201, YTT203, YTT204, YTT302
-pub fn compare(checker: &mut Checker, left: &Expr, ops: &[Cmpop], comparators: &[Expr]) {
- match &left.node {
- ExprKind::Subscript { value, slice, .. } if is_sys(checker, value, "version_info") => {
- if let ExprKind::Constant {
- value: Constant::Int(i),
- ..
- } = &slice.node
- {
- if *i == BigInt::from(0) {
- if let (
- [Cmpop::Eq | Cmpop::NotEq],
- [Located {
- node:
- ExprKind::Constant {
- value: Constant::Int(n),
- ..
- },
- ..
- }],
- ) = (ops, comparators)
- {
- if *n == BigInt::from(3)
- && checker.settings.rules.enabled(Rule::SysVersionInfo0Eq3)
- {
- checker
- .diagnostics
- .push(Diagnostic::new(SysVersionInfo0Eq3, left.range()));
- }
- }
- } else if *i == BigInt::from(1) {
- if let (
- [Cmpop::Lt | Cmpop::LtE | Cmpop::Gt | Cmpop::GtE],
- [Located {
- node:
- ExprKind::Constant {
- value: Constant::Int(_),
- ..
- },
- ..
- }],
- ) = (ops, comparators)
- {
- if checker.settings.rules.enabled(Rule::SysVersionInfo1CmpInt) {
- checker
- .diagnostics
- .push(Diagnostic::new(SysVersionInfo1CmpInt, left.range()));
- }
- }
- }
- }
- }
-
- ExprKind::Attribute { value, attr, .. }
- if is_sys(checker, value, "version_info") && attr == "minor" =>
- {
- if let (
- [Cmpop::Lt | Cmpop::LtE | Cmpop::Gt | Cmpop::GtE],
- [Located {
- node:
- ExprKind::Constant {
- value: Constant::Int(_),
- ..
- },
- ..
- }],
- ) = (ops, comparators)
- {
- if checker
- .settings
- .rules
- .enabled(Rule::SysVersionInfoMinorCmpInt)
- {
- checker
- .diagnostics
- .push(Diagnostic::new(SysVersionInfoMinorCmpInt, left.range()));
- }
- }
- }
-
- _ => {}
- }
-
- if is_sys(checker, left, "version") {
- if let (
- [Cmpop::Lt | Cmpop::LtE | Cmpop::Gt | Cmpop::GtE],
- [Located {
- node:
- ExprKind::Constant {
- value: Constant::Str(s),
- ..
- },
- ..
- }],
- ) = (ops, comparators)
- {
- if s.len() == 1 {
- if checker.settings.rules.enabled(Rule::SysVersionCmpStr10) {
- checker
- .diagnostics
- .push(Diagnostic::new(SysVersionCmpStr10, left.range()));
- }
- } else if checker.settings.rules.enabled(Rule::SysVersionCmpStr3) {
- checker
- .diagnostics
- .push(Diagnostic::new(SysVersionCmpStr3, left.range()));
- }
- }
- }
-}
-
-/// YTT202
-pub fn name_or_attribute(checker: &mut Checker, expr: &Expr) {
- if checker
- .ctx
- .resolve_call_path(expr)
- .map_or(false, |call_path| call_path.as_slice() == ["six", "PY3"])
- {
- checker
- .diagnostics
- .push(Diagnostic::new(SixPY3, expr.range()));
- }
-}
diff --git a/crates/ruff/src/rules/flake8_2020/rules/compare.rs b/crates/ruff/src/rules/flake8_2020/rules/compare.rs
new file mode 100644
index 0000000000000..9877be4fae83b
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_2020/rules/compare.rs
@@ -0,0 +1,157 @@
+use num_bigint::BigInt;
+use rustpython_parser::ast::{self, Cmpop, Constant, Expr, Ranged};
+
+use ruff_diagnostics::{Diagnostic, Violation};
+use ruff_macros::{derive_message_formats, violation};
+
+use crate::checkers::ast::Checker;
+use crate::registry::Rule;
+
+use super::super::helpers::is_sys;
+
+#[violation]
+pub struct SysVersionCmpStr3;
+
+impl Violation for SysVersionCmpStr3 {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("`sys.version` compared to string (python3.10), use `sys.version_info`")
+ }
+}
+
+#[violation]
+pub struct SysVersionInfo0Eq3;
+
+impl Violation for SysVersionInfo0Eq3 {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("`sys.version_info[0] == 3` referenced (python4), use `>=`")
+ }
+}
+
+#[violation]
+pub struct SysVersionInfo1CmpInt;
+
+impl Violation for SysVersionInfo1CmpInt {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!(
+ "`sys.version_info[1]` compared to integer (python4), compare `sys.version_info` to \
+ tuple"
+ )
+ }
+}
+
+#[violation]
+pub struct SysVersionInfoMinorCmpInt;
+
+impl Violation for SysVersionInfoMinorCmpInt {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!(
+ "`sys.version_info.minor` compared to integer (python4), compare `sys.version_info` \
+ to tuple"
+ )
+ }
+}
+
+#[violation]
+pub struct SysVersionCmpStr10;
+
+impl Violation for SysVersionCmpStr10 {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("`sys.version` compared to string (python10), use `sys.version_info`")
+ }
+}
+
+/// YTT103, YTT201, YTT203, YTT204, YTT302
+pub(crate) fn compare(checker: &mut Checker, left: &Expr, ops: &[Cmpop], comparators: &[Expr]) {
+ match left {
+ Expr::Subscript(ast::ExprSubscript { value, slice, .. })
+ if is_sys(checker.semantic_model(), value, "version_info") =>
+ {
+ if let Expr::Constant(ast::ExprConstant {
+ value: Constant::Int(i),
+ ..
+ }) = slice.as_ref()
+ {
+ if *i == BigInt::from(0) {
+ if let (
+ [Cmpop::Eq | Cmpop::NotEq],
+ [Expr::Constant(ast::ExprConstant {
+ value: Constant::Int(n),
+ ..
+ })],
+ ) = (ops, comparators)
+ {
+ if *n == BigInt::from(3) && checker.enabled(Rule::SysVersionInfo0Eq3) {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(SysVersionInfo0Eq3, left.range()));
+ }
+ }
+ } else if *i == BigInt::from(1) {
+ if let (
+ [Cmpop::Lt | Cmpop::LtE | Cmpop::Gt | Cmpop::GtE],
+ [Expr::Constant(ast::ExprConstant {
+ value: Constant::Int(_),
+ ..
+ })],
+ ) = (ops, comparators)
+ {
+ if checker.enabled(Rule::SysVersionInfo1CmpInt) {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(SysVersionInfo1CmpInt, left.range()));
+ }
+ }
+ }
+ }
+ }
+
+ Expr::Attribute(ast::ExprAttribute { value, attr, .. })
+ if is_sys(checker.semantic_model(), value, "version_info") && attr == "minor" =>
+ {
+ if let (
+ [Cmpop::Lt | Cmpop::LtE | Cmpop::Gt | Cmpop::GtE],
+ [Expr::Constant(ast::ExprConstant {
+ value: Constant::Int(_),
+ ..
+ })],
+ ) = (ops, comparators)
+ {
+ if checker.enabled(Rule::SysVersionInfoMinorCmpInt) {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(SysVersionInfoMinorCmpInt, left.range()));
+ }
+ }
+ }
+
+ _ => {}
+ }
+
+ if is_sys(checker.semantic_model(), left, "version") {
+ if let (
+ [Cmpop::Lt | Cmpop::LtE | Cmpop::Gt | Cmpop::GtE],
+ [Expr::Constant(ast::ExprConstant {
+ value: Constant::Str(s),
+ ..
+ })],
+ ) = (ops, comparators)
+ {
+ if s.len() == 1 {
+ if checker.enabled(Rule::SysVersionCmpStr10) {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(SysVersionCmpStr10, left.range()));
+ }
+ } else if checker.enabled(Rule::SysVersionCmpStr3) {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(SysVersionCmpStr3, left.range()));
+ }
+ }
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_2020/rules/mod.rs b/crates/ruff/src/rules/flake8_2020/rules/mod.rs
new file mode 100644
index 0000000000000..cb77bcc0ddda0
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_2020/rules/mod.rs
@@ -0,0 +1,12 @@
+pub(crate) use compare::{
+ compare, SysVersionCmpStr10, SysVersionCmpStr3, SysVersionInfo0Eq3, SysVersionInfo1CmpInt,
+ SysVersionInfoMinorCmpInt,
+};
+pub(crate) use name_or_attribute::{name_or_attribute, SixPY3};
+pub(crate) use subscript::{
+ subscript, SysVersion0, SysVersion2, SysVersionSlice1, SysVersionSlice3,
+};
+
+mod compare;
+mod name_or_attribute;
+mod subscript;
diff --git a/crates/ruff/src/rules/flake8_2020/rules/name_or_attribute.rs b/crates/ruff/src/rules/flake8_2020/rules/name_or_attribute.rs
new file mode 100644
index 0000000000000..d861abd262f51
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_2020/rules/name_or_attribute.rs
@@ -0,0 +1,29 @@
+use rustpython_parser::ast::{Expr, Ranged};
+
+use ruff_diagnostics::{Diagnostic, Violation};
+use ruff_macros::{derive_message_formats, violation};
+
+use crate::checkers::ast::Checker;
+
+#[violation]
+pub struct SixPY3;
+
+impl Violation for SixPY3 {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("`six.PY3` referenced (python4), use `not six.PY2`")
+ }
+}
+
+/// YTT202
+pub(crate) fn name_or_attribute(checker: &mut Checker, expr: &Expr) {
+ if checker
+ .semantic_model()
+ .resolve_call_path(expr)
+ .map_or(false, |call_path| call_path.as_slice() == ["six", "PY3"])
+ {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(SixPY3, expr.range()));
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_2020/rules/subscript.rs b/crates/ruff/src/rules/flake8_2020/rules/subscript.rs
new file mode 100644
index 0000000000000..b55a602423a6f
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_2020/rules/subscript.rs
@@ -0,0 +1,96 @@
+use num_bigint::BigInt;
+use rustpython_parser::ast::{self, Constant, Expr, Ranged};
+
+use ruff_diagnostics::{Diagnostic, Violation};
+use ruff_macros::{derive_message_formats, violation};
+
+use crate::checkers::ast::Checker;
+use crate::registry::Rule;
+use crate::rules::flake8_2020::helpers::is_sys;
+
+#[violation]
+pub struct SysVersionSlice3;
+
+impl Violation for SysVersionSlice3 {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("`sys.version[:3]` referenced (python3.10), use `sys.version_info`")
+ }
+}
+
+#[violation]
+pub struct SysVersion2;
+
+impl Violation for SysVersion2 {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("`sys.version[2]` referenced (python3.10), use `sys.version_info`")
+ }
+}
+
+#[violation]
+pub struct SysVersion0;
+
+impl Violation for SysVersion0 {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("`sys.version[0]` referenced (python10), use `sys.version_info`")
+ }
+}
+
+#[violation]
+pub struct SysVersionSlice1;
+
+impl Violation for SysVersionSlice1 {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("`sys.version[:1]` referenced (python10), use `sys.version_info`")
+ }
+}
+
+/// YTT101, YTT102, YTT301, YTT303
+pub(crate) fn subscript(checker: &mut Checker, value: &Expr, slice: &Expr) {
+ if is_sys(checker.semantic_model(), value, "version") {
+ match slice {
+ Expr::Slice(ast::ExprSlice {
+ lower: None,
+ upper: Some(upper),
+ step: None,
+ range: _,
+ }) => {
+ if let Expr::Constant(ast::ExprConstant {
+ value: Constant::Int(i),
+ ..
+ }) = upper.as_ref()
+ {
+ if *i == BigInt::from(1) && checker.enabled(Rule::SysVersionSlice1) {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(SysVersionSlice1, value.range()));
+ } else if *i == BigInt::from(3) && checker.enabled(Rule::SysVersionSlice3) {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(SysVersionSlice3, value.range()));
+ }
+ }
+ }
+
+ Expr::Constant(ast::ExprConstant {
+ value: Constant::Int(i),
+ ..
+ }) => {
+ if *i == BigInt::from(2) && checker.enabled(Rule::SysVersion2) {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(SysVersion2, value.range()));
+ } else if *i == BigInt::from(0) && checker.enabled(Rule::SysVersion0) {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(SysVersion0, value.range()));
+ }
+ }
+
+ _ => {}
+ }
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_annotations/fixes.rs b/crates/ruff/src/rules/flake8_annotations/fixes.rs
index 5a6fea6f4f670..26ae559005131 100644
--- a/crates/ruff/src/rules/flake8_annotations/fixes.rs
+++ b/crates/ruff/src/rules/flake8_annotations/fixes.rs
@@ -1,19 +1,23 @@
use anyhow::{bail, Result};
-use rustpython_parser::ast::Stmt;
+use rustpython_parser::ast::{Ranged, Stmt};
use rustpython_parser::{lexer, Mode, Tok};
use ruff_diagnostics::Edit;
use ruff_python_ast::source_code::Locator;
/// ANN204
-pub fn add_return_annotation(locator: &Locator, stmt: &Stmt, annotation: &str) -> Result<Edit> {
+pub(crate) fn add_return_annotation(
+ locator: &Locator,
+ stmt: &Stmt,
+ annotation: &str,
+) -> Result<Edit> {
let contents = &locator.contents()[stmt.range()];
// Find the colon (following the `def` keyword).
let mut seen_lpar = false;
let mut seen_rpar = false;
- let mut count: usize = 0;
- for (tok, range) in lexer::lex_located(contents, Mode::Module, stmt.start()).flatten() {
+ let mut count = 0u32;
+ for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, stmt.start()).flatten() {
if seen_lpar && seen_rpar {
if matches!(tok, Tok::Colon) {
return Ok(Edit::insertion(format!(" -> {annotation}"), range.start()));
@@ -24,10 +28,10 @@ pub fn add_return_annotation(locator: &Locator, stmt: &Stmt, annotation: &str) -
if count == 0 {
seen_lpar = true;
}
- count += 1;
+ count = count.saturating_add(1);
}
if matches!(tok, Tok::Rpar) {
- count -= 1;
+ count = count.saturating_sub(1);
if count == 0 {
seen_rpar = true;
}
diff --git a/crates/ruff/src/rules/flake8_annotations/helpers.rs b/crates/ruff/src/rules/flake8_annotations/helpers.rs
index 8702c6f9578fa..6813af958180a 100644
--- a/crates/ruff/src/rules/flake8_annotations/helpers.rs
+++ b/crates/ruff/src/rules/flake8_annotations/helpers.rs
@@ -1,38 +1,49 @@
-use rustpython_parser::ast::{Arguments, Expr, Stmt, StmtKind};
+use rustpython_parser::ast::{self, Arguments, Expr, Stmt};
use ruff_python_ast::cast;
use ruff_python_semantic::analyze::visibility;
+use ruff_python_semantic::definition::{Definition, Member, MemberKind};
+use ruff_python_semantic::model::SemanticModel;
-use crate::checkers::ast::Checker;
-use crate::docstrings::definition::{Definition, DefinitionKind};
-
-pub(super) fn match_function_def(stmt: &Stmt) -> (&str, &Arguments, Option<&Expr>, &Vec<Stmt>) {
- match &stmt.node {
- StmtKind::FunctionDef {
+pub(super) fn match_function_def(
+ stmt: &Stmt,
+) -> (&str, &Arguments, Option<&Expr>, &[Stmt], &[Expr]) {
+ match stmt {
+ Stmt::FunctionDef(ast::StmtFunctionDef {
name,
args,
returns,
body,
+ decorator_list,
..
- }
- | StmtKind::AsyncFunctionDef {
+ })
+ | Stmt::AsyncFunctionDef(ast::StmtAsyncFunctionDef {
name,
args,
returns,
body,
+ decorator_list,
..
- } => (name, args, returns.as_ref().map(|expr| &**expr), body),
+ }) => (
+ name,
+ args,
+ returns.as_ref().map(|expr| &**expr),
+ body,
+ decorator_list,
+ ),
_ => panic!("Found non-FunctionDef in match_name"),
}
}
/// Return the name of the function, if it's overloaded.
-pub fn overloaded_name(checker: &Checker, definition: &Definition) -> Option<String> {
- if let DefinitionKind::Function(stmt)
- | DefinitionKind::NestedFunction(stmt)
- | DefinitionKind::Method(stmt) = definition.kind
+pub(crate) fn overloaded_name(model: &SemanticModel, definition: &Definition) -> Option<String> {
+ if let Definition::Member(Member {
+ kind: MemberKind::Function | MemberKind::NestedFunction | MemberKind::Method,
+ stmt,
+ ..
+ }) = definition
{
- if visibility::is_overload(&checker.ctx, cast::decorator_list(stmt)) {
+ if visibility::is_overload(model, cast::decorator_list(stmt)) {
let (name, ..) = match_function_def(stmt);
Some(name.to_string())
} else {
@@ -45,12 +56,18 @@ pub fn overloaded_name(checker: &Checker, definition: &Definition) -> Option<String> {
-pub fn is_overload_impl(checker: &Checker, definition: &Definition, overloaded_name: &str) -> bool {
- if let DefinitionKind::Function(stmt)
- | DefinitionKind::NestedFunction(stmt)
- | DefinitionKind::Method(stmt) = definition.kind
+pub(crate) fn is_overload_impl(
+ model: &SemanticModel,
+ definition: &Definition,
+ overloaded_name: &str,
+) -> bool {
+ if let Definition::Member(Member {
+ kind: MemberKind::Function | MemberKind::NestedFunction | MemberKind::Method,
+ stmt,
+ ..
+ }) = definition
{
- if visibility::is_overload(&checker.ctx, cast::decorator_list(stmt)) {
+ if visibility::is_overload(model, cast::decorator_list(stmt)) {
false
} else {
let (name, ..) = match_function_def(stmt);
diff --git a/crates/ruff/src/rules/flake8_annotations/mod.rs b/crates/ruff/src/rules/flake8_annotations/mod.rs
index ecfab2d4be75a..9928a48fb1434 100644
--- a/crates/ruff/src/rules/flake8_annotations/mod.rs
+++ b/crates/ruff/src/rules/flake8_annotations/mod.rs
@@ -8,9 +8,9 @@ pub mod settings;
mod tests {
use std::path::Path;
- use crate::assert_messages;
use anyhow::Result;
+ use crate::assert_messages;
use crate::registry::Rule;
use crate::settings::Settings;
use crate::test::test_path;
diff --git a/crates/ruff/src/rules/flake8_annotations/rules.rs b/crates/ruff/src/rules/flake8_annotations/rules/definition.rs
similarity index 55%
rename from crates/ruff/src/rules/flake8_annotations/rules.rs
rename to crates/ruff/src/rules/flake8_annotations/rules/definition.rs
index f3f479ab3af23..492be796130c5 100644
--- a/crates/ruff/src/rules/flake8_annotations/rules.rs
+++ b/crates/ruff/src/rules/flake8_annotations/rules/definition.rs
@@ -1,20 +1,21 @@
-use rustpython_parser::ast::{Constant, Expr, ExprKind, Stmt};
+use rustpython_parser::ast::{Expr, Ranged, Stmt};
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::ReturnStatementVisitor;
-use ruff_python_ast::visitor::Visitor;
+use ruff_python_ast::statement_visitor::StatementVisitor;
use ruff_python_ast::{cast, helpers};
use ruff_python_semantic::analyze::visibility;
use ruff_python_semantic::analyze::visibility::Visibility;
+use ruff_python_semantic::definition::{Definition, Member, MemberKind};
+use ruff_python_semantic::model::SemanticModel;
use ruff_python_stdlib::typing::SIMPLE_MAGIC_RETURN_TYPES;
use crate::checkers::ast::Checker;
-use crate::docstrings::definition::{Definition, DefinitionKind};
use crate::registry::{AsRule, Rule};
-use super::fixes;
-use super::helpers::match_function_def;
+use super::super::fixes;
+use super::super::helpers::match_function_def;
/// ## What it does
/// Checks that function arguments have type annotations.
@@ -37,7 +38,7 @@ use super::helpers::match_function_def;
/// ```
#[violation]
pub struct MissingTypeFunctionArgument {
- pub name: String,
+ name: String,
}
impl Violation for MissingTypeFunctionArgument {
@@ -69,7 +70,7 @@ impl Violation for MissingTypeFunctionArgument {
/// ```
#[violation]
pub struct MissingTypeArgs {
- pub name: String,
+ name: String,
}
impl Violation for MissingTypeArgs {
@@ -101,7 +102,7 @@ impl Violation for MissingTypeArgs {
/// ```
#[violation]
pub struct MissingTypeKwargs {
- pub name: String,
+ name: String,
}
impl Violation for MissingTypeKwargs {
@@ -138,7 +139,7 @@ impl Violation for MissingTypeKwargs {
/// ```
#[violation]
pub struct MissingTypeSelf {
- pub name: String,
+ name: String,
}
impl Violation for MissingTypeSelf {
@@ -177,7 +178,7 @@ impl Violation for MissingTypeSelf {
/// ```
#[violation]
pub struct MissingTypeCls {
- pub name: String,
+ name: String,
}
impl Violation for MissingTypeCls {
@@ -209,7 +210,7 @@ impl Violation for MissingTypeCls {
/// ```
#[violation]
pub struct MissingReturnTypeUndocumentedPublicFunction {
- pub name: String,
+ name: String,
}
impl Violation for MissingReturnTypeUndocumentedPublicFunction {
@@ -241,7 +242,7 @@ impl Violation for MissingReturnTypeUndocumentedPublicFunction {
/// ```
#[violation]
pub struct MissingReturnTypePrivateFunction {
- pub name: String,
+ name: String,
}
impl Violation for MissingReturnTypePrivateFunction {
@@ -286,7 +287,7 @@ impl Violation for MissingReturnTypePrivateFunction {
/// ```
#[violation]
pub struct MissingReturnTypeSpecialMethod {
- pub name: String,
+ name: String,
}
impl AlwaysAutofixableViolation for MissingReturnTypeSpecialMethod {
@@ -326,7 +327,7 @@ impl AlwaysAutofixableViolation for MissingReturnTypeSpecialMethod {
/// ```
#[violation]
pub struct MissingReturnTypeStaticMethod {
- pub name: String,
+ name: String,
}
impl Violation for MissingReturnTypeStaticMethod {
@@ -362,7 +363,7 @@ impl Violation for MissingReturnTypeStaticMethod {
/// ```
#[violation]
pub struct MissingReturnTypeClassMethod {
- pub name: String,
+ name: String,
}
impl Violation for MissingReturnTypeClassMethod {
@@ -403,7 +404,7 @@ impl Violation for MissingReturnTypeClassMethod {
/// - [Mypy: The Any type](https://mypy.readthedocs.io/en/stable/kinds_of_types.html#the-any-type)
#[violation]
pub struct AnyType {
- pub name: String,
+ name: String,
}
impl Violation for AnyType {
@@ -416,18 +417,15 @@ impl Violation for AnyType {
fn is_none_returning(body: &[Stmt]) -> bool {
let mut visitor = ReturnStatementVisitor::default();
- for stmt in body {
- visitor.visit_stmt(stmt);
- }
- for expr in visitor.returns.into_iter().flatten() {
- if !matches!(
- expr.node,
- ExprKind::Constant {
- value: Constant::None,
- ..
+ visitor.visit_body(body);
+ for stmt in visitor.returns {
+ if let Some(value) = stmt.value.as_deref() {
+ if !matches!(
+ value,
+ Expr::Constant(constant) if constant.value.is_none()
+ ) {
+ return false;
}
- ) {
- return false;
}
}
true
@@ -435,14 +433,15 @@ fn is_none_returning(body: &[Stmt]) -> bool {
/// ANN401
fn check_dynamically_typed<F>(
- checker: &Checker,
+ model: &SemanticModel,
annotation: &Expr,
func: F,
diagnostics: &mut Vec<Diagnostic>,
+ is_overridden: bool,
) where
F: FnOnce() -> String,
{
- if checker.ctx.match_typing_expr(annotation, "Any") {
+ if !is_overridden && model.match_typing_expr(annotation, "Any") {
diagnostics.push(Diagnostic::new(
AnyType { name: func() },
annotation.range(),
@@ -451,7 +450,7 @@ fn check_dynamically_typed<F>(
}
/// Generate flake8-annotation checks for a given `Definition`.
-pub fn definition(
+pub(crate) fn definition(
checker: &Checker,
definition: &Definition,
visibility: Visibility,
@@ -459,285 +458,283 @@ pub fn definition(
// TODO(charlie): Consider using the AST directly here rather than `Definition`.
// We could adhere more closely to `flake8-annotations` by defining public
// vs. secret vs. protected.
- if let DefinitionKind::Function(stmt)
- | DefinitionKind::NestedFunction(stmt)
- | DefinitionKind::Method(stmt) = &definition.kind
+ let Definition::Member(Member {
+ kind,
+ stmt,
+ ..
+ }) = definition else {
+ return vec![];
+ };
+
+ let is_method = match kind {
+ MemberKind::Method => true,
+ MemberKind::Function | MemberKind::NestedFunction => false,
+ _ => return vec![],
+ };
+
+ let (name, args, returns, body, decorator_list) = match_function_def(stmt);
+ // Keep track of whether we've seen any typed arguments or return values.
+ let mut has_any_typed_arg = false; // Any argument has been typed?
+ let mut has_typed_return = false; // Return value has been typed?
+ let mut has_typed_self_or_cls = false; // Has a typed `self` or `cls` argument?
+
+ // Temporary storage for diagnostics; we emit them at the end
+ // unless configured to suppress ANN* for declarations that are fully untyped.
+ let mut diagnostics = Vec::new();
+
+ let is_overridden = visibility::is_override(checker.semantic_model(), decorator_list);
+
+ // ANN001, ANN401
+ for arg in args
+ .posonlyargs
+ .iter()
+ .chain(args.args.iter())
+ .chain(args.kwonlyargs.iter())
+ .skip(
+ // If this is a non-static method, skip `cls` or `self`.
+ usize::from(
+ is_method
+ && !visibility::is_staticmethod(
+ checker.semantic_model(),
+ cast::decorator_list(stmt),
+ ),
+ ),
+ )
{
- let is_method = matches!(definition.kind, DefinitionKind::Method(_));
- let (name, args, returns, body) = match_function_def(stmt);
- // Keep track of whether we've seen any typed arguments or return values.
- let mut has_any_typed_arg = false; // Any argument has been typed?
- let mut has_typed_return = false; // Return value has been typed?
- let mut has_typed_self_or_cls = false; // Has a typed `self` or `cls` argument?
-
- // Temporary storage for diagnostics; we emit them at the end
- // unless configured to suppress ANN* for declarations that are fully untyped.
- let mut diagnostics = Vec::new();
-
- // ANN001, ANN401
- for arg in args
- .posonlyargs
- .iter()
- .chain(args.args.iter())
- .chain(args.kwonlyargs.iter())
- .skip(
- // If this is a non-static method, skip `cls` or `self`.
- usize::from(
- is_method
- && !visibility::is_staticmethod(&checker.ctx, cast::decorator_list(stmt)),
- ),
- )
- {
- // ANN401 for dynamically typed arguments
- if let Some(annotation) = &arg.node.annotation {
- has_any_typed_arg = true;
- if checker.settings.rules.enabled(Rule::AnyType) {
+ // ANN401 for dynamically typed arguments
+ if let Some(annotation) = &arg.annotation {
+ has_any_typed_arg = true;
+ if checker.enabled(Rule::AnyType) {
+ check_dynamically_typed(
+ checker.semantic_model(),
+ annotation,
+ || arg.arg.to_string(),
+ &mut diagnostics,
+ is_overridden,
+ );
+ }
+ } else {
+ if !(checker.settings.flake8_annotations.suppress_dummy_args
+ && checker.settings.dummy_variable_rgx.is_match(&arg.arg))
+ {
+ if checker.enabled(Rule::MissingTypeFunctionArgument) {
+ diagnostics.push(Diagnostic::new(
+ MissingTypeFunctionArgument {
+ name: arg.arg.to_string(),
+ },
+ arg.range(),
+ ));
+ }
+ }
+ }
+ }
+
+ // ANN002, ANN401
+ if let Some(arg) = &args.vararg {
+ if let Some(expr) = &arg.annotation {
+ has_any_typed_arg = true;
+ if !checker.settings.flake8_annotations.allow_star_arg_any {
+ if checker.enabled(Rule::AnyType) {
+ let name = &arg.arg;
check_dynamically_typed(
- checker,
- annotation,
- || arg.node.arg.to_string(),
+ checker.semantic_model(),
+ expr,
+ || format!("*{name}"),
&mut diagnostics,
+ is_overridden,
);
}
- } else {
- if !(checker.settings.flake8_annotations.suppress_dummy_args
- && checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
- {
- if checker
- .settings
- .rules
- .enabled(Rule::MissingTypeFunctionArgument)
- {
- diagnostics.push(Diagnostic::new(
- MissingTypeFunctionArgument {
- name: arg.node.arg.to_string(),
- },
- arg.range(),
- ));
- }
+ }
+ } else {
+ if !(checker.settings.flake8_annotations.suppress_dummy_args
+ && checker.settings.dummy_variable_rgx.is_match(&arg.arg))
+ {
+ if checker.enabled(Rule::MissingTypeArgs) {
+ diagnostics.push(Diagnostic::new(
+ MissingTypeArgs {
+ name: arg.arg.to_string(),
+ },
+ arg.range(),
+ ));
}
}
}
+ }
- // ANN002, ANN401
- if let Some(arg) = &args.vararg {
- if let Some(expr) = &arg.node.annotation {
- has_any_typed_arg = true;
- if !checker.settings.flake8_annotations.allow_star_arg_any {
- if checker.settings.rules.enabled(Rule::AnyType) {
- let name = &arg.node.arg;
- check_dynamically_typed(
- checker,
- expr,
- || format!("*{name}"),
- &mut diagnostics,
- );
- }
+ // ANN003, ANN401
+ if let Some(arg) = &args.kwarg {
+ if let Some(expr) = &arg.annotation {
+ has_any_typed_arg = true;
+ if !checker.settings.flake8_annotations.allow_star_arg_any {
+ if checker.enabled(Rule::AnyType) {
+ let name = &arg.arg;
+ check_dynamically_typed(
+ checker.semantic_model(),
+ expr,
+ || format!("**{name}"),
+ &mut diagnostics,
+ is_overridden,
+ );
}
- } else {
- if !(checker.settings.flake8_annotations.suppress_dummy_args
- && checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
+ }
+ } else {
+ if !(checker.settings.flake8_annotations.suppress_dummy_args
+ && checker.settings.dummy_variable_rgx.is_match(&arg.arg))
+ {
+ if checker.enabled(Rule::MissingTypeKwargs) {
+ diagnostics.push(Diagnostic::new(
+ MissingTypeKwargs {
+ name: arg.arg.to_string(),
+ },
+ arg.range(),
+ ));
+ }
+ }
+ }
+ }
+
+ // ANN101, ANN102
+ if is_method
+ && !visibility::is_staticmethod(checker.semantic_model(), cast::decorator_list(stmt))
+ {
+ if let Some(arg) = args.posonlyargs.first().or_else(|| args.args.first()) {
+ if arg.annotation.is_none() {
+ if visibility::is_classmethod(checker.semantic_model(), cast::decorator_list(stmt))
{
- if checker.settings.rules.enabled(Rule::MissingTypeArgs) {
+ if checker.enabled(Rule::MissingTypeCls) {
diagnostics.push(Diagnostic::new(
- MissingTypeArgs {
- name: arg.node.arg.to_string(),
+ MissingTypeCls {
+ name: arg.arg.to_string(),
},
arg.range(),
));
}
- }
- }
- }
-
- // ANN003, ANN401
- if let Some(arg) = &args.kwarg {
- if let Some(expr) = &arg.node.annotation {
- has_any_typed_arg = true;
- if !checker.settings.flake8_annotations.allow_star_arg_any {
- if checker.settings.rules.enabled(Rule::AnyType) {
- let name = &arg.node.arg;
- check_dynamically_typed(
- checker,
- expr,
- || format!("**{name}"),
- &mut diagnostics,
- );
- }
- }
- } else {
- if !(checker.settings.flake8_annotations.suppress_dummy_args
- && checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
- {
- if checker.settings.rules.enabled(Rule::MissingTypeKwargs) {
+ } else {
+ if checker.enabled(Rule::MissingTypeSelf) {
diagnostics.push(Diagnostic::new(
- MissingTypeKwargs {
- name: arg.node.arg.to_string(),
+ MissingTypeSelf {
+ name: arg.arg.to_string(),
},
arg.range(),
));
}
}
+ } else {
+ has_typed_self_or_cls = true;
}
}
+ }
- // ANN101, ANN102
- if is_method && !visibility::is_staticmethod(&checker.ctx, cast::decorator_list(stmt)) {
- if let Some(arg) = args.posonlyargs.first().or_else(|| args.args.first()) {
- if arg.node.annotation.is_none() {
- if visibility::is_classmethod(&checker.ctx, cast::decorator_list(stmt)) {
- if checker.settings.rules.enabled(Rule::MissingTypeCls) {
- diagnostics.push(Diagnostic::new(
- MissingTypeCls {
- name: arg.node.arg.to_string(),
- },
- arg.range(),
- ));
- }
- } else {
- if checker.settings.rules.enabled(Rule::MissingTypeSelf) {
- diagnostics.push(Diagnostic::new(
- MissingTypeSelf {
- name: arg.node.arg.to_string(),
- },
- arg.range(),
- ));
- }
- }
- } else {
- has_typed_self_or_cls = true;
- }
- }
+ // ANN201, ANN202, ANN401
+ if let Some(expr) = &returns {
+ has_typed_return = true;
+ if checker.enabled(Rule::AnyType) {
+ check_dynamically_typed(
+ checker.semantic_model(),
+ expr,
+ || name.to_string(),
+ &mut diagnostics,
+ is_overridden,
+ );
}
-
- // ANN201, ANN202, ANN401
- if let Some(expr) = &returns {
- has_typed_return = true;
- if checker.settings.rules.enabled(Rule::AnyType) {
- check_dynamically_typed(checker, expr, || name.to_string(), &mut diagnostics);
+ } else if !(
+ // Allow omission of return annotation if the function only returns `None`
+ // (explicitly or implicitly).
+ checker.settings.flake8_annotations.suppress_none_returning && is_none_returning(body)
+ ) {
+ if is_method
+ && visibility::is_classmethod(checker.semantic_model(), cast::decorator_list(stmt))
+ {
+ if checker.enabled(Rule::MissingReturnTypeClassMethod) {
+ diagnostics.push(Diagnostic::new(
+ MissingReturnTypeClassMethod {
+ name: name.to_string(),
+ },
+ helpers::identifier_range(stmt, checker.locator),
+ ));
}
- } else if !(
- // Allow omission of return annotation if the function only returns `None`
- // (explicitly or implicitly).
- checker.settings.flake8_annotations.suppress_none_returning && is_none_returning(body)
- ) {
- if is_method && visibility::is_classmethod(&checker.ctx, cast::decorator_list(stmt)) {
- if checker
- .settings
- .rules
- .enabled(Rule::MissingReturnTypeClassMethod)
- {
- diagnostics.push(Diagnostic::new(
- MissingReturnTypeClassMethod {
- name: name.to_string(),
- },
- helpers::identifier_range(stmt, checker.locator),
- ));
- }
- } else if is_method
- && visibility::is_staticmethod(&checker.ctx, cast::decorator_list(stmt))
- {
- if checker
- .settings
- .rules
- .enabled(Rule::MissingReturnTypeStaticMethod)
- {
- diagnostics.push(Diagnostic::new(
- MissingReturnTypeStaticMethod {
- name: name.to_string(),
- },
- helpers::identifier_range(stmt, checker.locator),
- ));
- }
- } else if is_method && visibility::is_init(name) {
- // Allow omission of return annotation in `__init__` functions, as long as at
- // least one argument is typed.
- if checker
- .settings
- .rules
- .enabled(Rule::MissingReturnTypeSpecialMethod)
- {
- if !(checker.settings.flake8_annotations.mypy_init_return && has_any_typed_arg)
- {
- let mut diagnostic = Diagnostic::new(
- MissingReturnTypeSpecialMethod {
- name: name.to_string(),
- },
- helpers::identifier_range(stmt, checker.locator),
- );
- if checker.patch(diagnostic.kind.rule()) {
- diagnostic.try_set_fix(|| {
- fixes::add_return_annotation(checker.locator, stmt, "None")
- });
- }
- diagnostics.push(diagnostic);
- }
- }
- } else if is_method && visibility::is_magic(name) {
- if checker
- .settings
- .rules
- .enabled(Rule::MissingReturnTypeSpecialMethod)
- {
+ } else if is_method
+ && visibility::is_staticmethod(checker.semantic_model(), cast::decorator_list(stmt))
+ {
+ if checker.enabled(Rule::MissingReturnTypeStaticMethod) {
+ diagnostics.push(Diagnostic::new(
+ MissingReturnTypeStaticMethod {
+ name: name.to_string(),
+ },
+ helpers::identifier_range(stmt, checker.locator),
+ ));
+ }
+ } else if is_method && visibility::is_init(name) {
+ // Allow omission of return annotation in `__init__` functions, as long as at
+ // least one argument is typed.
+ if checker.enabled(Rule::MissingReturnTypeSpecialMethod) {
+ if !(checker.settings.flake8_annotations.mypy_init_return && has_any_typed_arg) {
let mut diagnostic = Diagnostic::new(
MissingReturnTypeSpecialMethod {
name: name.to_string(),
},
helpers::identifier_range(stmt, checker.locator),
);
- let return_type = SIMPLE_MAGIC_RETURN_TYPES.get(name);
- if let Some(return_type) = return_type {
- if checker.patch(diagnostic.kind.rule()) {
- diagnostic.try_set_fix(|| {
- fixes::add_return_annotation(checker.locator, stmt, return_type)
- });
- }
+ if checker.patch(diagnostic.kind.rule()) {
+ #[allow(deprecated)]
+ diagnostic.try_set_fix_from_edit(|| {
+ fixes::add_return_annotation(checker.locator, stmt, "None")
+ });
}
diagnostics.push(diagnostic);
}
- } else {
- match visibility {
- Visibility::Public => {
- if checker
- .settings
- .rules
- .enabled(Rule::MissingReturnTypeUndocumentedPublicFunction)
- {
- diagnostics.push(Diagnostic::new(
- MissingReturnTypeUndocumentedPublicFunction {
- name: name.to_string(),
- },
- helpers::identifier_range(stmt, checker.locator),
- ));
- }
- }
- Visibility::Private => {
- if checker
- .settings
- .rules
- .enabled(Rule::MissingReturnTypePrivateFunction)
- {
- diagnostics.push(Diagnostic::new(
- MissingReturnTypePrivateFunction {
- name: name.to_string(),
- },
- helpers::identifier_range(stmt, checker.locator),
- ));
- }
+ }
+ } else if is_method && visibility::is_magic(name) {
+ if checker.enabled(Rule::MissingReturnTypeSpecialMethod) {
+ let mut diagnostic = Diagnostic::new(
+ MissingReturnTypeSpecialMethod {
+ name: name.to_string(),
+ },
+ helpers::identifier_range(stmt, checker.locator),
+ );
+ let return_type = SIMPLE_MAGIC_RETURN_TYPES.get(name);
+ if let Some(return_type) = return_type {
+ if checker.patch(diagnostic.kind.rule()) {
+ #[allow(deprecated)]
+ diagnostic.try_set_fix_from_edit(|| {
+ fixes::add_return_annotation(checker.locator, stmt, return_type)
+ });
}
}
+ diagnostics.push(diagnostic);
}
- }
- // If settings say so, don't report any of the
- // diagnostics gathered here if there were no type annotations at all.
- if checker.settings.flake8_annotations.ignore_fully_untyped
- && !(has_any_typed_arg || has_typed_self_or_cls || has_typed_return)
- {
- vec![]
} else {
- diagnostics
+ match visibility {
+ Visibility::Public => {
+ if checker.enabled(Rule::MissingReturnTypeUndocumentedPublicFunction) {
+ diagnostics.push(Diagnostic::new(
+ MissingReturnTypeUndocumentedPublicFunction {
+ name: name.to_string(),
+ },
+ helpers::identifier_range(stmt, checker.locator),
+ ));
+ }
+ }
+ Visibility::Private => {
+ if checker.enabled(Rule::MissingReturnTypePrivateFunction) {
+ diagnostics.push(Diagnostic::new(
+ MissingReturnTypePrivateFunction {
+ name: name.to_string(),
+ },
+ helpers::identifier_range(stmt, checker.locator),
+ ));
+ }
+ }
+ }
}
- } else {
+ }
+ // If settings say so, don't report any of the
+ // diagnostics gathered here if there were no type annotations at all.
+ if checker.settings.flake8_annotations.ignore_fully_untyped
+ && !(has_any_typed_arg || has_typed_self_or_cls || has_typed_return)
+ {
vec![]
+ } else {
+ diagnostics
}
}
diff --git a/crates/ruff/src/rules/flake8_annotations/rules/mod.rs b/crates/ruff/src/rules/flake8_annotations/rules/mod.rs
new file mode 100644
index 0000000000000..b57c156b183dc
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_annotations/rules/mod.rs
@@ -0,0 +1,8 @@
+pub(crate) use definition::{
+ definition, AnyType, MissingReturnTypeClassMethod, MissingReturnTypePrivateFunction,
+ MissingReturnTypeSpecialMethod, MissingReturnTypeStaticMethod,
+ MissingReturnTypeUndocumentedPublicFunction, MissingTypeArgs, MissingTypeCls,
+ MissingTypeFunctionArgument, MissingTypeKwargs, MissingTypeSelf,
+};
+
+mod definition;
diff --git a/crates/ruff/src/rules/flake8_annotations/settings.rs b/crates/ruff/src/rules/flake8_annotations/settings.rs
index 4bf40f2b4cd81..a0c3ba5115e66 100644
--- a/crates/ruff/src/rules/flake8_annotations/settings.rs
+++ b/crates/ruff/src/rules/flake8_annotations/settings.rs
@@ -1,19 +1,18 @@
//! Settings for the `flake-annotations` plugin.
-use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use ruff_macros::CacheKey;
-use ruff_macros::ConfigurationOptions;
+use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
- Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, JsonSchema,
+ Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8AnnotationsOptions"
)]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = "false",
diff --git a/crates/ruff/src/rules/flake8_annotations/snapshots/ruff__rules__flake8_annotations__tests__defaults.snap b/crates/ruff/src/rules/flake8_annotations/snapshots/ruff__rules__flake8_annotations__tests__defaults.snap
index 85337e599e468..61d69da4b50e3 100644
--- a/crates/ruff/src/rules/flake8_annotations/snapshots/ruff__rules__flake8_annotations__tests__defaults.snap
+++ b/crates/ruff/src/rules/flake8_annotations/snapshots/ruff__rules__flake8_annotations__tests__defaults.snap
@@ -1,189 +1,189 @@
---
source: crates/ruff/src/rules/flake8_annotations/mod.rs
---
-annotation_presence.py:4:5: ANN201 Missing return type annotation for public function `foo`
+annotation_presence.py:5:5: ANN201 Missing return type annotation for public function `foo`
|
-4 | # Error
-5 | def foo(a, b):
+5 | # Error
+6 | def foo(a, b):
| ^^^ ANN201
-6 | pass
+7 | pass
|
-annotation_presence.py:4:9: ANN001 Missing type annotation for function argument `a`
+annotation_presence.py:5:9: ANN001 Missing type annotation for function argument `a`
|
-4 | # Error
-5 | def foo(a, b):
+5 | # Error
+6 | def foo(a, b):
| ^ ANN001
-6 | pass
+7 | pass
|
-annotation_presence.py:4:12: ANN001 Missing type annotation for function argument `b`
+annotation_presence.py:5:12: ANN001 Missing type annotation for function argument `b`
|
-4 | # Error
-5 | def foo(a, b):
+5 | # Error
+6 | def foo(a, b):
| ^ ANN001
-6 | pass
+7 | pass
|
-annotation_presence.py:9:5: ANN201 Missing return type annotation for public function `foo`
+annotation_presence.py:10:5: ANN201 Missing return type annotation for public function `foo`
|
- 9 | # Error
-10 | def foo(a: int, b):
+10 | # Error
+11 | def foo(a: int, b):
| ^^^ ANN201
-11 | pass
+12 | pass
|
-annotation_presence.py:9:17: ANN001 Missing type annotation for function argument `b`
+annotation_presence.py:10:17: ANN001 Missing type annotation for function argument `b`
|
- 9 | # Error
-10 | def foo(a: int, b):
+10 | # Error
+11 | def foo(a: int, b):
| ^ ANN001
-11 | pass
+12 | pass
|
-annotation_presence.py:14:17: ANN001 Missing type annotation for function argument `b`
+annotation_presence.py:15:17: ANN001 Missing type annotation for function argument `b`
|
-14 | # Error
-15 | def foo(a: int, b) -> int:
+15 | # Error
+16 | def foo(a: int, b) -> int:
| ^ ANN001
-16 | pass
+17 | pass
|
-annotation_presence.py:19:5: ANN201 Missing return type annotation for public function `foo`
+annotation_presence.py:20:5: ANN201 Missing return type annotation for public function `foo`
|
-19 | # Error
-20 | def foo(a: int, b: int):
+20 | # Error
+21 | def foo(a: int, b: int):
| ^^^ ANN201
-21 | pass
+22 | pass
|
-annotation_presence.py:24:5: ANN201 Missing return type annotation for public function `foo`
+annotation_presence.py:25:5: ANN201 Missing return type annotation for public function `foo`
|
-24 | # Error
-25 | def foo():
+25 | # Error
+26 | def foo():
| ^^^ ANN201
-26 | pass
+27 | pass
|
-annotation_presence.py:44:12: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `a`
+annotation_presence.py:45:12: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `a`
|
-44 | # ANN401
-45 | def foo(a: Any, *args: str, **kwargs: str) -> int:
+45 | # ANN401
+46 | def foo(a: Any, *args: str, **kwargs: str) -> int:
| ^^^ ANN401
-46 | pass
+47 | pass
|
-annotation_presence.py:49:47: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `foo`
+annotation_presence.py:50:47: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `foo`
|
-49 | # ANN401
-50 | def foo(a: int, *args: str, **kwargs: str) -> Any:
+50 | # ANN401
+51 | def foo(a: int, *args: str, **kwargs: str) -> Any:
| ^^^ ANN401
-51 | pass
+52 | pass
|
-annotation_presence.py:54:24: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `*args`
+annotation_presence.py:55:24: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `*args`
|
-54 | # ANN401
-55 | def foo(a: int, *args: Any, **kwargs: Any) -> int:
+55 | # ANN401
+56 | def foo(a: int, *args: Any, **kwargs: Any) -> int:
| ^^^ ANN401
-56 | pass
+57 | pass
|
-annotation_presence.py:54:39: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `**kwargs`
+annotation_presence.py:55:39: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `**kwargs`
|
-54 | # ANN401
-55 | def foo(a: int, *args: Any, **kwargs: Any) -> int:
+55 | # ANN401
+56 | def foo(a: int, *args: Any, **kwargs: Any) -> int:
| ^^^ ANN401
-56 | pass
+57 | pass
|
-annotation_presence.py:59:24: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `*args`
+annotation_presence.py:60:24: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `*args`
|
-59 | # ANN401
-60 | def foo(a: int, *args: Any, **kwargs: str) -> int:
+60 | # ANN401
+61 | def foo(a: int, *args: Any, **kwargs: str) -> int:
| ^^^ ANN401
-61 | pass
+62 | pass
|
-annotation_presence.py:64:39: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `**kwargs`
+annotation_presence.py:65:39: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `**kwargs`
|
-64 | # ANN401
-65 | def foo(a: int, *args: str, **kwargs: Any) -> int:
+65 | # ANN401
+66 | def foo(a: int, *args: str, **kwargs: Any) -> int:
| ^^^ ANN401
-66 | pass
+67 | pass
|
-annotation_presence.py:74:13: ANN101 Missing type annotation for `self` in method
+annotation_presence.py:75:13: ANN101 Missing type annotation for `self` in method
|
-74 | # ANN101
-75 | def foo(self, a: int, b: int) -> int:
+75 | # ANN101
+76 | def foo(self, a: int, b: int) -> int:
| ^^^^ ANN101
-76 | pass
+77 | pass
|
-annotation_presence.py:78:29: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `a`
+annotation_presence.py:79:29: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `a`
|
-78 | # ANN401
-79 | def foo(self: "Foo", a: Any, *params: str, **options: str) -> int:
+79 | # ANN401
+80 | def foo(self: "Foo", a: Any, *params: str, **options: str) -> int:
| ^^^ ANN401
-80 | pass
+81 | pass
|
-annotation_presence.py:82:67: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `foo`
+annotation_presence.py:83:67: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `foo`
|
-82 | # ANN401
-83 | def foo(self: "Foo", a: int, *params: str, **options: str) -> Any:
+83 | # ANN401
+84 | def foo(self: "Foo", a: int, *params: str, **options: str) -> Any:
| ^^^ ANN401
-84 | pass
+85 | pass
|
-annotation_presence.py:86:43: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `*params`
+annotation_presence.py:87:43: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `*params`
|
-86 | # ANN401
-87 | def foo(self: "Foo", a: int, *params: Any, **options: Any) -> int:
+87 | # ANN401
+88 | def foo(self: "Foo", a: int, *params: Any, **options: Any) -> int:
| ^^^ ANN401
-88 | pass
+89 | pass
|
-annotation_presence.py:86:59: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `**options`
+annotation_presence.py:87:59: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `**options`
|
-86 | # ANN401
-87 | def foo(self: "Foo", a: int, *params: Any, **options: Any) -> int:
+87 | # ANN401
+88 | def foo(self: "Foo", a: int, *params: Any, **options: Any) -> int:
| ^^^ ANN401
-88 | pass
+89 | pass
|
-annotation_presence.py:90:43: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `*params`
+annotation_presence.py:91:43: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `*params`
|
-90 | # ANN401
-91 | def foo(self: "Foo", a: int, *params: Any, **options: str) -> int:
+91 | # ANN401
+92 | def foo(self: "Foo", a: int, *params: Any, **options: str) -> int:
| ^^^ ANN401
-92 | pass
+93 | pass
|
-annotation_presence.py:94:59: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `**options`
+annotation_presence.py:95:59: ANN401 Dynamically typed expressions (typing.Any) are disallowed in `**options`
|
-94 | # ANN401
-95 | def foo(self: "Foo", a: int, *params: str, **options: Any) -> int:
+95 | # ANN401
+96 | def foo(self: "Foo", a: int, *params: str, **options: Any) -> int:
| ^^^ ANN401
-96 | pass
+97 | pass
|
-annotation_presence.py:104:13: ANN102 Missing type annotation for `cls` in classmethod
+annotation_presence.py:130:13: ANN102 Missing type annotation for `cls` in classmethod
|
-104 | # ANN102
-105 | @classmethod
-106 | def foo(cls, a: int, b: int) -> int:
+130 | # ANN102
+131 | @classmethod
+132 | def foo(cls, a: int, b: int) -> int:
| ^^^ ANN102
-107 | pass
+133 | pass
|
-annotation_presence.py:108:13: ANN101 Missing type annotation for `self` in method
+annotation_presence.py:134:13: ANN101 Missing type annotation for `self` in method
|
-108 | # ANN101
-109 | def foo(self, /, a: int, b: int) -> int:
+134 | # ANN101
+135 | def foo(self, /, a: int, b: int) -> int:
| ^^^^ ANN101
-110 | pass
+136 | pass
|
diff --git a/crates/ruff/src/rules/flake8_async/mod.rs b/crates/ruff/src/rules/flake8_async/mod.rs
new file mode 100644
index 0000000000000..7865ddf6b0a66
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_async/mod.rs
@@ -0,0 +1,28 @@
+//! Rules from [flake8-async](https://pypi.org/project/flake8-async/).
+pub(crate) mod rules;
+
+#[cfg(test)]
+mod tests {
+ use std::path::Path;
+
+ use anyhow::Result;
+ use test_case::test_case;
+
+ use crate::assert_messages;
+ use crate::registry::Rule;
+ use crate::settings::Settings;
+ use crate::test::test_path;
+
+ #[test_case(Rule::BlockingHttpCallInAsyncFunction, Path::new("ASYNC100.py"))]
+ #[test_case(Rule::OpenSleepOrSubprocessInAsyncFunction, Path::new("ASYNC101.py"))]
+ #[test_case(Rule::BlockingOsCallInAsyncFunction, Path::new("ASYNC102.py"))]
+ fn rules(rule_code: Rule, path: &Path) -> Result<()> {
+ let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
+ let diagnostics = test_path(
+ Path::new("flake8_async").join(path).as_path(),
+ &Settings::for_rule(rule_code),
+ )?;
+ assert_messages!(snapshot, diagnostics);
+ Ok(())
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_async/rules/blocking_http_call.rs b/crates/ruff/src/rules/flake8_async/rules/blocking_http_call.rs
new file mode 100644
index 0000000000000..e7d72f8e4579d
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_async/rules/blocking_http_call.rs
@@ -0,0 +1,81 @@
+use rustpython_parser::ast;
+use rustpython_parser::ast::{Expr, Ranged};
+
+use ruff_diagnostics::{Diagnostic, Violation};
+use ruff_macros::{derive_message_formats, violation};
+
+use crate::checkers::ast::Checker;
+
+/// ## What it does
+/// Checks that async functions do not contain blocking HTTP calls.
+///
+/// ## Why is this bad?
+/// Blocking an async function via a blocking HTTP call will block the entire
+/// event loop, preventing it from executing other tasks while waiting for the
+/// HTTP response, negating the benefits of asynchronous programming.
+///
+/// Instead of making a blocking HTTP call, use an asynchronous HTTP client
+/// library such as `aiohttp` or `httpx`.
+///
+/// ## Example
+/// ```python
+/// async def fetch():
+/// urllib.request.urlopen("https://example.com/foo/bar").read()
+/// ```
+///
+/// Use instead:
+/// ```python
+/// async def fetch():
+/// async with aiohttp.ClientSession() as session:
+/// async with session.get("https://example.com/foo/bar") as resp:
+/// ...
+/// ```
+#[violation]
+pub struct BlockingHttpCallInAsyncFunction;
+
+impl Violation for BlockingHttpCallInAsyncFunction {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("Async functions should not call blocking HTTP methods")
+ }
+}
+
+const BLOCKING_HTTP_CALLS: &[&[&str]] = &[
+ &["urllib", "request", "urlopen"],
+ &["httpx", "get"],
+ &["httpx", "post"],
+ &["httpx", "delete"],
+ &["httpx", "patch"],
+ &["httpx", "put"],
+ &["httpx", "head"],
+ &["httpx", "connect"],
+ &["httpx", "options"],
+ &["httpx", "trace"],
+ &["requests", "get"],
+ &["requests", "post"],
+ &["requests", "delete"],
+ &["requests", "patch"],
+ &["requests", "put"],
+ &["requests", "head"],
+ &["requests", "connect"],
+ &["requests", "options"],
+ &["requests", "trace"],
+];
+
+/// ASYNC100
+pub(crate) fn blocking_http_call(checker: &mut Checker, expr: &Expr) {
+ if checker.semantic_model().in_async_context() {
+ if let Expr::Call(ast::ExprCall { func, .. }) = expr {
+ let call_path = checker.semantic_model().resolve_call_path(func);
+ let is_blocking =
+ call_path.map_or(false, |path| BLOCKING_HTTP_CALLS.contains(&path.as_slice()));
+
+ if is_blocking {
+ checker.diagnostics.push(Diagnostic::new(
+ BlockingHttpCallInAsyncFunction,
+ func.range(),
+ ));
+ }
+ }
+ }
+}
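
A note on the lookup above: `resolve_call_path` normalizes how a function was imported, so the table stores resolved module paths rather than surface spellings. As a minimal, hypothetical fixture (not part of this PR), both calls below resolve to `["requests", "get"]` and would trigger ASYNC100, assuming the usual `resolve_call_path` semantics used elsewhere in the codebase:

```python
import requests
from requests import get


async def fetch_a():
    requests.get("https://example.com")  # resolved call path ["requests", "get"] -> ASYNC100


async def fetch_b():
    get("https://example.com")  # same resolved path via the `from` import -> ASYNC100
```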
diff --git a/crates/ruff/src/rules/flake8_async/rules/blocking_os_call.rs b/crates/ruff/src/rules/flake8_async/rules/blocking_os_call.rs
new file mode 100644
index 0000000000000..ab0316f276083
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_async/rules/blocking_os_call.rs
@@ -0,0 +1,73 @@
+use rustpython_parser::ast;
+use rustpython_parser::ast::{Expr, Ranged};
+
+use ruff_diagnostics::{Diagnostic, Violation};
+use ruff_macros::{derive_message_formats, violation};
+
+use crate::checkers::ast::Checker;
+
+/// ## What it does
+/// Checks that async functions do not contain blocking, synchronous process
+/// calls via the `os` module.
+///
+/// ## Why is this bad?
+/// Blocking an async function via a blocking call will block the entire
+/// event loop, preventing it from executing other tasks while waiting for the
+/// call to complete, negating the benefits of asynchronous programming.
+///
+/// Instead of making a blocking call, use an equivalent asynchronous library
+/// or function.
+///
+/// ## Example
+/// ```python
+/// async def foo():
+/// os.popen()
+/// ```
+///
+/// Use instead:
+/// ```python
+/// def foo():
+/// os.popen()
+/// ```
+#[violation]
+pub struct BlockingOsCallInAsyncFunction;
+
+impl Violation for BlockingOsCallInAsyncFunction {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("Async functions should not call synchronous `os` methods")
+ }
+}
+
+const UNSAFE_OS_METHODS: &[&[&str]] = &[
+ &["os", "popen"],
+ &["os", "posix_spawn"],
+ &["os", "posix_spawnp"],
+ &["os", "spawnl"],
+ &["os", "spawnle"],
+ &["os", "spawnlp"],
+ &["os", "spawnlpe"],
+ &["os", "spawnv"],
+ &["os", "spawnve"],
+ &["os", "spawnvp"],
+ &["os", "spawnvpe"],
+ &["os", "system"],
+];
+
+/// ASYNC102
+pub(crate) fn blocking_os_call(checker: &mut Checker, expr: &Expr) {
+ if checker.semantic_model().in_async_context() {
+ if let Expr::Call(ast::ExprCall { func, .. }) = expr {
+ let is_unsafe_os_method = checker
+ .semantic_model()
+ .resolve_call_path(func)
+ .map_or(false, |path| UNSAFE_OS_METHODS.contains(&path.as_slice()));
+
+ if is_unsafe_os_method {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(BlockingOsCallInAsyncFunction, func.range()));
+ }
+ }
+ }
+}
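
Where the process call genuinely has to run inside a coroutine, one possible non-blocking pattern — offered as an illustrative alternative, not something this rule prescribes — is to push the synchronous `os` call onto a worker thread:

```python
import asyncio
import os


async def spawn():
    # Run the blocking os.system call in a worker thread so the event loop
    # stays responsive (asyncio.to_thread requires Python 3.9+).
    return await asyncio.to_thread(os.system, "ls -l")
```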
diff --git a/crates/ruff/src/rules/flake8_async/rules/mod.rs b/crates/ruff/src/rules/flake8_async/rules/mod.rs
new file mode 100644
index 0000000000000..0f6e8faaca1e6
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_async/rules/mod.rs
@@ -0,0 +1,9 @@
+pub(crate) use blocking_http_call::{blocking_http_call, BlockingHttpCallInAsyncFunction};
+pub(crate) use blocking_os_call::{blocking_os_call, BlockingOsCallInAsyncFunction};
+pub(crate) use open_sleep_or_subprocess_call::{
+ open_sleep_or_subprocess_call, OpenSleepOrSubprocessInAsyncFunction,
+};
+
+mod blocking_http_call;
+mod blocking_os_call;
+mod open_sleep_or_subprocess_call;
diff --git a/crates/ruff/src/rules/flake8_async/rules/open_sleep_or_subprocess_call.rs b/crates/ruff/src/rules/flake8_async/rules/open_sleep_or_subprocess_call.rs
new file mode 100644
index 0000000000000..1a252118c76d3
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_async/rules/open_sleep_or_subprocess_call.rs
@@ -0,0 +1,79 @@
+use rustpython_parser::ast;
+use rustpython_parser::ast::{Expr, Ranged};
+
+use ruff_diagnostics::{Diagnostic, Violation};
+use ruff_macros::{derive_message_formats, violation};
+
+use crate::checkers::ast::Checker;
+
+/// ## What it does
+/// Checks that async functions do not contain calls to `open`, `time.sleep`,
+/// or `subprocess` methods.
+///
+/// ## Why is this bad?
+/// Blocking an async function via a blocking call will block the entire
+/// event loop, preventing it from executing other tasks while waiting for the
+/// call to complete, negating the benefits of asynchronous programming.
+///
+/// Instead of making a blocking call, use an equivalent asynchronous library
+/// or function.
+///
+/// ## Example
+/// ```python
+/// async def foo():
+/// time.sleep(1000)
+/// ```
+///
+/// Use instead:
+/// ```python
+/// async def foo():
+/// await asyncio.sleep(1000)
+/// ```
+#[violation]
+pub struct OpenSleepOrSubprocessInAsyncFunction;
+
+impl Violation for OpenSleepOrSubprocessInAsyncFunction {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("Async functions should not call `open`, `time.sleep`, or `subprocess` methods")
+ }
+}
+
+const OPEN_SLEEP_OR_SUBPROCESS_CALL: &[&[&str]] = &[
+ &["", "open"],
+ &["time", "sleep"],
+ &["subprocess", "run"],
+ &["subprocess", "Popen"],
+ // Deprecated subprocess calls:
+ &["subprocess", "call"],
+ &["subprocess", "check_call"],
+ &["subprocess", "check_output"],
+ &["subprocess", "getoutput"],
+ &["subprocess", "getstatusoutput"],
+ &["os", "wait"],
+ &["os", "wait3"],
+ &["os", "wait4"],
+ &["os", "waitid"],
+ &["os", "waitpid"],
+];
+
+/// ASYNC101
+pub(crate) fn open_sleep_or_subprocess_call(checker: &mut Checker, expr: &Expr) {
+ if checker.semantic_model().in_async_context() {
+ if let Expr::Call(ast::ExprCall { func, .. }) = expr {
+ let is_open_sleep_or_subprocess_call = checker
+ .semantic_model()
+ .resolve_call_path(func)
+ .map_or(false, |path| {
+ OPEN_SLEEP_OR_SUBPROCESS_CALL.contains(&path.as_slice())
+ });
+
+ if is_open_sleep_or_subprocess_call {
+ checker.diagnostics.push(Diagnostic::new(
+ OpenSleepOrSubprocessInAsyncFunction,
+ func.range(),
+ ));
+ }
+ }
+ }
+}
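
For the `subprocess` entries in the table above, the standard library ships an asynchronous counterpart; a sketch of a non-blocking equivalent of `subprocess.run`, purely as an illustration:

```python
import asyncio


async def run_ls():
    # Non-blocking counterpart of subprocess.run(["ls", "-l"], capture_output=True).
    proc = await asyncio.create_subprocess_exec(
        "ls", "-l", stdout=asyncio.subprocess.PIPE
    )
    stdout, _ = await proc.communicate()
    return proc.returncode, stdout
```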
diff --git a/crates/ruff/src/rules/flake8_async/snapshots/ruff__rules__flake8_async__tests__ASYNC100_ASYNC100.py.snap b/crates/ruff/src/rules/flake8_async/snapshots/ruff__rules__flake8_async__tests__ASYNC100_ASYNC100.py.snap
new file mode 100644
index 0000000000000..233e7b4c93431
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_async/snapshots/ruff__rules__flake8_async__tests__ASYNC100_ASYNC100.py.snap
@@ -0,0 +1,39 @@
+---
+source: crates/ruff/src/rules/flake8_async/mod.rs
+---
+ASYNC100.py:7:5: ASYNC100 Async functions should not call blocking HTTP methods
+ |
+7 | async def foo():
+8 | urllib.request.urlopen("http://example.com/foo/bar").read()
+ | ^^^^^^^^^^^^^^^^^^^^^^ ASYNC100
+ |
+
+ASYNC100.py:11:5: ASYNC100 Async functions should not call blocking HTTP methods
+ |
+11 | async def foo():
+12 | requests.get()
+ | ^^^^^^^^^^^^ ASYNC100
+ |
+
+ASYNC100.py:15:5: ASYNC100 Async functions should not call blocking HTTP methods
+ |
+15 | async def foo():
+16 | httpx.get()
+ | ^^^^^^^^^ ASYNC100
+ |
+
+ASYNC100.py:19:5: ASYNC100 Async functions should not call blocking HTTP methods
+ |
+19 | async def foo():
+20 | requests.post()
+ | ^^^^^^^^^^^^^ ASYNC100
+ |
+
+ASYNC100.py:23:5: ASYNC100 Async functions should not call blocking HTTP methods
+ |
+23 | async def foo():
+24 | httpx.post()
+ | ^^^^^^^^^^ ASYNC100
+ |
+
+
diff --git a/crates/ruff/src/rules/flake8_async/snapshots/ruff__rules__flake8_async__tests__ASYNC101_ASYNC101.py.snap b/crates/ruff/src/rules/flake8_async/snapshots/ruff__rules__flake8_async__tests__ASYNC101_ASYNC101.py.snap
new file mode 100644
index 0000000000000..d7af63516ebd6
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_async/snapshots/ruff__rules__flake8_async__tests__ASYNC101_ASYNC101.py.snap
@@ -0,0 +1,46 @@
+---
+source: crates/ruff/src/rules/flake8_async/mod.rs
+---
+ASYNC101.py:7:5: ASYNC101 Async functions should not call `open`, `time.sleep`, or `subprocess` methods
+ |
+7 | async def foo():
+8 | open("foo")
+ | ^^^^ ASYNC101
+ |
+
+ASYNC101.py:11:5: ASYNC101 Async functions should not call `open`, `time.sleep`, or `subprocess` methods
+ |
+11 | async def foo():
+12 | time.sleep(1)
+ | ^^^^^^^^^^ ASYNC101
+ |
+
+ASYNC101.py:15:5: ASYNC101 Async functions should not call `open`, `time.sleep`, or `subprocess` methods
+ |
+15 | async def foo():
+16 | subprocess.run("foo")
+ | ^^^^^^^^^^^^^^ ASYNC101
+ |
+
+ASYNC101.py:19:5: ASYNC101 Async functions should not call `open`, `time.sleep`, or `subprocess` methods
+ |
+19 | async def foo():
+20 | subprocess.call("foo")
+ | ^^^^^^^^^^^^^^^ ASYNC101
+ |
+
+ASYNC101.py:27:5: ASYNC101 Async functions should not call `open`, `time.sleep`, or `subprocess` methods
+ |
+27 | async def foo():
+28 | os.wait4(10)
+ | ^^^^^^^^ ASYNC101
+ |
+
+ASYNC101.py:31:5: ASYNC101 Async functions should not call `open`, `time.sleep`, or `subprocess` methods
+ |
+31 | async def foo():
+32 | os.wait(12)
+ | ^^^^^^^ ASYNC101
+ |
+
+
diff --git a/crates/ruff/src/rules/flake8_async/snapshots/ruff__rules__flake8_async__tests__ASYNC102_ASYNC102.py.snap b/crates/ruff/src/rules/flake8_async/snapshots/ruff__rules__flake8_async__tests__ASYNC102_ASYNC102.py.snap
new file mode 100644
index 0000000000000..f48cd3405b017
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_async/snapshots/ruff__rules__flake8_async__tests__ASYNC102_ASYNC102.py.snap
@@ -0,0 +1,18 @@
+---
+source: crates/ruff/src/rules/flake8_async/mod.rs
+---
+ASYNC102.py:5:5: ASYNC102 Async functions should not call synchronous `os` methods
+ |
+5 | async def foo():
+6 | os.popen()
+ | ^^^^^^^^ ASYNC102
+ |
+
+ASYNC102.py:9:5: ASYNC102 Async functions should not call synchronous `os` methods
+ |
+ 9 | async def foo():
+10 | os.spawnl()
+ | ^^^^^^^^^ ASYNC102
+ |
+
+
diff --git a/crates/ruff/src/rules/flake8_bandit/helpers.rs b/crates/ruff/src/rules/flake8_bandit/helpers.rs
index e578cb8430cd0..1de1cc92e16ac 100644
--- a/crates/ruff/src/rules/flake8_bandit/helpers.rs
+++ b/crates/ruff/src/rules/flake8_bandit/helpers.rs
@@ -1,47 +1,41 @@
use once_cell::sync::Lazy;
use regex::Regex;
-use rustpython_parser::ast::{Constant, Expr, ExprKind};
+use rustpython_parser::ast::{self, Constant, Expr};
-use crate::checkers::ast::Checker;
+use ruff_python_semantic::model::SemanticModel;
static PASSWORD_CANDIDATE_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"(^|_)(?i)(pas+wo?r?d|pass(phrase)?|pwd|token|secrete?)($|_)").unwrap()
});
-pub fn string_literal(expr: &Expr) -> Option<&str> {
- match &expr.node {
- ExprKind::Constant {
+pub(crate) fn string_literal(expr: &Expr) -> Option<&str> {
+ match expr {
+ Expr::Constant(ast::ExprConstant {
value: Constant::Str(string),
..
- } => Some(string),
+ }) => Some(string),
_ => None,
}
}
-pub fn matches_password_name(string: &str) -> bool {
+pub(crate) fn matches_password_name(string: &str) -> bool {
PASSWORD_CANDIDATE_REGEX.is_match(string)
}
-pub fn is_untyped_exception(type_: Option<&Expr>, checker: &Checker) -> bool {
+pub(crate) fn is_untyped_exception(type_: Option<&Expr>, model: &SemanticModel) -> bool {
type_.map_or(true, |type_| {
- if let ExprKind::Tuple { elts, .. } = &type_.node {
+ if let Expr::Tuple(ast::ExprTuple { elts, .. }) = &type_ {
elts.iter().any(|type_| {
- checker
- .ctx
- .resolve_call_path(type_)
- .map_or(false, |call_path| {
- call_path.as_slice() == ["", "Exception"]
- || call_path.as_slice() == ["", "BaseException"]
- })
- })
- } else {
- checker
- .ctx
- .resolve_call_path(type_)
- .map_or(false, |call_path| {
+ model.resolve_call_path(type_).map_or(false, |call_path| {
call_path.as_slice() == ["", "Exception"]
|| call_path.as_slice() == ["", "BaseException"]
})
+ })
+ } else {
+ model.resolve_call_path(type_).map_or(false, |call_path| {
+ call_path.as_slice() == ["", "Exception"]
+ || call_path.as_slice() == ["", "BaseException"]
+ })
}
})
}
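
For reference, a Python transliteration of the (unchanged) `PASSWORD_CANDIDATE_REGEX` shows which identifier names it treats as password-like; the inline `(?i)` is scoped as `(?i:…)` because Python rejects a global flag mid-pattern, and the sample names are made up for illustration:

```python
import re

PASSWORD_CANDIDATE = re.compile(
    r"(^|_)(?i:pas+wo?r?d|pass(?:phrase)?|pwd|token|secrete?)($|_)"
)

for name in ["password", "db_passwd", "AUTH_TOKEN", "api_secret", "passenger"]:
    print(name, bool(PASSWORD_CANDIDATE.search(name)))
# password True, db_passwd True, AUTH_TOKEN True, api_secret True, passenger False
```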
diff --git a/crates/ruff/src/rules/flake8_bandit/mod.rs b/crates/ruff/src/rules/flake8_bandit/mod.rs
index 91cd61a150715..4abd69f58af28 100644
--- a/crates/ruff/src/rules/flake8_bandit/mod.rs
+++ b/crates/ruff/src/rules/flake8_bandit/mod.rs
@@ -7,42 +7,43 @@ pub mod settings;
mod tests {
use std::path::Path;
- use crate::assert_messages;
use anyhow::Result;
-
use test_case::test_case;
+ use crate::assert_messages;
use crate::registry::Rule;
use crate::settings::Settings;
use crate::test::test_path;
- #[test_case(Rule::Assert, Path::new("S101.py"); "S101")]
- #[test_case(Rule::BadFilePermissions, Path::new("S103.py"); "S103")]
- #[test_case(Rule::CallWithShellEqualsTrue, Path::new("S604.py"); "S604")]
- #[test_case(Rule::ExecBuiltin, Path::new("S102.py"); "S102")]
- #[test_case(Rule::HardcodedBindAllInterfaces, Path::new("S104.py"); "S104")]
- #[test_case(Rule::HardcodedPasswordDefault, Path::new("S107.py"); "S107")]
- #[test_case(Rule::HardcodedPasswordFuncArg, Path::new("S106.py"); "S106")]
- #[test_case(Rule::HardcodedPasswordString, Path::new("S105.py"); "S105")]
- #[test_case(Rule::HardcodedSQLExpression, Path::new("S608.py"); "S608")]
- #[test_case(Rule::HardcodedTempFile, Path::new("S108.py"); "S108")]
- #[test_case(Rule::HashlibInsecureHashFunction, Path::new("S324.py"); "S324")]
- #[test_case(Rule::Jinja2AutoescapeFalse, Path::new("S701.py"); "S701")]
- #[test_case(Rule::LoggingConfigInsecureListen, Path::new("S612.py"); "S612")]
- #[test_case(Rule::RequestWithNoCertValidation, Path::new("S501.py"); "S501")]
- #[test_case(Rule::RequestWithoutTimeout, Path::new("S113.py"); "S113")]
- #[test_case(Rule::SnmpInsecureVersion, Path::new("S508.py"); "S508")]
- #[test_case(Rule::SnmpWeakCryptography, Path::new("S509.py"); "S509")]
- #[test_case(Rule::StartProcessWithAShell, Path::new("S605.py"); "S605")]
- #[test_case(Rule::StartProcessWithNoShell, Path::new("S606.py"); "S606")]
- #[test_case(Rule::StartProcessWithPartialPath, Path::new("S607.py"); "S607")]
- #[test_case(Rule::SubprocessPopenWithShellEqualsTrue, Path::new("S602.py"); "S602")]
- #[test_case(Rule::SubprocessWithoutShellEqualsTrue, Path::new("S603.py"); "S603")]
- #[test_case(Rule::SuspiciousPickleUsage, Path::new("S301.py"); "S301")]
- #[test_case(Rule::SuspiciousTelnetUsage, Path::new("S312.py"); "S312")]
- #[test_case(Rule::TryExceptContinue, Path::new("S112.py"); "S112")]
- #[test_case(Rule::TryExceptPass, Path::new("S110.py"); "S110")]
- #[test_case(Rule::UnsafeYAMLLoad, Path::new("S506.py"); "S506")]
+ #[test_case(Rule::Assert, Path::new("S101.py"))]
+ #[test_case(Rule::BadFilePermissions, Path::new("S103.py"))]
+ #[test_case(Rule::CallWithShellEqualsTrue, Path::new("S604.py"))]
+ #[test_case(Rule::ExecBuiltin, Path::new("S102.py"))]
+ #[test_case(Rule::HardcodedBindAllInterfaces, Path::new("S104.py"))]
+ #[test_case(Rule::HardcodedPasswordDefault, Path::new("S107.py"))]
+ #[test_case(Rule::HardcodedPasswordFuncArg, Path::new("S106.py"))]
+ #[test_case(Rule::HardcodedPasswordString, Path::new("S105.py"))]
+ #[test_case(Rule::HardcodedSQLExpression, Path::new("S608.py"))]
+ #[test_case(Rule::HardcodedTempFile, Path::new("S108.py"))]
+ #[test_case(Rule::HashlibInsecureHashFunction, Path::new("S324.py"))]
+ #[test_case(Rule::Jinja2AutoescapeFalse, Path::new("S701.py"))]
+ #[test_case(Rule::LoggingConfigInsecureListen, Path::new("S612.py"))]
+ #[test_case(Rule::ParamikoCall, Path::new("S601.py"))]
+ #[test_case(Rule::RequestWithNoCertValidation, Path::new("S501.py"))]
+ #[test_case(Rule::RequestWithoutTimeout, Path::new("S113.py"))]
+ #[test_case(Rule::SnmpInsecureVersion, Path::new("S508.py"))]
+ #[test_case(Rule::SnmpWeakCryptography, Path::new("S509.py"))]
+ #[test_case(Rule::StartProcessWithAShell, Path::new("S605.py"))]
+ #[test_case(Rule::StartProcessWithNoShell, Path::new("S606.py"))]
+ #[test_case(Rule::StartProcessWithPartialPath, Path::new("S607.py"))]
+ #[test_case(Rule::SubprocessPopenWithShellEqualsTrue, Path::new("S602.py"))]
+ #[test_case(Rule::SubprocessWithoutShellEqualsTrue, Path::new("S603.py"))]
+ #[test_case(Rule::SuspiciousPickleUsage, Path::new("S301.py"))]
+ #[test_case(Rule::SuspiciousTelnetUsage, Path::new("S312.py"))]
+ #[test_case(Rule::TryExceptContinue, Path::new("S112.py"))]
+ #[test_case(Rule::TryExceptPass, Path::new("S110.py"))]
+ #[test_case(Rule::UnixCommandWildcardInjection, Path::new("S609.py"))]
+ #[test_case(Rule::UnsafeYAMLLoad, Path::new("S506.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
let diagnostics = test_path(
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/assert_used.rs b/crates/ruff/src/rules/flake8_bandit/rules/assert_used.rs
index 5678602b245fe..28fba960c0490 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/assert_used.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/assert_used.rs
@@ -1,5 +1,5 @@
use ruff_text_size::{TextLen, TextRange};
-use rustpython_parser::ast::Stmt;
+use rustpython_parser::ast::{Ranged, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -36,6 +36,6 @@ impl Violation for Assert {
}
/// S101
-pub fn assert_used(stmt: &Stmt) -> Diagnostic {
+pub(crate) fn assert_used(stmt: &Stmt) -> Diagnostic {
Diagnostic::new(Assert, TextRange::at(stmt.start(), "assert".text_len()))
}
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/bad_file_permissions.rs b/crates/ruff/src/rules/flake8_bandit/rules/bad_file_permissions.rs
index 65daba361e73a..7bcf81dff6b48 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/bad_file_permissions.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/bad_file_permissions.rs
@@ -1,7 +1,7 @@
use num_traits::ToPrimitive;
use once_cell::sync::Lazy;
use rustc_hash::FxHashMap;
-use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword, Operator};
+use rustpython_parser::ast::{self, Constant, Expr, Keyword, Operator, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -12,7 +12,7 @@ use crate::checkers::ast::Checker;
#[violation]
pub struct BadFilePermissions {
- pub mask: u16,
+ mask: u16,
}
impl Violation for BadFilePermissions {
@@ -69,15 +69,20 @@ static PYSTAT_MAPPING: Lazy<FxHashMap<&'static str, u16>> = Lazy::new(|| {
});
fn get_int_value(expr: &Expr) -> Option<u16> {
- match &expr.node {
- ExprKind::Constant {
+ match expr {
+ Expr::Constant(ast::ExprConstant {
value: Constant::Int(value),
..
- } => value.to_u16(),
- ExprKind::Attribute { .. } => {
+ }) => value.to_u16(),
+ Expr::Attribute(_) => {
compose_call_path(expr).and_then(|path| PYSTAT_MAPPING.get(path.as_str()).copied())
}
- ExprKind::BinOp { left, op, right } => {
+ Expr::BinOp(ast::ExprBinOp {
+ left,
+ op,
+ right,
+ range: _,
+ }) => {
if let (Some(left_value), Some(right_value)) =
(get_int_value(left), get_int_value(right))
{
@@ -96,14 +101,14 @@ fn get_int_value(expr: &Expr) -> Option<u16> {
}
/// S103
-pub fn bad_file_permissions(
+pub(crate) fn bad_file_permissions(
checker: &mut Checker,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
) {
if checker
- .ctx
+ .semantic_model()
.resolve_call_path(func)
.map_or(false, |call_path| call_path.as_slice() == ["os", "chmod"])
{
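
The three expression shapes `get_int_value` resolves — an integer literal, a `stat` attribute, and a binary combination of the two — correspond to call sites like the following hypothetical fixture (the exact permissiveness threshold comes from the rest of the rule, not shown in this hunk):

```python
import os
import stat

os.chmod("key.pem", 0o777)  # integer literal -> S103
os.chmod("key.pem", stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)  # BinOp over stat attributes -> S103
os.chmod("key.pem", 0o600)  # restrictive mode, not reported
```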
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/exec_used.rs b/crates/ruff/src/rules/flake8_bandit/rules/exec_used.rs
index d54dee0850d41..3ff3db8dedc87 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/exec_used.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/exec_used.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Expr, ExprKind};
+use rustpython_parser::ast::{self, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -14,8 +14,8 @@ impl Violation for ExecBuiltin {
}
/// S102
-pub fn exec_used(expr: &Expr, func: &Expr) -> Option<Diagnostic> {
- let ExprKind::Name { id, .. } = &func.node else {
+pub(crate) fn exec_used(expr: &Expr, func: &Expr) -> Option<Diagnostic> {
+ let Expr::Name(ast::ExprName { id, .. }) = func else {
return None;
};
if id != "exec" {
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_bind_all_interfaces.rs b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_bind_all_interfaces.rs
index a74288a8416c8..86f68e10b8c42 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_bind_all_interfaces.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_bind_all_interfaces.rs
@@ -1,6 +1,7 @@
+use ruff_text_size::TextRange;
+
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
-use ruff_text_size::TextRange;
#[violation]
pub struct HardcodedBindAllInterfaces;
@@ -13,7 +14,7 @@ impl Violation for HardcodedBindAllInterfaces {
}
/// S104
-pub fn hardcoded_bind_all_interfaces(value: &str, range: TextRange) -> Option<Diagnostic> {
+pub(crate) fn hardcoded_bind_all_interfaces(value: &str, range: TextRange) -> Option<Diagnostic> {
if value == "0.0.0.0" {
Some(Diagnostic::new(HardcodedBindAllInterfaces, range))
} else {
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_default.rs b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_default.rs
index 76669b993cf83..0f61c414dfd89 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_default.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_default.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Arg, Arguments, Expr};
+use rustpython_parser::ast::{Arg, Arguments, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -7,33 +7,36 @@ use super::super::helpers::{matches_password_name, string_literal};
#[violation]
pub struct HardcodedPasswordDefault {
- pub string: String,
+ name: String,
}
impl Violation for HardcodedPasswordDefault {
#[derive_message_formats]
fn message(&self) -> String {
- let HardcodedPasswordDefault { string } = self;
- format!("Possible hardcoded password: \"{}\"", string.escape_debug())
+ let HardcodedPasswordDefault { name } = self;
+ format!(
+ "Possible hardcoded password assigned to function default: \"{}\"",
+ name.escape_debug()
+ )
}
}
fn check_password_kwarg(arg: &Arg, default: &Expr) -> Option<Diagnostic> {
- let string = string_literal(default).filter(|string| !string.is_empty())?;
- let kwarg_name = &arg.node.arg;
+ string_literal(default).filter(|string| !string.is_empty())?;
+ let kwarg_name = &arg.arg;
if !matches_password_name(kwarg_name) {
return None;
}
Some(Diagnostic::new(
HardcodedPasswordDefault {
- string: string.to_string(),
+ name: kwarg_name.to_string(),
},
default.range(),
))
}
/// S107
-pub fn hardcoded_password_default(arguments: &Arguments) -> Vec<Diagnostic> {
+pub(crate) fn hardcoded_password_default(arguments: &Arguments) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = Vec::new();
let defaults_start =
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_func_arg.rs b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_func_arg.rs
index f64e6abd74cca..b98b5e5efa159 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_func_arg.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_func_arg.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::Keyword;
+use rustpython_parser::ast::{Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -7,30 +7,33 @@ use super::super::helpers::{matches_password_name, string_literal};
#[violation]
pub struct HardcodedPasswordFuncArg {
- pub string: String,
+ name: String,
}
impl Violation for HardcodedPasswordFuncArg {
#[derive_message_formats]
fn message(&self) -> String {
- let HardcodedPasswordFuncArg { string } = self;
- format!("Possible hardcoded password: \"{}\"", string.escape_debug())
+ let HardcodedPasswordFuncArg { name } = self;
+ format!(
+ "Possible hardcoded password assigned to argument: \"{}\"",
+ name.escape_debug()
+ )
}
}
/// S106
-pub fn hardcoded_password_func_arg(keywords: &[Keyword]) -> Vec<Diagnostic> {
+pub(crate) fn hardcoded_password_func_arg(keywords: &[Keyword]) -> Vec<Diagnostic> {
keywords
.iter()
.filter_map(|keyword| {
- let string = string_literal(&keyword.node.value).filter(|string| !string.is_empty())?;
- let arg = keyword.node.arg.as_ref()?;
+ string_literal(&keyword.value).filter(|string| !string.is_empty())?;
+ let arg = keyword.arg.as_ref()?;
if !matches_password_name(arg) {
return None;
}
Some(Diagnostic::new(
HardcodedPasswordFuncArg {
- string: string.to_string(),
+ name: arg.to_string(),
},
keyword.range(),
))
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_string.rs b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_string.rs
index 633c7c338ec9e..c69d5cdbe4d5b 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_string.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_password_string.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Constant, Expr, ExprKind};
+use rustpython_parser::ast::{self, Constant, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -7,49 +7,59 @@ use super::super::helpers::{matches_password_name, string_literal};
#[violation]
pub struct HardcodedPasswordString {
- pub string: String,
+ name: String,
}
impl Violation for HardcodedPasswordString {
#[derive_message_formats]
fn message(&self) -> String {
- let HardcodedPasswordString { string } = self;
- format!("Possible hardcoded password: \"{}\"", string.escape_debug())
+ let HardcodedPasswordString { name } = self;
+ format!(
+ "Possible hardcoded password assigned to: \"{}\"",
+ name.escape_debug()
+ )
}
}
-fn is_password_target(target: &Expr) -> bool {
- let target_name = match &target.node {
+fn password_target(target: &Expr) -> Option<&str> {
+ let target_name = match target {
// variable = "s3cr3t"
- ExprKind::Name { id, .. } => id,
+ Expr::Name(ast::ExprName { id, .. }) => id.as_str(),
// d["password"] = "s3cr3t"
- ExprKind::Subscript { slice, .. } => match &slice.node {
- ExprKind::Constant {
+ Expr::Subscript(ast::ExprSubscript { slice, .. }) => match slice.as_ref() {
+ Expr::Constant(ast::ExprConstant {
value: Constant::Str(string),
..
- } => string,
- _ => return false,
+ }) => string,
+ _ => return None,
},
// obj.password = "s3cr3t"
- ExprKind::Attribute { attr, .. } => attr,
- _ => return false,
+ Expr::Attribute(ast::ExprAttribute { attr, .. }) => attr,
+ _ => return None,
};
- matches_password_name(target_name)
+ if matches_password_name(target_name) {
+ Some(target_name)
+ } else {
+ None
+ }
}
/// S105
-pub fn compare_to_hardcoded_password_string(left: &Expr, comparators: &[Expr]) -> Vec<Diagnostic> {
+pub(crate) fn compare_to_hardcoded_password_string(
+ left: &Expr,
+ comparators: &[Expr],
+) -> Vec<Diagnostic> {
comparators
.iter()
.filter_map(|comp| {
- let string = string_literal(comp).filter(|string| !string.is_empty())?;
- if !is_password_target(left) {
+ string_literal(comp).filter(|string| !string.is_empty())?;
+ let Some(name) = password_target(left) else {
return None;
- }
+ };
Some(Diagnostic::new(
HardcodedPasswordString {
- string: string.to_string(),
+ name: name.to_string(),
},
comp.range(),
))
@@ -58,13 +68,19 @@ pub fn compare_to_hardcoded_password_string(left: &Expr, comparators: &[Expr]) -
}
/// S105
-pub fn assign_hardcoded_password_string(value: &Expr, targets: &[Expr]) -> Option<Diagnostic> {
- if let Some(string) = string_literal(value).filter(|string| !string.is_empty()) {
+pub(crate) fn assign_hardcoded_password_string(
+ value: &Expr,
+ targets: &[Expr],
+) -> Option<Diagnostic> {
+ if string_literal(value)
+ .filter(|string| !string.is_empty())
+ .is_some()
+ {
for target in targets {
- if is_password_target(target) {
+ if let Some(name) = password_target(target) {
return Some(Diagnostic::new(
HardcodedPasswordString {
- string: string.to_string(),
+ name: name.to_string(),
},
value.range(),
));
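
With the message change above, S105 now names the assignment target instead of echoing the secret itself; the three target shapes `password_target` recognizes would report, for example:

```python
password = "s3cr3t"  # S105: Possible hardcoded password assigned to: "password"
connection["password"] = "s3cr3t"  # S105: the subscript key is used as the name
settings.password = "s3cr3t"  # S105: the attribute name is used
```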
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs
index 72216ff9d4ae6..4595dce6c73d7 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs
@@ -1,17 +1,17 @@
use once_cell::sync::Lazy;
use regex::Regex;
-use rustpython_parser::ast::{Expr, ExprKind, Operator};
+use rustpython_parser::ast::{self, Expr, Operator, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::helpers::{any_over_expr, unparse_expr};
+use ruff_python_ast::helpers::any_over_expr;
use crate::checkers::ast::Checker;
use super::super::helpers::string_literal;
static SQL_REGEX: Lazy<Regex> = Lazy::new(|| {
- Regex::new(r"(?i)(select\s.*from\s|delete\s+from\s|insert\s+into\s.*values\s|update\s.*set\s)")
+ Regex::new(r"(?i)\b(select\s.+\sfrom\s|delete\s+from\s|(insert|replace)\s.+\svalues\s|update\s.+\sset\s)")
.unwrap()
});
@@ -53,46 +53,46 @@ fn matches_sql_statement(string: &str) -> bool {
}
fn unparse_string_format_expression(checker: &mut Checker, expr: &Expr) -> Option<String> {
- match &expr.node {
+ match expr {
// "select * from table where val = " + "str" + ...
// "select * from table where val = %s" % ...
- ExprKind::BinOp {
+ Expr::BinOp(ast::ExprBinOp {
op: Operator::Add | Operator::Mod,
..
- } => {
- let Some(parent) = checker.ctx.current_expr_parent() else {
+ }) => {
+ let Some(parent) = checker.semantic_model().expr_parent() else {
if any_over_expr(expr, &has_string_literal) {
- return Some(unparse_expr(expr, checker.stylist));
+ return Some(checker.generator().expr(expr));
}
return None;
};
// Only evaluate the full BinOp, not the nested components.
- let ExprKind::BinOp { .. } = &parent.node else {
+ let Expr::BinOp(_) = parent else {
if any_over_expr(expr, &has_string_literal) {
- return Some(unparse_expr(expr, checker.stylist));
+ return Some(checker.generator().expr(expr));
}
return None;
};
None
}
- ExprKind::Call { func, .. } => {
- let ExprKind::Attribute{ attr, value, .. } = &func.node else {
+ Expr::Call(ast::ExprCall { func, .. }) => {
+ let Expr::Attribute(ast::ExprAttribute { attr, value, .. }) = func.as_ref() else {
return None;
};
// "select * from table where val = {}".format(...)
if attr == "format" && string_literal(value).is_some() {
- return Some(unparse_expr(expr, checker.stylist));
+ return Some(checker.generator().expr(expr));
};
None
}
// f"select * from table where val = {val}"
- ExprKind::JoinedStr { .. } => Some(unparse_expr(expr, checker.stylist)),
+ Expr::JoinedStr(_) => Some(checker.generator().expr(expr)),
_ => None,
}
}
/// S608
-pub fn hardcoded_sql_expression(checker: &mut Checker, expr: &Expr) {
+pub(crate) fn hardcoded_sql_expression(checker: &mut Checker, expr: &Expr) {
match unparse_string_format_expression(checker, expr) {
Some(string) if matches_sql_statement(&string) => {
checker
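
The tightened `SQL_REGEX` adds a word boundary, requires whitespace on both sides of the intervening clause, and now also covers `REPLACE … VALUES`. A quick side-by-side in Python, with the old pattern copied from the removed line above:

```python
import re

OLD = re.compile(
    r"(?i)(select\s.*from\s|delete\s+from\s|insert\s+into\s.*values\s|update\s.*set\s)"
)
NEW = re.compile(
    r"(?i)\b(select\s.+\sfrom\s|delete\s+from\s|(insert|replace)\s.+\svalues\s|update\s.+\sset\s)"
)

samples = [
    "SELECT name FROM users WHERE id = %s",   # matched by both
    "REPLACE INTO t (a) VALUES (%s)",         # matched only by NEW
    "deselect the rows from the view first",  # matched only by OLD (false positive)
]
for s in samples:
    print(bool(OLD.search(s)), bool(NEW.search(s)), s)
```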
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_tmp_directory.rs b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_tmp_directory.rs
index 840243c0a2484..8c27763f806bd 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_tmp_directory.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/hardcoded_tmp_directory.rs
@@ -1,11 +1,11 @@
-use rustpython_parser::ast::Expr;
+use rustpython_parser::ast::{Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
#[violation]
pub struct HardcodedTempFile {
- pub string: String,
+ string: String,
}
impl Violation for HardcodedTempFile {
@@ -20,7 +20,7 @@ impl Violation for HardcodedTempFile {
}
/// S108
-pub fn hardcoded_tmp_directory(
+pub(crate) fn hardcoded_tmp_directory(
expr: &Expr,
value: &str,
prefixes: &[String],
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/hashlib_insecure_hash_functions.rs b/crates/ruff/src/rules/flake8_bandit/rules/hashlib_insecure_hash_functions.rs
index 9ade65368c3d2..6948f6323b310 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/hashlib_insecure_hash_functions.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/hashlib_insecure_hash_functions.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword};
+use rustpython_parser::ast::{self, Constant, Expr, Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -10,7 +10,7 @@ use super::super::helpers::string_literal;
#[violation]
pub struct HashlibInsecureHashFunction {
- pub string: String,
+ string: String,
}
impl Violation for HashlibInsecureHashFunction {
@@ -26,11 +26,11 @@ const WEAK_HASHES: [&str; 4] = ["md4", "md5", "sha", "sha1"];
fn is_used_for_security(call_args: &SimpleCallArgs) -> bool {
match call_args.keyword_argument("usedforsecurity") {
Some(expr) => !matches!(
- &expr.node,
- ExprKind::Constant {
+ expr,
+ Expr::Constant(ast::ExprConstant {
value: Constant::Bool(false),
..
- }
+ })
),
_ => true,
}
@@ -42,22 +42,27 @@ enum HashlibCall {
}
/// S324
-pub fn hashlib_insecure_hash_functions(
+pub(crate) fn hashlib_insecure_hash_functions(
checker: &mut Checker,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
) {
- if let Some(hashlib_call) = checker.ctx.resolve_call_path(func).and_then(|call_path| {
- if call_path.as_slice() == ["hashlib", "new"] {
- Some(HashlibCall::New)
- } else {
- WEAK_HASHES
- .iter()
- .find(|hash| call_path.as_slice() == ["hashlib", hash])
- .map(|hash| HashlibCall::WeakHash(hash))
- }
- }) {
+ if let Some(hashlib_call) =
+ checker
+ .semantic_model()
+ .resolve_call_path(func)
+ .and_then(|call_path| {
+ if call_path.as_slice() == ["hashlib", "new"] {
+ Some(HashlibCall::New)
+ } else {
+ WEAK_HASHES
+ .iter()
+ .find(|hash| call_path.as_slice() == ["hashlib", hash])
+ .map(|hash| HashlibCall::WeakHash(hash))
+ }
+ })
+ {
match hashlib_call {
HashlibCall::New => {
let call_args = SimpleCallArgs::new(args, keywords);
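
Given the `usedforsecurity` handling above, a weak hash only triggers S324 when it is (implicitly or explicitly) used for security; a small hypothetical fixture (the keyword requires Python 3.9+):

```python
import hashlib

hashlib.md5(b"data")  # S324: weak hash, implicitly security-relevant
hashlib.new("md5", b"data")  # S324 via the `hashlib.new` branch
hashlib.md5(b"data", usedforsecurity=False)  # not reported: explicitly non-security
```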
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/jinja2_autoescape_false.rs b/crates/ruff/src/rules/flake8_bandit/rules/jinja2_autoescape_false.rs
index 519bfafd779c6..06a2e770812b8 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/jinja2_autoescape_false.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/jinja2_autoescape_false.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword};
+use rustpython_parser::ast::{self, Constant, Expr, Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -8,7 +8,7 @@ use crate::checkers::ast::Checker;
#[violation]
pub struct Jinja2AutoescapeFalse {
- pub value: bool,
+ value: bool,
}
impl Violation for Jinja2AutoescapeFalse {
@@ -30,14 +30,14 @@ impl Violation for Jinja2AutoescapeFalse {
}
/// S701
-pub fn jinja2_autoescape_false(
+pub(crate) fn jinja2_autoescape_false(
checker: &mut Checker,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
) {
if checker
- .ctx
+ .semantic_model()
.resolve_call_path(func)
.map_or(false, |call_path| {
call_path.as_slice() == ["jinja2", "Environment"]
@@ -46,14 +46,14 @@ pub fn jinja2_autoescape_false(
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(autoescape_arg) = call_args.keyword_argument("autoescape") {
- match &autoescape_arg.node {
- ExprKind::Constant {
+ match autoescape_arg {
+ Expr::Constant(ast::ExprConstant {
value: Constant::Bool(true),
..
- } => (),
- ExprKind::Call { func, .. } => {
- if let ExprKind::Name { id, .. } = &func.node {
- if id.as_str() != "select_autoescape" {
+ }) => (),
+ Expr::Call(ast::ExprCall { func, .. }) => {
+ if let Expr::Name(ast::ExprName { id, .. }) = func.as_ref() {
+ if id != "select_autoescape" {
checker.diagnostics.push(Diagnostic::new(
Jinja2AutoescapeFalse { value: true },
autoescape_arg.range(),
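
Per the match arms above, a literal `True` or a call to `select_autoescape` passes; other values are reported by the fall-through arm, which sits outside this hunk. An illustrative fixture:

```python
import jinja2
from jinja2 import select_autoescape

jinja2.Environment(autoescape=True)  # ok: literal True
jinja2.Environment(autoescape=select_autoescape(["html", "xml"]))  # ok: select_autoescape call
jinja2.Environment(autoescape=False)  # S701 (handled by the arm not shown here)
```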
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/logging_config_insecure_listen.rs b/crates/ruff/src/rules/flake8_bandit/rules/logging_config_insecure_listen.rs
index d838fe9dd1b9a..6fc645bb88119 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/logging_config_insecure_listen.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/logging_config_insecure_listen.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Expr, Keyword};
+use rustpython_parser::ast::{Expr, Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -17,14 +17,14 @@ impl Violation for LoggingConfigInsecureListen {
}
/// S612
-pub fn logging_config_insecure_listen(
+pub(crate) fn logging_config_insecure_listen(
checker: &mut Checker,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
) {
if checker
- .ctx
+ .semantic_model()
.resolve_call_path(func)
.map_or(false, |call_path| {
call_path.as_slice() == ["logging", "config", "listen"]
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/mod.rs b/crates/ruff/src/rules/flake8_bandit/rules/mod.rs
index aa836f6eba26e..90f1266e42621 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/mod.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/mod.rs
@@ -1,35 +1,38 @@
-pub use assert_used::{assert_used, Assert};
-pub use bad_file_permissions::{bad_file_permissions, BadFilePermissions};
-pub use exec_used::{exec_used, ExecBuiltin};
-pub use hardcoded_bind_all_interfaces::{
+pub(crate) use assert_used::{assert_used, Assert};
+pub(crate) use bad_file_permissions::{bad_file_permissions, BadFilePermissions};
+pub(crate) use exec_used::{exec_used, ExecBuiltin};
+pub(crate) use hardcoded_bind_all_interfaces::{
hardcoded_bind_all_interfaces, HardcodedBindAllInterfaces,
};
-pub use hardcoded_password_default::{hardcoded_password_default, HardcodedPasswordDefault};
-pub use hardcoded_password_func_arg::{hardcoded_password_func_arg, HardcodedPasswordFuncArg};
-pub use hardcoded_password_string::{
+pub(crate) use hardcoded_password_default::{hardcoded_password_default, HardcodedPasswordDefault};
+pub(crate) use hardcoded_password_func_arg::{
+ hardcoded_password_func_arg, HardcodedPasswordFuncArg,
+};
+pub(crate) use hardcoded_password_string::{
assign_hardcoded_password_string, compare_to_hardcoded_password_string, HardcodedPasswordString,
};
-pub use hardcoded_sql_expression::{hardcoded_sql_expression, HardcodedSQLExpression};
-pub use hardcoded_tmp_directory::{hardcoded_tmp_directory, HardcodedTempFile};
-pub use hashlib_insecure_hash_functions::{
+pub(crate) use hardcoded_sql_expression::{hardcoded_sql_expression, HardcodedSQLExpression};
+pub(crate) use hardcoded_tmp_directory::{hardcoded_tmp_directory, HardcodedTempFile};
+pub(crate) use hashlib_insecure_hash_functions::{
hashlib_insecure_hash_functions, HashlibInsecureHashFunction,
};
-pub use jinja2_autoescape_false::{jinja2_autoescape_false, Jinja2AutoescapeFalse};
-pub use logging_config_insecure_listen::{
+pub(crate) use jinja2_autoescape_false::{jinja2_autoescape_false, Jinja2AutoescapeFalse};
+pub(crate) use logging_config_insecure_listen::{
logging_config_insecure_listen, LoggingConfigInsecureListen,
};
-pub use request_with_no_cert_validation::{
+pub(crate) use paramiko_calls::{paramiko_call, ParamikoCall};
+pub(crate) use request_with_no_cert_validation::{
request_with_no_cert_validation, RequestWithNoCertValidation,
};
-pub use request_without_timeout::{request_without_timeout, RequestWithoutTimeout};
-pub use shell_injection::{
+pub(crate) use request_without_timeout::{request_without_timeout, RequestWithoutTimeout};
+pub(crate) use shell_injection::{
shell_injection, CallWithShellEqualsTrue, StartProcessWithAShell, StartProcessWithNoShell,
StartProcessWithPartialPath, SubprocessPopenWithShellEqualsTrue,
- SubprocessWithoutShellEqualsTrue,
+ SubprocessWithoutShellEqualsTrue, UnixCommandWildcardInjection,
};
-pub use snmp_insecure_version::{snmp_insecure_version, SnmpInsecureVersion};
-pub use snmp_weak_cryptography::{snmp_weak_cryptography, SnmpWeakCryptography};
-pub use suspicious_function_call::{
+pub(crate) use snmp_insecure_version::{snmp_insecure_version, SnmpInsecureVersion};
+pub(crate) use snmp_weak_cryptography::{snmp_weak_cryptography, SnmpWeakCryptography};
+pub(crate) use suspicious_function_call::{
suspicious_function_call, SuspiciousEvalUsage, SuspiciousFTPLibUsage,
SuspiciousInsecureCipherModeUsage, SuspiciousInsecureCipherUsage, SuspiciousInsecureHashUsage,
SuspiciousMarkSafeUsage, SuspiciousMarshalUsage, SuspiciousMktempUsage,
@@ -39,9 +42,9 @@ pub use suspicious_function_call::{
SuspiciousXMLExpatReaderUsage, SuspiciousXMLMiniDOMUsage, SuspiciousXMLPullDOMUsage,
SuspiciousXMLSaxUsage,
};
-pub use try_except_continue::{try_except_continue, TryExceptContinue};
-pub use try_except_pass::{try_except_pass, TryExceptPass};
-pub use unsafe_yaml_load::{unsafe_yaml_load, UnsafeYAMLLoad};
+pub(crate) use try_except_continue::{try_except_continue, TryExceptContinue};
+pub(crate) use try_except_pass::{try_except_pass, TryExceptPass};
+pub(crate) use unsafe_yaml_load::{unsafe_yaml_load, UnsafeYAMLLoad};
mod assert_used;
mod bad_file_permissions;
@@ -55,6 +58,7 @@ mod hardcoded_tmp_directory;
mod hashlib_insecure_hash_functions;
mod jinja2_autoescape_false;
mod logging_config_insecure_listen;
+mod paramiko_calls;
mod request_with_no_cert_validation;
mod request_without_timeout;
mod shell_injection;
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/paramiko_calls.rs b/crates/ruff/src/rules/flake8_bandit/rules/paramiko_calls.rs
new file mode 100644
index 0000000000000..e340a09ca4d41
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_bandit/rules/paramiko_calls.rs
@@ -0,0 +1,31 @@
+use rustpython_parser::ast::{Expr, Ranged};
+
+use ruff_diagnostics::{Diagnostic, Violation};
+use ruff_macros::{derive_message_formats, violation};
+
+use crate::checkers::ast::Checker;
+
+#[violation]
+pub struct ParamikoCall;
+
+impl Violation for ParamikoCall {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("Possible shell injection via Paramiko call; check inputs are properly sanitized")
+ }
+}
+
+/// S601
+pub(crate) fn paramiko_call(checker: &mut Checker, func: &Expr) {
+ if checker
+ .semantic_model()
+ .resolve_call_path(func)
+ .map_or(false, |call_path| {
+ call_path.as_slice() == ["paramiko", "exec_command"]
+ })
+ {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(ParamikoCall, func.range()));
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/request_with_no_cert_validation.rs b/crates/ruff/src/rules/flake8_bandit/rules/request_with_no_cert_validation.rs
index 3974eee8fdaef..fe45fb6e769a7 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/request_with_no_cert_validation.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/request_with_no_cert_validation.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword};
+use rustpython_parser::ast::{self, Constant, Expr, Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -8,7 +8,7 @@ use crate::checkers::ast::Checker;
#[violation]
pub struct RequestWithNoCertValidation {
- pub string: String,
+ string: String,
}
impl Violation for RequestWithNoCertValidation {
@@ -37,29 +37,33 @@ const HTTPX_METHODS: [&str; 11] = [
];
/// S501
-pub fn request_with_no_cert_validation(
+pub(crate) fn request_with_no_cert_validation(
checker: &mut Checker,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
) {
- if let Some(target) = checker.ctx.resolve_call_path(func).and_then(|call_path| {
- if call_path.len() == 2 {
- if call_path[0] == "requests" && REQUESTS_HTTP_VERBS.contains(&call_path[1]) {
- return Some("requests");
+ if let Some(target) = checker
+ .semantic_model()
+ .resolve_call_path(func)
+ .and_then(|call_path| {
+ if call_path.len() == 2 {
+ if call_path[0] == "requests" && REQUESTS_HTTP_VERBS.contains(&call_path[1]) {
+ return Some("requests");
+ }
+ if call_path[0] == "httpx" && HTTPX_METHODS.contains(&call_path[1]) {
+ return Some("httpx");
+ }
}
- if call_path[0] == "httpx" && HTTPX_METHODS.contains(&call_path[1]) {
- return Some("httpx");
- }
- }
- None
- }) {
+ None
+ })
+ {
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(verify_arg) = call_args.keyword_argument("verify") {
- if let ExprKind::Constant {
+ if let Expr::Constant(ast::ExprConstant {
value: Constant::Bool(false),
..
- } = &verify_arg.node
+ }) = &verify_arg
{
checker.diagnostics.push(Diagnostic::new(
RequestWithNoCertValidation {
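
As the `verify` check above shows, S501 fires only on a literal `False`, for both supported clients; for instance:

```python
import httpx
import requests

requests.get("https://example.com", verify=False)  # S501
httpx.post("https://example.com", verify=False)  # S501
requests.get("https://example.com", verify=True)  # ok
```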
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/request_without_timeout.rs b/crates/ruff/src/rules/flake8_bandit/rules/request_without_timeout.rs
index 0ac9bf93b4fa7..b08edbd4faf22 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/request_without_timeout.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/request_without_timeout.rs
@@ -1,8 +1,8 @@
-use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword};
+use rustpython_parser::ast::{self, Constant, Expr, Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::helpers::{unparse_constant, SimpleCallArgs};
+use ruff_python_ast::helpers::SimpleCallArgs;
use crate::checkers::ast::Checker;
@@ -27,14 +27,14 @@ impl Violation for RequestWithoutTimeout {
const HTTP_VERBS: [&str; 7] = ["get", "options", "head", "post", "put", "patch", "delete"];
/// S113
-pub fn request_without_timeout(
+pub(crate) fn request_without_timeout(
checker: &mut Checker,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
) {
if checker
- .ctx
+ .semantic_model()
.resolve_call_path(func)
.map_or(false, |call_path| {
HTTP_VERBS
@@ -44,11 +44,11 @@ pub fn request_without_timeout(
{
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(timeout_arg) = call_args.keyword_argument("timeout") {
- if let Some(timeout) = match &timeout_arg.node {
- ExprKind::Constant {
+ if let Some(timeout) = match timeout_arg {
+ Expr::Constant(ast::ExprConstant {
value: value @ Constant::None,
..
- } => Some(unparse_constant(value, checker.stylist)),
+ }) => Some(checker.generator().constant(value)),
_ => None,
} {
checker.diagnostics.push(Diagnostic::new(
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/shell_injection.rs b/crates/ruff/src/rules/flake8_bandit/rules/shell_injection.rs
index dec37b05ff022..6eb6c9f5e2149 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/shell_injection.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/shell_injection.rs
@@ -1,20 +1,16 @@
//! Checks relating to shell injection.
-use once_cell::sync::Lazy;
-use regex::Regex;
-use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword};
+use rustpython_parser::ast::{self, Constant, Expr, Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::Truthiness;
-use ruff_python_semantic::context::Context;
+use ruff_python_semantic::model::SemanticModel;
use crate::{
checkers::ast::Checker, registry::Rule, rules::flake8_bandit::helpers::string_literal,
};
-static FULL_PATH_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^([A-Za-z]:|[\\/.])").unwrap());
-
#[violation]
pub struct SubprocessPopenWithShellEqualsTrue {
seems_safe: bool,
@@ -89,114 +85,35 @@ impl Violation for StartProcessWithPartialPath {
}
}
-#[derive(Copy, Clone, Debug)]
-enum CallKind {
- Subprocess,
- Shell,
- NoShell,
-}
-
-/// Return the [`CallKind`] of the given function call.
-fn get_call_kind(func: &Expr, context: &Context) -> Option<CallKind> {
- context
- .resolve_call_path(func)
- .and_then(|call_path| match call_path.as_slice() {
- &[module, submodule] => match module {
- "os" => match submodule {
- "execl" | "execle" | "execlp" | "execlpe" | "execv" | "execve" | "execvp"
- | "execvpe" | "spawnl" | "spawnle" | "spawnlp" | "spawnlpe" | "spawnv"
- | "spawnve" | "spawnvp" | "spawnvpe" | "startfile" => Some(CallKind::NoShell),
- "system" | "popen" | "popen2" | "popen3" | "popen4" => Some(CallKind::Shell),
- _ => None,
- },
- "subprocess" => match submodule {
- "Popen" | "call" | "check_call" | "check_output" | "run" => {
- Some(CallKind::Subprocess)
- }
- _ => None,
- },
- "popen2" => match submodule {
- "popen2" | "popen3" | "popen4" | "Popen3" | "Popen4" => Some(CallKind::Shell),
- _ => None,
- },
- "commands" => match submodule {
- "getoutput" | "getstatusoutput" => Some(CallKind::Shell),
- _ => None,
- },
- _ => None,
- },
- _ => None,
- })
-}
-
-#[derive(Copy, Clone, Debug)]
-struct ShellKeyword<'a> {
- /// Whether the `shell` keyword argument is set and evaluates to `True`.
- truthiness: Truthiness,
- /// The `shell` keyword argument.
- keyword: &'a Keyword,
-}
-
-/// Return the `shell` keyword argument to the given function call, if any.
-fn find_shell_keyword<'a>(ctx: &Context, keywords: &'a [Keyword]) -> Option<ShellKeyword<'a>> {
- keywords
- .iter()
- .find(|keyword| {
- keyword
- .node
- .arg
- .as_ref()
- .map_or(false, |arg| arg == "shell")
- })
- .map(|keyword| ShellKeyword {
- truthiness: Truthiness::from_expr(&keyword.node.value, |id| ctx.is_builtin(id)),
- keyword,
- })
-}
-
-/// Return `true` if the value provided to the `shell` call seems safe. This is based on Bandit's
-/// definition: string literals are considered okay, but dynamically-computed values are not.
-fn shell_call_seems_safe(arg: &Expr) -> bool {
- matches!(
- arg.node,
- ExprKind::Constant {
- value: Constant::Str(_),
- ..
- }
- )
-}
+#[violation]
+pub struct UnixCommandWildcardInjection;
-/// Return the [`Expr`] as a string literal, if it's a string or a list of strings.
-fn try_string_literal(expr: &Expr) -> Option<&str> {
- match &expr.node {
- ExprKind::List { elts, .. } => {
- if elts.is_empty() {
- None
- } else {
- string_literal(&elts[0])
- }
- }
- _ => string_literal(expr),
+impl Violation for UnixCommandWildcardInjection {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("Possible wildcard injection in call due to `*` usage")
}
}
-/// S602, S603, S604, S605, S606, S607
-pub fn shell_injection(checker: &mut Checker, func: &Expr, args: &[Expr], keywords: &[Keyword]) {
- let call_kind = get_call_kind(func, &checker.ctx);
+/// S602, S603, S604, S605, S606, S607, S609
+pub(crate) fn shell_injection(
+ checker: &mut Checker,
+ func: &Expr,
+ args: &[Expr],
+ keywords: &[Keyword],
+) {
+ let call_kind = get_call_kind(func, checker.semantic_model());
+ let shell_keyword = find_shell_keyword(checker.semantic_model(), keywords);
if matches!(call_kind, Some(CallKind::Subprocess)) {
if let Some(arg) = args.first() {
- match find_shell_keyword(&checker.ctx, keywords) {
+ match shell_keyword {
// S602
Some(ShellKeyword {
truthiness: Truthiness::Truthy,
keyword,
}) => {
- if checker
- .settings
- .rules
- .enabled(Rule::SubprocessPopenWithShellEqualsTrue)
- {
+ if checker.enabled(Rule::SubprocessPopenWithShellEqualsTrue) {
checker.diagnostics.push(Diagnostic::new(
SubprocessPopenWithShellEqualsTrue {
seems_safe: shell_call_seems_safe(arg),
@@ -210,11 +127,7 @@ pub fn shell_injection(checker: &mut Checker, func: &Expr, args: &[Expr], keywor
truthiness: Truthiness::Falsey | Truthiness::Unknown,
keyword,
}) => {
- if checker
- .settings
- .rules
- .enabled(Rule::SubprocessWithoutShellEqualsTrue)
- {
+ if checker.enabled(Rule::SubprocessWithoutShellEqualsTrue) {
checker.diagnostics.push(Diagnostic::new(
SubprocessWithoutShellEqualsTrue,
keyword.range(),
@@ -223,11 +136,7 @@ pub fn shell_injection(checker: &mut Checker, func: &Expr, args: &[Expr], keywor
}
// S603
None => {
- if checker
- .settings
- .rules
- .enabled(Rule::SubprocessWithoutShellEqualsTrue)
- {
+ if checker.enabled(Rule::SubprocessWithoutShellEqualsTrue) {
checker.diagnostics.push(Diagnostic::new(
SubprocessWithoutShellEqualsTrue,
arg.range(),
@@ -239,14 +148,10 @@ pub fn shell_injection(checker: &mut Checker, func: &Expr, args: &[Expr], keywor
} else if let Some(ShellKeyword {
truthiness: Truthiness::Truthy,
keyword,
- }) = find_shell_keyword(&checker.ctx, keywords)
+ }) = shell_keyword
{
// S604
- if checker
- .settings
- .rules
- .enabled(Rule::CallWithShellEqualsTrue)
- {
+ if checker.enabled(Rule::CallWithShellEqualsTrue) {
checker
.diagnostics
.push(Diagnostic::new(CallWithShellEqualsTrue, keyword.range()));
@@ -254,9 +159,9 @@ pub fn shell_injection(checker: &mut Checker, func: &Expr, args: &[Expr], keywor
}
// S605
- if matches!(call_kind, Some(CallKind::Shell)) {
- if let Some(arg) = args.first() {
- if checker.settings.rules.enabled(Rule::StartProcessWithAShell) {
+ if checker.enabled(Rule::StartProcessWithAShell) {
+ if matches!(call_kind, Some(CallKind::Shell)) {
+ if let Some(arg) = args.first() {
checker.diagnostics.push(Diagnostic::new(
StartProcessWithAShell {
seems_safe: shell_call_seems_safe(arg),
@@ -268,12 +173,8 @@ pub fn shell_injection(checker: &mut Checker, func: &Expr, args: &[Expr], keywor
}
// S606
- if matches!(call_kind, Some(CallKind::NoShell)) {
- if checker
- .settings
- .rules
- .enabled(Rule::StartProcessWithNoShell)
- {
+ if checker.enabled(Rule::StartProcessWithNoShell) {
+ if matches!(call_kind, Some(CallKind::NoShell)) {
checker
.diagnostics
.push(Diagnostic::new(StartProcessWithNoShell, func.range()));
@@ -281,21 +182,195 @@ pub fn shell_injection(checker: &mut Checker, func: &Expr, args: &[Expr], keywor
}
// S607
- if call_kind.is_some() {
- if let Some(arg) = args.first() {
- if checker
- .settings
- .rules
- .enabled(Rule::StartProcessWithPartialPath)
- {
- if let Some(value) = try_string_literal(arg) {
- if FULL_PATH_REGEX.find(value).is_none() {
- checker
- .diagnostics
- .push(Diagnostic::new(StartProcessWithPartialPath, arg.range()));
- }
+ if checker.enabled(Rule::StartProcessWithPartialPath) {
+ if call_kind.is_some() {
+ if let Some(arg) = args.first() {
+ if is_partial_path(arg) {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(StartProcessWithPartialPath, arg.range()));
+ }
+ }
+ }
+ }
+
+ // S609
+ if checker.enabled(Rule::UnixCommandWildcardInjection) {
+ if matches!(call_kind, Some(CallKind::Shell))
+ || matches!(
+ (call_kind, shell_keyword),
+ (
+ Some(CallKind::Subprocess),
+ Some(ShellKeyword {
+ truthiness: Truthiness::Truthy,
+ keyword: _,
+ })
+ )
+ )
+ {
+ if let Some(arg) = args.first() {
+ if is_wildcard_command(arg) {
+ checker
+ .diagnostics
+ .push(Diagnostic::new(UnixCommandWildcardInjection, func.range()));
}
}
}
}
}
+
+#[derive(Copy, Clone, Debug)]
+enum CallKind {
+ Subprocess,
+ Shell,
+ NoShell,
+}
+
+/// Return the [`CallKind`] of the given function call.
+fn get_call_kind(func: &Expr, model: &SemanticModel) -> Option<CallKind> {
+ model
+ .resolve_call_path(func)
+ .and_then(|call_path| match call_path.as_slice() {
+ &[module, submodule] => match module {
+ "os" => match submodule {
+ "execl" | "execle" | "execlp" | "execlpe" | "execv" | "execve" | "execvp"
+ | "execvpe" | "spawnl" | "spawnle" | "spawnlp" | "spawnlpe" | "spawnv"
+ | "spawnve" | "spawnvp" | "spawnvpe" | "startfile" => Some(CallKind::NoShell),
+ "system" | "popen" | "popen2" | "popen3" | "popen4" => Some(CallKind::Shell),
+ _ => None,
+ },
+ "subprocess" => match submodule {
+ "Popen" | "call" | "check_call" | "check_output" | "run" => {
+ Some(CallKind::Subprocess)
+ }
+ _ => None,
+ },
+ "popen2" => match submodule {
+ "popen2" | "popen3" | "popen4" | "Popen3" | "Popen4" => Some(CallKind::Shell),
+ _ => None,
+ },
+ "commands" => match submodule {
+ "getoutput" | "getstatusoutput" => Some(CallKind::Shell),
+ _ => None,
+ },
+ _ => None,
+ },
+ _ => None,
+ })
+}
+
+#[derive(Copy, Clone, Debug)]
+struct ShellKeyword<'a> {
+ /// Whether the `shell` keyword argument is set and evaluates to `True`.
+ truthiness: Truthiness,
+ /// The `shell` keyword argument.
+ keyword: &'a Keyword,
+}
+
+/// Return the `shell` keyword argument to the given function call, if any.
+fn find_shell_keyword<'a>(
+ model: &SemanticModel,
+ keywords: &'a [Keyword],
+) -> Option<ShellKeyword<'a>> {
+ keywords
+ .iter()
+ .find(|keyword| keyword.arg.as_ref().map_or(false, |arg| arg == "shell"))
+ .map(|keyword| ShellKeyword {
+ truthiness: Truthiness::from_expr(&keyword.value, |id| model.is_builtin(id)),
+ keyword,
+ })
+}
+
+/// Return `true` if the value provided to the `shell` call seems safe. This is based on Bandit's
+/// definition: string literals are considered okay, but dynamically-computed values are not.
+fn shell_call_seems_safe(arg: &Expr) -> bool {
+ matches!(
+ arg,
+ Expr::Constant(ast::ExprConstant {
+ value: Constant::Str(_),
+ ..
+ })
+ )
+}
+
+/// Return `true` if the string appears to be a full file path.
+///
+/// ## Examples
+/// ```python
+/// import subprocess
+///
+/// os.system("/bin/ls")
+/// os.system("./bin/ls")
+/// os.system(["/bin/ls"])
+/// os.system(["/bin/ls", "/tmp"])
+/// os.system(r"C:\\bin\ls")
+/// ```
+fn is_full_path(text: &str) -> bool {
+ let mut chars = text.chars();
+ let Some(first_char) = chars.next() else {
+ return false;
+ };
+
+ // Ex) `/bin/ls`
+ if first_char == '\\' || first_char == '/' || first_char == '.' {
+ return true;
+ }
+
+ // Ex) `C:`
+ if first_char.is_alphabetic() {
+ if let Some(second_char) = chars.next() {
+ if second_char == ':' {
+ return true;
+ }
+ }
+ }
+
+ false
+}
+
+/// Return `true` if the [`Expr`] is a string literal or list of string literals that starts with a
+/// partial path.
+fn is_partial_path(expr: &Expr) -> bool {
+ let string_literal = match expr {
+ Expr::List(ast::ExprList { elts, .. }) => elts.first().and_then(string_literal),
+ _ => string_literal(expr),
+ };
+ string_literal.map_or(false, |text| !is_full_path(text))
+}
+
+/// Return `true` if the [`Expr`] is a wildcard command.
+///
+/// ## Examples
+/// ```python
+/// import subprocess
+///
+/// subprocess.Popen("/bin/chown root: *", shell=True)
+/// subprocess.Popen(["/usr/local/bin/rsync", "*", "some_where:"], shell=True)
+/// ```
+fn is_wildcard_command(expr: &Expr) -> bool {
+ if let Expr::List(ast::ExprList { elts, .. }) = expr {
+ let mut has_star = false;
+ let mut has_command = false;
+ for elt in elts.iter() {
+ if let Some(text) = string_literal(elt) {
+ has_star |= text.contains('*');
+ has_command |= text.contains("chown")
+ || text.contains("chmod")
+ || text.contains("tar")
+ || text.contains("rsync");
+ }
+ if has_star && has_command {
+ break;
+ }
+ }
+ has_star && has_command
+ } else {
+ let string_literal = string_literal(expr);
+ string_literal.map_or(false, |text| {
+ text.contains('*')
+ && (text.contains("chown")
+ || text.contains("chmod")
+ || text.contains("tar")
+ || text.contains("rsync"))
+ })
+ }
+}
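
For orientation, a small Python sketch of call sites the rewritten S602-S609 checks should flag, following the fixtures and doc comments above; the `# S6xx` comments are illustrative annotations rather than tool output, and a given line may also trip other codes.

```python
import os
import subprocess


def flagged() -> None:
    # S605 (shell call) and S609: `os.popen` goes through a shell, and the
    # argument combines a wildcard with a sensitive command (`chmod`).
    os.popen("chmod +w foo*")

    # S602 and S609: `shell=True` with a wildcard reaching `chown`/`rsync`.
    subprocess.Popen("/bin/chown root: *", shell=True)
    subprocess.Popen(["/usr/local/bin/rsync", "*", "some_where:"], shell=True)

    # S607 (and S603): the executable is a partial path (`ls`, not `/bin/ls`).
    subprocess.run(["ls", "-l"])


def not_flagged_by_s609() -> None:
    # Without `shell=True` the wildcard never reaches a shell, so S609 stays
    # quiet (S603 still applies to the bare `subprocess` call).
    subprocess.Popen("/usr/local/bin/rsync * no_injection_here:")
```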
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/snmp_insecure_version.rs b/crates/ruff/src/rules/flake8_bandit/rules/snmp_insecure_version.rs
index 1a32c9620b587..60fc3b33dda77 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/snmp_insecure_version.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/snmp_insecure_version.rs
@@ -1,5 +1,5 @@
use num_traits::{One, Zero};
-use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword};
+use rustpython_parser::ast::{self, Constant, Expr, Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -18,14 +18,14 @@ impl Violation for SnmpInsecureVersion {
}
/// S508
-pub fn snmp_insecure_version(
+pub(crate) fn snmp_insecure_version(
checker: &mut Checker,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
) {
if checker
- .ctx
+ .semantic_model()
.resolve_call_path(func)
.map_or(false, |call_path| {
call_path.as_slice() == ["pysnmp", "hlapi", "CommunityData"]
@@ -33,10 +33,10 @@ pub fn snmp_insecure_version(
{
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(mp_model_arg) = call_args.keyword_argument("mpModel") {
- if let ExprKind::Constant {
+ if let Expr::Constant(ast::ExprConstant {
value: Constant::Int(value),
..
- } = &mp_model_arg.node
+ }) = &mp_model_arg
{
if value.is_zero() || value.is_one() {
checker
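
As a usage-level illustration of what S508 targets, assuming `pysnmp` is installed; the variable names are only for the example.

```python
from pysnmp.hlapi import CommunityData, UsmUserData

# S508: `mpModel=0` (SNMPv1) and `mpModel=1` (SNMPv2c) send the community
# string in cleartext, which is exactly the zero/one check above.
v1 = CommunityData("public", mpModel=0)
v2c = CommunityData("public", mpModel=1)

# Not flagged by S508: SNMPv3 user-based security instead of a community string.
v3 = UsmUserData("demo-user", "auth-key-1234", "priv-key-1234")
```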
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/snmp_weak_cryptography.rs b/crates/ruff/src/rules/flake8_bandit/rules/snmp_weak_cryptography.rs
index 72a586a8fbfa2..313c32a187e9d 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/snmp_weak_cryptography.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/snmp_weak_cryptography.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Expr, Keyword};
+use rustpython_parser::ast::{Expr, Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -20,14 +20,14 @@ impl Violation for SnmpWeakCryptography {
}
/// S509
-pub fn snmp_weak_cryptography(
+pub(crate) fn snmp_weak_cryptography(
checker: &mut Checker,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
) {
if checker
- .ctx
+ .semantic_model()
.resolve_call_path(func)
.map_or(false, |call_path| {
call_path.as_slice() == ["pysnmp", "hlapi", "UsmUserData"]
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/suspicious_function_call.rs b/crates/ruff/src/rules/flake8_bandit/rules/suspicious_function_call.rs
index 2921c9569be0d..6c6eba80feb99 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/suspicious_function_call.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/suspicious_function_call.rs
@@ -1,7 +1,7 @@
//! Check for calls to suspicious functions, or calls into suspicious modules.
//!
//! See:
-use rustpython_parser::ast::{Expr, ExprKind};
+use rustpython_parser::ast::{self, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -220,7 +220,7 @@ impl Violation for SuspiciousFTPLibUsage {
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum Reason {
+pub(crate) enum Reason {
Pickle,
Marshal,
InsecureHash,
@@ -250,7 +250,7 @@ struct SuspiciousMembers<'a> {
}
impl<'a> SuspiciousMembers<'a> {
- pub const fn new(members: &'a [&'a [&'a str]], reason: Reason) -> Self {
+ pub(crate) const fn new(members: &'a [&'a [&'a str]], reason: Reason) -> Self {
Self { members, reason }
}
}
@@ -261,7 +261,7 @@ struct SuspiciousModule<'a> {
}
impl<'a> SuspiciousModule<'a> {
- pub const fn new(name: &'a str, reason: Reason) -> Self {
+ pub(crate) const fn new(name: &'a str, reason: Reason) -> Self {
Self { name, reason }
}
}
@@ -465,12 +465,12 @@ const SUSPICIOUS_MODULES: &[SuspiciousModule] = &[
];
/// S001
-pub fn suspicious_function_call(checker: &mut Checker, expr: &Expr) {
- let ExprKind::Call { func, .. } = &expr.node else {
+pub(crate) fn suspicious_function_call(checker: &mut Checker, expr: &Expr) {
+ let Expr::Call(ast::ExprCall { func, .. }) = expr else {
return;
};
- let Some(reason) = checker.ctx.resolve_call_path(func).and_then(|call_path| {
+ let Some(reason) = checker.semantic_model().resolve_call_path(func).and_then(|call_path| {
for module in SUSPICIOUS_MEMBERS {
for member in module.members {
if call_path.as_slice() == *member {
@@ -512,7 +512,7 @@ pub fn suspicious_function_call(checker: &mut Checker, expr: &Expr) {
Reason::FTPLib => SuspiciousFTPLibUsage.into(),
};
let diagnostic = Diagnostic::new::<DiagnosticKind>(diagnostic_kind, expr.range());
- if checker.settings.rules.enabled(diagnostic.kind.rule()) {
+ if checker.enabled(diagnostic.kind.rule()) {
checker.diagnostics.push(diagnostic);
}
}
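
The `SUSPICIOUS_MEMBERS` table itself is outside this hunk; assuming it follows Bandit and lists `pickle.loads` under `Reason::Pickle` and `marshal.loads` under `Reason::Marshal`, this is the kind of call site the resolver above walks:

```python
import marshal
import pickle

# Reason::Pickle: unpickling untrusted data can execute arbitrary code.
value = pickle.loads(pickle.dumps({"ok": True}))

# Reason::Marshal: marshal has the same problem and no format guarantees.
number = marshal.loads(marshal.dumps(1))
```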
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/try_except_continue.rs b/crates/ruff/src/rules/flake8_bandit/rules/try_except_continue.rs
index 9a48016b75139..a64c33bc39bec 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/try_except_continue.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/try_except_continue.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Excepthandler, Expr, Stmt, StmtKind};
+use rustpython_parser::ast::{Excepthandler, Expr, Ranged, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -17,7 +17,7 @@ impl Violation for TryExceptContinue {
}
/// S112
-pub fn try_except_continue(
+pub(crate) fn try_except_continue(
checker: &mut Checker,
excepthandler: &Excepthandler,
type_: Option<&Expr>,
@@ -26,8 +26,8 @@ pub fn try_except_continue(
check_typed_exception: bool,
) {
if body.len() == 1
- && body[0].node == StmtKind::Continue
- && (check_typed_exception || is_untyped_exception(type_, checker))
+ && body[0].is_continue_stmt()
+ && (check_typed_exception || is_untyped_exception(type_, checker.semantic_model()))
{
checker
.diagnostics
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/try_except_pass.rs b/crates/ruff/src/rules/flake8_bandit/rules/try_except_pass.rs
index 3700fe79d7407..c740399349316 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/try_except_pass.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/try_except_pass.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Excepthandler, Expr, Stmt, StmtKind};
+use rustpython_parser::ast::{Excepthandler, Expr, Ranged, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -17,7 +17,7 @@ impl Violation for TryExceptPass {
}
/// S110
-pub fn try_except_pass(
+pub(crate) fn try_except_pass(
checker: &mut Checker,
excepthandler: &Excepthandler,
type_: Option<&Expr>,
@@ -26,8 +26,8 @@ pub fn try_except_pass(
check_typed_exception: bool,
) {
if body.len() == 1
- && body[0].node == StmtKind::Pass
- && (check_typed_exception || is_untyped_exception(type_, checker))
+ && body[0].is_pass_stmt()
+ && (check_typed_exception || is_untyped_exception(type_, checker.semantic_model()))
{
checker
.diagnostics
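
A short Python illustration of the handlers S110 and S112 now match via `is_pass_stmt`/`is_continue_stmt`; per the shared helper, only bare or broad (`Exception`-style) handlers are flagged unless `check_typed_exception` is enabled.

```python
def read_optional(path: str) -> str | None:
    try:
        with open(path) as f:
            return f.read()
    except Exception:  # S110: the only statement in the handler is `pass`
        pass
    return None


def print_ascii(items: list[str]) -> None:
    for item in items:
        try:
            print(item.encode("ascii"))
        except Exception:  # S112: `continue` silently swallows the failure
            continue
```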
diff --git a/crates/ruff/src/rules/flake8_bandit/rules/unsafe_yaml_load.rs b/crates/ruff/src/rules/flake8_bandit/rules/unsafe_yaml_load.rs
index cd05f639b6946..e5225eb0d49c1 100644
--- a/crates/ruff/src/rules/flake8_bandit/rules/unsafe_yaml_load.rs
+++ b/crates/ruff/src/rules/flake8_bandit/rules/unsafe_yaml_load.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Expr, ExprKind, Keyword};
+use rustpython_parser::ast::{self, Expr, Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -31,25 +31,30 @@ impl Violation for UnsafeYAMLLoad {
}
/// S506
-pub fn unsafe_yaml_load(checker: &mut Checker, func: &Expr, args: &[Expr], keywords: &[Keyword]) {
+pub(crate) fn unsafe_yaml_load(
+ checker: &mut Checker,
+ func: &Expr,
+ args: &[Expr],
+ keywords: &[Keyword],
+) {
if checker
- .ctx
+ .semantic_model()
.resolve_call_path(func)
.map_or(false, |call_path| call_path.as_slice() == ["yaml", "load"])
{
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(loader_arg) = call_args.argument("Loader", 1) {
if !checker
- .ctx
+ .semantic_model()
.resolve_call_path(loader_arg)
.map_or(false, |call_path| {
call_path.as_slice() == ["yaml", "SafeLoader"]
|| call_path.as_slice() == ["yaml", "CSafeLoader"]
})
{
- let loader = match &loader_arg.node {
- ExprKind::Attribute { attr, .. } => Some(attr.to_string()),
- ExprKind::Name { id, .. } => Some(id.to_string()),
+ let loader = match loader_arg {
+ Expr::Attribute(ast::ExprAttribute { attr, .. }) => Some(attr.to_string()),
+ Expr::Name(ast::ExprName { id, .. }) => Some(id.to_string()),
_ => None,
};
checker.diagnostics.push(Diagnostic::new(
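
A quick illustration of the loaders S506 distinguishes; the `data`/`safe_data` names are only for the example.

```python
import yaml

document = "a: 1"

# S506: `yaml.Loader` can construct arbitrary Python objects from YAML tags.
data = yaml.load(document, Loader=yaml.Loader)

# Not flagged: `SafeLoader` (or `CSafeLoader`) restricts loading to plain data.
safe_data = yaml.load(document, Loader=yaml.SafeLoader)
```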
diff --git a/crates/ruff/src/rules/flake8_bandit/settings.rs b/crates/ruff/src/rules/flake8_bandit/settings.rs
index 97ed127336831..d43bd48700ec8 100644
--- a/crates/ruff/src/rules/flake8_bandit/settings.rs
+++ b/crates/ruff/src/rules/flake8_bandit/settings.rs
@@ -1,24 +1,24 @@
//! Settings for the `flake8-bandit` plugin.
-use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use ruff_macros::{CacheKey, ConfigurationOptions};
+use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
fn default_tmp_dirs() -> Vec<String> {
["/tmp", "/var/tmp", "/dev/shm"]
- .map(std::string::ToString::to_string)
+ .map(ToString::to_string)
.to_vec()
}
#[derive(
- Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, JsonSchema,
+ Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8BanditOptions"
)]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = "[\"/tmp\", \"/var/tmp\", \"/dev/shm\"]",
diff --git a/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S105_S105.py.snap b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S105_S105.py.snap
index f4a844b1d16a5..58e7ab259c8ae 100644
--- a/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S105_S105.py.snap
+++ b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S105_S105.py.snap
@@ -1,7 +1,7 @@
---
source: crates/ruff/src/rules/flake8_bandit/mod.rs
---
-S105.py:13:12: S105 Possible hardcoded password: "s3cr3t"
+S105.py:13:12: S105 Possible hardcoded password assigned to: "password"
|
13 | # Errors
14 | password = "s3cr3t"
@@ -10,7 +10,7 @@ S105.py:13:12: S105 Possible hardcoded password: "s3cr3t"
16 | passwd = "s3cr3t"
|
-S105.py:14:9: S105 Possible hardcoded password: "s3cr3t"
+S105.py:14:9: S105 Possible hardcoded password assigned to: "_pass"
|
14 | # Errors
15 | password = "s3cr3t"
@@ -20,7 +20,7 @@ S105.py:14:9: S105 Possible hardcoded password: "s3cr3t"
18 | pwd = "s3cr3t"
|
-S105.py:15:10: S105 Possible hardcoded password: "s3cr3t"
+S105.py:15:10: S105 Possible hardcoded password assigned to: "passwd"
|
15 | password = "s3cr3t"
16 | _pass = "s3cr3t"
@@ -30,7 +30,7 @@ S105.py:15:10: S105 Possible hardcoded password: "s3cr3t"
19 | secret = "s3cr3t"
|
-S105.py:16:7: S105 Possible hardcoded password: "s3cr3t"
+S105.py:16:7: S105 Possible hardcoded password assigned to: "pwd"
|
16 | _pass = "s3cr3t"
17 | passwd = "s3cr3t"
@@ -40,7 +40,7 @@ S105.py:16:7: S105 Possible hardcoded password: "s3cr3t"
20 | token = "s3cr3t"
|
-S105.py:17:10: S105 Possible hardcoded password: "s3cr3t"
+S105.py:17:10: S105 Possible hardcoded password assigned to: "secret"
|
17 | passwd = "s3cr3t"
18 | pwd = "s3cr3t"
@@ -50,7 +50,7 @@ S105.py:17:10: S105 Possible hardcoded password: "s3cr3t"
21 | secrete = "s3cr3t"
|
-S105.py:18:9: S105 Possible hardcoded password: "s3cr3t"
+S105.py:18:9: S105 Possible hardcoded password assigned to: "token"
|
18 | pwd = "s3cr3t"
19 | secret = "s3cr3t"
@@ -60,7 +60,7 @@ S105.py:18:9: S105 Possible hardcoded password: "s3cr3t"
22 | safe = password = "s3cr3t"
|
-S105.py:19:11: S105 Possible hardcoded password: "s3cr3t"
+S105.py:19:11: S105 Possible hardcoded password assigned to: "secrete"
|
19 | secret = "s3cr3t"
20 | token = "s3cr3t"
@@ -70,7 +70,7 @@ S105.py:19:11: S105 Possible hardcoded password: "s3cr3t"
23 | password = safe = "s3cr3t"
|
-S105.py:20:19: S105 Possible hardcoded password: "s3cr3t"
+S105.py:20:19: S105 Possible hardcoded password assigned to: "password"
|
20 | token = "s3cr3t"
21 | secrete = "s3cr3t"
@@ -80,7 +80,7 @@ S105.py:20:19: S105 Possible hardcoded password: "s3cr3t"
24 | PASSWORD = "s3cr3t"
|
-S105.py:21:19: S105 Possible hardcoded password: "s3cr3t"
+S105.py:21:19: S105 Possible hardcoded password assigned to: "password"
|
21 | secrete = "s3cr3t"
22 | safe = password = "s3cr3t"
@@ -90,7 +90,7 @@ S105.py:21:19: S105 Possible hardcoded password: "s3cr3t"
25 | PassWord = "s3cr3t"
|
-S105.py:22:12: S105 Possible hardcoded password: "s3cr3t"
+S105.py:22:12: S105 Possible hardcoded password assigned to: "PASSWORD"
|
22 | safe = password = "s3cr3t"
23 | password = safe = "s3cr3t"
@@ -99,7 +99,7 @@ S105.py:22:12: S105 Possible hardcoded password: "s3cr3t"
25 | PassWord = "s3cr3t"
|
-S105.py:23:12: S105 Possible hardcoded password: "s3cr3t"
+S105.py:23:12: S105 Possible hardcoded password assigned to: "PassWord"
|
23 | password = safe = "s3cr3t"
24 | PASSWORD = "s3cr3t"
@@ -109,7 +109,7 @@ S105.py:23:12: S105 Possible hardcoded password: "s3cr3t"
27 | d["password"] = "s3cr3t"
|
-S105.py:25:17: S105 Possible hardcoded password: "s3cr3t"
+S105.py:25:17: S105 Possible hardcoded password assigned to: "password"
|
25 | PassWord = "s3cr3t"
26 |
@@ -119,7 +119,7 @@ S105.py:25:17: S105 Possible hardcoded password: "s3cr3t"
29 | d["passwd"] = "s3cr3t"
|
-S105.py:26:13: S105 Possible hardcoded password: "s3cr3t"
+S105.py:26:13: S105 Possible hardcoded password assigned to: "pass"
|
26 | d["password"] = "s3cr3t"
27 | d["pass"] = "s3cr3t"
@@ -128,7 +128,7 @@ S105.py:26:13: S105 Possible hardcoded password: "s3cr3t"
29 | d["pwd"] = "s3cr3t"
|
-S105.py:27:15: S105 Possible hardcoded password: "s3cr3t"
+S105.py:27:15: S105 Possible hardcoded password assigned to: "passwd"
|
27 | d["password"] = "s3cr3t"
28 | d["pass"] = "s3cr3t"
@@ -138,7 +138,7 @@ S105.py:27:15: S105 Possible hardcoded password: "s3cr3t"
31 | d["secret"] = "s3cr3t"
|
-S105.py:28:12: S105 Possible hardcoded password: "s3cr3t"
+S105.py:28:12: S105 Possible hardcoded password assigned to: "pwd"
|
28 | d["pass"] = "s3cr3t"
29 | d["passwd"] = "s3cr3t"
@@ -148,7 +148,7 @@ S105.py:28:12: S105 Possible hardcoded password: "s3cr3t"
32 | d["token"] = "s3cr3t"
|
-S105.py:29:15: S105 Possible hardcoded password: "s3cr3t"
+S105.py:29:15: S105 Possible hardcoded password assigned to: "secret"
|
29 | d["passwd"] = "s3cr3t"
30 | d["pwd"] = "s3cr3t"
@@ -158,7 +158,7 @@ S105.py:29:15: S105 Possible hardcoded password: "s3cr3t"
33 | d["secrete"] = "s3cr3t"
|
-S105.py:30:14: S105 Possible hardcoded password: "s3cr3t"
+S105.py:30:14: S105 Possible hardcoded password assigned to: "token"
|
30 | d["pwd"] = "s3cr3t"
31 | d["secret"] = "s3cr3t"
@@ -168,7 +168,7 @@ S105.py:30:14: S105 Possible hardcoded password: "s3cr3t"
34 | safe = d["password"] = "s3cr3t"
|
-S105.py:31:16: S105 Possible hardcoded password: "s3cr3t"
+S105.py:31:16: S105 Possible hardcoded password assigned to: "secrete"
|
31 | d["secret"] = "s3cr3t"
32 | d["token"] = "s3cr3t"
@@ -178,7 +178,7 @@ S105.py:31:16: S105 Possible hardcoded password: "s3cr3t"
35 | d["password"] = safe = "s3cr3t"
|
-S105.py:32:24: S105 Possible hardcoded password: "s3cr3t"
+S105.py:32:24: S105 Possible hardcoded password assigned to: "password"
|
32 | d["token"] = "s3cr3t"
33 | d["secrete"] = "s3cr3t"
@@ -187,7 +187,7 @@ S105.py:32:24: S105 Possible hardcoded password: "s3cr3t"
35 | d["password"] = safe = "s3cr3t"
|
-S105.py:33:24: S105 Possible hardcoded password: "s3cr3t"
+S105.py:33:24: S105 Possible hardcoded password assigned to: "password"
|
33 | d["secrete"] = "s3cr3t"
34 | safe = d["password"] = "s3cr3t"
@@ -195,7 +195,7 @@ S105.py:33:24: S105 Possible hardcoded password: "s3cr3t"
| ^^^^^^^^ S105
|
-S105.py:37:16: S105 Possible hardcoded password: "s3cr3t"
+S105.py:37:16: S105 Possible hardcoded password assigned to: "password"
|
37 | class MyClass:
38 | password = "s3cr3t"
@@ -203,7 +203,7 @@ S105.py:37:16: S105 Possible hardcoded password: "s3cr3t"
39 | safe = password
|
-S105.py:41:20: S105 Possible hardcoded password: "s3cr3t"
+S105.py:41:20: S105 Possible hardcoded password assigned to: "password"
|
41 | MyClass.password = "s3cr3t"
| ^^^^^^^^ S105
@@ -211,7 +211,7 @@ S105.py:41:20: S105 Possible hardcoded password: "s3cr3t"
43 | MyClass.passwd = "s3cr3t"
|
-S105.py:42:17: S105 Possible hardcoded password: "s3cr3t"
+S105.py:42:17: S105 Possible hardcoded password assigned to: "_pass"
|
42 | MyClass.password = "s3cr3t"
43 | MyClass._pass = "s3cr3t"
@@ -220,7 +220,7 @@ S105.py:42:17: S105 Possible hardcoded password: "s3cr3t"
45 | MyClass.pwd = "s3cr3t"
|
-S105.py:43:18: S105 Possible hardcoded password: "s3cr3t"
+S105.py:43:18: S105 Possible hardcoded password assigned to: "passwd"
|
43 | MyClass.password = "s3cr3t"
44 | MyClass._pass = "s3cr3t"
@@ -230,7 +230,7 @@ S105.py:43:18: S105 Possible hardcoded password: "s3cr3t"
47 | MyClass.secret = "s3cr3t"
|
-S105.py:44:15: S105 Possible hardcoded password: "s3cr3t"
+S105.py:44:15: S105 Possible hardcoded password assigned to: "pwd"
|
44 | MyClass._pass = "s3cr3t"
45 | MyClass.passwd = "s3cr3t"
@@ -240,7 +240,7 @@ S105.py:44:15: S105 Possible hardcoded password: "s3cr3t"
48 | MyClass.token = "s3cr3t"
|
-S105.py:45:18: S105 Possible hardcoded password: "s3cr3t"
+S105.py:45:18: S105 Possible hardcoded password assigned to: "secret"
|
45 | MyClass.passwd = "s3cr3t"
46 | MyClass.pwd = "s3cr3t"
@@ -250,7 +250,7 @@ S105.py:45:18: S105 Possible hardcoded password: "s3cr3t"
49 | MyClass.secrete = "s3cr3t"
|
-S105.py:46:17: S105 Possible hardcoded password: "s3cr3t"
+S105.py:46:17: S105 Possible hardcoded password assigned to: "token"
|
46 | MyClass.pwd = "s3cr3t"
47 | MyClass.secret = "s3cr3t"
@@ -259,7 +259,7 @@ S105.py:46:17: S105 Possible hardcoded password: "s3cr3t"
49 | MyClass.secrete = "s3cr3t"
|
-S105.py:47:19: S105 Possible hardcoded password: "s3cr3t"
+S105.py:47:19: S105 Possible hardcoded password assigned to: "secrete"
|
47 | MyClass.secret = "s3cr3t"
48 | MyClass.token = "s3cr3t"
@@ -269,7 +269,7 @@ S105.py:47:19: S105 Possible hardcoded password: "s3cr3t"
51 | password == "s3cr3t"
|
-S105.py:49:13: S105 Possible hardcoded password: "s3cr3t"
+S105.py:49:13: S105 Possible hardcoded password assigned to: "password"
|
49 | MyClass.secrete = "s3cr3t"
50 |
@@ -279,7 +279,7 @@ S105.py:49:13: S105 Possible hardcoded password: "s3cr3t"
53 | passwd == "s3cr3t"
|
-S105.py:50:10: S105 Possible hardcoded password: "s3cr3t"
+S105.py:50:10: S105 Possible hardcoded password assigned to: "_pass"
|
50 | password == "s3cr3t"
51 | _pass == "s3cr3t"
@@ -288,7 +288,7 @@ S105.py:50:10: S105 Possible hardcoded password: "s3cr3t"
53 | pwd == "s3cr3t"
|
-S105.py:51:11: S105 Possible hardcoded password: "s3cr3t"
+S105.py:51:11: S105 Possible hardcoded password assigned to: "passwd"
|
51 | password == "s3cr3t"
52 | _pass == "s3cr3t"
@@ -298,7 +298,7 @@ S105.py:51:11: S105 Possible hardcoded password: "s3cr3t"
55 | secret == "s3cr3t"
|
-S105.py:52:8: S105 Possible hardcoded password: "s3cr3t"
+S105.py:52:8: S105 Possible hardcoded password assigned to: "pwd"
|
52 | _pass == "s3cr3t"
53 | passwd == "s3cr3t"
@@ -308,7 +308,7 @@ S105.py:52:8: S105 Possible hardcoded password: "s3cr3t"
56 | token == "s3cr3t"
|
-S105.py:53:11: S105 Possible hardcoded password: "s3cr3t"
+S105.py:53:11: S105 Possible hardcoded password assigned to: "secret"
|
53 | passwd == "s3cr3t"
54 | pwd == "s3cr3t"
@@ -318,7 +318,7 @@ S105.py:53:11: S105 Possible hardcoded password: "s3cr3t"
57 | secrete == "s3cr3t"
|
-S105.py:54:10: S105 Possible hardcoded password: "s3cr3t"
+S105.py:54:10: S105 Possible hardcoded password assigned to: "token"
|
54 | pwd == "s3cr3t"
55 | secret == "s3cr3t"
@@ -328,7 +328,7 @@ S105.py:54:10: S105 Possible hardcoded password: "s3cr3t"
58 | password == safe == "s3cr3t"
|
-S105.py:55:12: S105 Possible hardcoded password: "s3cr3t"
+S105.py:55:12: S105 Possible hardcoded password assigned to: "secrete"
|
55 | secret == "s3cr3t"
56 | token == "s3cr3t"
@@ -337,7 +337,7 @@ S105.py:55:12: S105 Possible hardcoded password: "s3cr3t"
58 | password == safe == "s3cr3t"
|
-S105.py:56:21: S105 Possible hardcoded password: "s3cr3t"
+S105.py:56:21: S105 Possible hardcoded password assigned to: "password"
|
56 | token == "s3cr3t"
57 | secrete == "s3cr3t"
@@ -347,7 +347,7 @@ S105.py:56:21: S105 Possible hardcoded password: "s3cr3t"
60 | if token == "1\n2":
|
-S105.py:58:13: S105 Possible hardcoded password: "1\n2"
+S105.py:58:13: S105 Possible hardcoded password assigned to: "token"
|
58 | password == safe == "s3cr3t"
59 |
@@ -356,7 +356,7 @@ S105.py:58:13: S105 Possible hardcoded password: "1\n2"
61 | pass
|
-S105.py:61:13: S105 Possible hardcoded password: "3\t4"
+S105.py:61:13: S105 Possible hardcoded password assigned to: "token"
|
61 | pass
62 |
@@ -365,7 +365,7 @@ S105.py:61:13: S105 Possible hardcoded password: "3\t4"
64 | pass
|
-S105.py:64:13: S105 Possible hardcoded password: "5\r6"
+S105.py:64:13: S105 Possible hardcoded password assigned to: "token"
|
64 | pass
65 |
diff --git a/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S106_S106.py.snap b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S106_S106.py.snap
index a20ebddeef06e..150ebb7aa1538 100644
--- a/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S106_S106.py.snap
+++ b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S106_S106.py.snap
@@ -1,7 +1,7 @@
---
source: crates/ruff/src/rules/flake8_bandit/mod.rs
---
-S106.py:14:9: S106 Possible hardcoded password: "s3cr3t"
+S106.py:14:9: S106 Possible hardcoded password assigned to argument: "password"
|
14 | # Error
15 | func(1, password="s3cr3t")
diff --git a/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S107_S107.py.snap b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S107_S107.py.snap
index 39ac4461cba9d..ccfba2b0c9768 100644
--- a/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S107_S107.py.snap
+++ b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S107_S107.py.snap
@@ -1,35 +1,35 @@
---
source: crates/ruff/src/rules/flake8_bandit/mod.rs
---
-S107.py:5:29: S107 Possible hardcoded password: "default"
+S107.py:5:29: S107 Possible hardcoded password assigned to function default: "password"
|
5 | def default(first, password="default"):
| ^^^^^^^^^ S107
6 | pass
|
-S107.py:13:45: S107 Possible hardcoded password: "posonly"
+S107.py:13:45: S107 Possible hardcoded password assigned to function default: "password"
|
13 | def default_posonly(first, /, pos, password="posonly"):
| ^^^^^^^^^ S107
14 | pass
|
-S107.py:21:39: S107 Possible hardcoded password: "kwonly"
+S107.py:21:39: S107 Possible hardcoded password assigned to function default: "password"
|
21 | def default_kwonly(first, *, password="kwonly"):
| ^^^^^^^^ S107
22 | pass
|
-S107.py:29:39: S107 Possible hardcoded password: "posonly"
+S107.py:29:39: S107 Possible hardcoded password assigned to function default: "secret"
|
29 | def default_all(first, /, pos, secret="posonly", *, password="kwonly"):
| ^^^^^^^^^ S107
30 | pass
|
-S107.py:29:62: S107 Possible hardcoded password: "kwonly"
+S107.py:29:62: S107 Possible hardcoded password assigned to function default: "password"
|
29 | def default_all(first, /, pos, secret="posonly", *, password="kwonly"):
| ^^^^^^^^ S107
diff --git a/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S601_S601.py.snap b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S601_S601.py.snap
new file mode 100644
index 0000000000000..8e11bc8541f8a
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S601_S601.py.snap
@@ -0,0 +1,12 @@
+---
+source: crates/ruff/src/rules/flake8_bandit/mod.rs
+---
+S601.py:3:1: S601 Possible shell injection via Paramiko call; check inputs are properly sanitized
+ |
+3 | import paramiko
+4 |
+5 | paramiko.exec_command('something; really; unsafe')
+ | ^^^^^^^^^^^^^^^^^^^^^ S601
+ |
+
+
diff --git a/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S608_S608.py.snap b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S608_S608.py.snap
index c182e6ea1c710..b9d7fcb74591c 100644
--- a/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S608_S608.py.snap
+++ b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S608_S608.py.snap
@@ -406,9 +406,9 @@ S608.py:77:9: S608 Possible SQL injection vector through string-based query cons
|
77 | def query41():
78 | return (
-79 | "SELECT *"
+79 | "SELECT * "
| _________^
-80 | | "FROM table"
+80 | | "FROM table "
81 | | f"WHERE var = {var}"
| |____________________________^ S608
82 | )
@@ -448,8 +448,35 @@ S608.py:86:30: S608 Possible SQL injection vector through string-based query con
87 | query44 = cursor.execute("SELECT * FROM table WHERE var = {}".format(var))
88 | query45 = cursor.executemany("SELECT * FROM table WHERE var = %s" % var, [])
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ S608
-89 |
+89 |
90 | # # pass
|
+S608.py:98:9: S608 Possible SQL injection vector through string-based query construction
+ |
+ 98 | # # INSERT without INTO (e.g. MySQL and derivatives)
+ 99 | query = "INSERT table VALUES (%s)" % (var,)
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ S608
+100 |
+101 | # # REPLACE (e.g. MySQL and derivatives, SQLite)
+ |
+
+S608.py:101:9: S608 Possible SQL injection vector through string-based query construction
+ |
+101 | # # REPLACE (e.g. MySQL and derivatives, SQLite)
+102 | query = "REPLACE INTO table VALUES (%s)" % (var,)
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ S608
+103 | query = "REPLACE table VALUES (%s)" % (var,)
+ |
+
+S608.py:102:9: S608 Possible SQL injection vector through string-based query construction
+ |
+102 | # # REPLACE (e.g. MySQL and derivatives, SQLite)
+103 | query = "REPLACE INTO table VALUES (%s)" % (var,)
+104 | query = "REPLACE table VALUES (%s)" % (var,)
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ S608
+105 |
+106 | query = "Deselect something that is not SQL even though it has a ' from ' somewhere in %s." % "there"
+ |
+
diff --git a/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S609_S609.py.snap b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S609_S609.py.snap
new file mode 100644
index 0000000000000..d22cefbae2f87
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_bandit/snapshots/ruff__rules__flake8_bandit__tests__S609_S609.py.snap
@@ -0,0 +1,41 @@
+---
+source: crates/ruff/src/rules/flake8_bandit/mod.rs
+---
+S609.py:4:1: S609 Possible wildcard injection in call due to `*` usage
+ |
+4 | import subprocess
+5 |
+6 | os.popen("chmod +w foo*")
+ | ^^^^^^^^ S609
+7 | subprocess.Popen("/bin/chown root: *", shell=True)
+8 | subprocess.Popen(["/usr/local/bin/rsync", "*", "some_where:"], shell=True)
+ |
+
+S609.py:5:1: S609 Possible wildcard injection in call due to `*` usage
+ |
+5 | os.popen("chmod +w foo*")
+6 | subprocess.Popen("/bin/chown root: *", shell=True)
+ | ^^^^^^^^^^^^^^^^ S609
+7 | subprocess.Popen(["/usr/local/bin/rsync", "*", "some_where:"], shell=True)
+8 | subprocess.Popen("/usr/local/bin/rsync * no_injection_here:")
+ |
+
+S609.py:6:1: S609 Possible wildcard injection in call due to `*` usage
+ |
+ 6 | os.popen("chmod +w foo*")
+ 7 | subprocess.Popen("/bin/chown root: *", shell=True)
+ 8 | subprocess.Popen(["/usr/local/bin/rsync", "*", "some_where:"], shell=True)
+ | ^^^^^^^^^^^^^^^^ S609
+ 9 | subprocess.Popen("/usr/local/bin/rsync * no_injection_here:")
+10 | os.system("tar cf foo.tar bar/*")
+ |
+
+S609.py:8:1: S609 Possible wildcard injection in call due to `*` usage
+ |
+ 8 | subprocess.Popen(["/usr/local/bin/rsync", "*", "some_where:"], shell=True)
+ 9 | subprocess.Popen("/usr/local/bin/rsync * no_injection_here:")
+10 | os.system("tar cf foo.tar bar/*")
+ | ^^^^^^^^^ S609
+ |
+
+
diff --git a/crates/ruff/src/rules/flake8_blind_except/mod.rs b/crates/ruff/src/rules/flake8_blind_except/mod.rs
index ba972c84711c5..ce2611ffaa984 100644
--- a/crates/ruff/src/rules/flake8_blind_except/mod.rs
+++ b/crates/ruff/src/rules/flake8_blind_except/mod.rs
@@ -6,14 +6,13 @@ mod tests {
use std::path::Path;
use anyhow::Result;
-
use test_case::test_case;
use crate::registry::Rule;
use crate::test::test_path;
use crate::{assert_messages, settings};
- #[test_case(Rule::BlindExcept, Path::new("BLE.py"); "BLE001")]
+ #[test_case(Rule::BlindExcept, Path::new("BLE.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
let diagnostics = test_path(
diff --git a/crates/ruff/src/rules/flake8_blind_except/rules.rs b/crates/ruff/src/rules/flake8_blind_except/rules/blind_except.rs
similarity index 71%
rename from crates/ruff/src/rules/flake8_blind_except/rules.rs
rename to crates/ruff/src/rules/flake8_blind_except/rules/blind_except.rs
index 97772b3e8a1cb..ef4f52524ece7 100644
--- a/crates/ruff/src/rules/flake8_blind_except/rules.rs
+++ b/crates/ruff/src/rules/flake8_blind_except/rules/blind_except.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Expr, ExprKind, Stmt, StmtKind};
+use rustpython_parser::ast::{self, Expr, Ranged, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -9,7 +9,7 @@ use crate::checkers::ast::Checker;
#[violation]
pub struct BlindExcept {
- pub name: String,
+ name: String,
}
impl Violation for BlindExcept {
@@ -21,7 +21,7 @@ impl Violation for BlindExcept {
}
/// BLE001
-pub fn blind_except(
+pub(crate) fn blind_except(
checker: &mut Checker,
type_: Option<&Expr>,
name: Option<&str>,
@@ -30,17 +30,17 @@ pub fn blind_except(
let Some(type_) = type_ else {
return;
};
- let ExprKind::Name { id, .. } = &type_.node else {
+ let Expr::Name(ast::ExprName { id, .. }) = &type_ else {
return;
};
for exception in ["BaseException", "Exception"] {
- if id == exception && checker.ctx.is_builtin(exception) {
+ if id == exception && checker.semantic_model().is_builtin(exception) {
// If the exception is re-raised, don't flag an error.
if body.iter().any(|stmt| {
- if let StmtKind::Raise { exc, .. } = &stmt.node {
+ if let Stmt::Raise(ast::StmtRaise { exc, .. }) = stmt {
if let Some(exc) = exc {
- if let ExprKind::Name { id, .. } = &exc.node {
- name.map_or(false, |name| name == id)
+ if let Expr::Name(ast::ExprName { id, .. }) = exc.as_ref() {
+ name.map_or(false, |name| id == name)
} else {
false
}
@@ -56,16 +56,17 @@ pub fn blind_except(
// If the exception is logged, don't flag an error.
if body.iter().any(|stmt| {
- if let StmtKind::Expr { value } = &stmt.node {
- if let ExprKind::Call { func, keywords, .. } = &value.node {
- if logging::is_logger_candidate(&checker.ctx, func) {
- if let ExprKind::Attribute { attr, .. } = &func.node {
+ if let Stmt::Expr(ast::StmtExpr { value, range: _ }) = stmt {
+ if let Expr::Call(ast::ExprCall { func, keywords, .. }) = value.as_ref() {
+ if logging::is_logger_candidate(func, checker.semantic_model()) {
+ if let Some(attribute) = func.as_attribute_expr() {
+ let attr = attribute.attr.as_str();
if attr == "exception" {
return true;
}
if attr == "error" {
if let Some(keyword) = find_keyword(keywords, "exc_info") {
- if is_const_true(&keyword.node.value) {
+ if is_const_true(&keyword.value) {
return true;
}
}
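
A Python sketch of the three shapes BLE001 now distinguishes: a swallowed broad exception (flagged), a logged one, and a re-raised one (both allowed). The `logger` setup is the usual `logging` pattern that `is_logger_candidate` is expected to recognize.

```python
import logging
from urllib.request import urlopen

logger = logging.getLogger(__name__)


def fetch(url: str) -> bytes | None:
    try:
        return urlopen(url).read()
    except Exception:  # BLE001: broad handler, neither logged nor re-raised
        return None


def fetch_logged(url: str) -> bytes | None:
    try:
        return urlopen(url).read()
    except Exception:
        logger.exception("request to %s failed", url)  # logged, so not flagged
        return None


def fetch_reraised(url: str) -> bytes:
    try:
        return urlopen(url).read()
    except Exception as err:
        raise err  # re-raised under the bound name, so not flagged
```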
diff --git a/crates/ruff/src/rules/flake8_blind_except/rules/mod.rs b/crates/ruff/src/rules/flake8_blind_except/rules/mod.rs
new file mode 100644
index 0000000000000..520b3ece06db7
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_blind_except/rules/mod.rs
@@ -0,0 +1,3 @@
+pub(crate) use blind_except::{blind_except, BlindExcept};
+
+mod blind_except;
diff --git a/crates/ruff/src/rules/flake8_boolean_trap/helpers.rs b/crates/ruff/src/rules/flake8_boolean_trap/helpers.rs
new file mode 100644
index 0000000000000..2c397470b7e36
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_boolean_trap/helpers.rs
@@ -0,0 +1,70 @@
+use rustpython_parser::ast::{self, Constant, Expr, Ranged};
+
+use ruff_diagnostics::{Diagnostic, DiagnosticKind};
+
+use crate::checkers::ast::Checker;
+
+pub(super) const FUNC_CALL_NAME_ALLOWLIST: &[&str] = &[
+ "append",
+ "assertEqual",
+ "assertEquals",
+ "assertNotEqual",
+ "assertNotEquals",
+ "bool",
+ "bytes",
+ "count",
+ "failIfEqual",
+ "failUnlessEqual",
+ "float",
+ "fromkeys",
+ "get",
+ "getattr",
+ "getboolean",
+ "getfloat",
+ "getint",
+ "index",
+ "insert",
+ "int",
+ "param",
+ "pop",
+ "remove",
+ "set_blocking",
+ "set_enabled",
+ "setattr",
+ "__setattr__",
+ "setdefault",
+ "str",
+];
+
+pub(super) const FUNC_DEF_NAME_ALLOWLIST: &[&str] = &["__setitem__"];
+
+/// Returns `true` if an argument is allowed to use a boolean trap. To return
+/// `true`, the function name must be explicitly allowed, and the argument must
+/// be either the first or second argument in the call.
+pub(super) fn allow_boolean_trap(func: &Expr) -> bool {
+ if let Expr::Attribute(ast::ExprAttribute { attr, .. }) = func {
+ return FUNC_CALL_NAME_ALLOWLIST.contains(&attr.as_ref());
+ }
+
+ if let Expr::Name(ast::ExprName { id, .. }) = func {
+ return FUNC_CALL_NAME_ALLOWLIST.contains(&id.as_ref());
+ }
+
+ false
+}
+
+const fn is_boolean_arg(arg: &Expr) -> bool {
+ matches!(
+ &arg,
+ Expr::Constant(ast::ExprConstant {
+ value: Constant::Bool(_),
+ ..
+ })
+ )
+}
+
+pub(super) fn add_if_boolean(checker: &mut Checker, arg: &Expr, kind: DiagnosticKind) {
+ if is_boolean_arg(arg) {
+ checker.diagnostics.push(Diagnostic::new(kind, arg.range()));
+ }
+}
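
A small Python example of the call-site rule these helpers back (FBT003); `set_verbose` is a made-up function for illustration.

```python
import os


def set_verbose(enabled: bool) -> None:
    # (this definition would separately trip FBT001)
    print("verbose" if enabled else "quiet")


def examples() -> None:
    # FBT003: a bare boolean positional argument hides its meaning at the call site.
    set_verbose(True)

    # Not flagged: `set_blocking` and `get` are in FUNC_CALL_NAME_ALLOWLIST above,
    # where a positional boolean is the conventional calling style.
    os.set_blocking(0, False)
    default = {}.get("feature", False)
    print(default)
```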
diff --git a/crates/ruff/src/rules/flake8_boolean_trap/mod.rs b/crates/ruff/src/rules/flake8_boolean_trap/mod.rs
index a90031bcee6a4..ab0d6f87418fb 100644
--- a/crates/ruff/src/rules/flake8_boolean_trap/mod.rs
+++ b/crates/ruff/src/rules/flake8_boolean_trap/mod.rs
@@ -1,4 +1,5 @@
//! Rules from [flake8-boolean-trap](https://pypi.org/project/flake8-boolean-trap/).
+mod helpers;
pub(crate) mod rules;
#[cfg(test)]
@@ -6,16 +7,15 @@ mod tests {
use std::path::Path;
use anyhow::Result;
-
use test_case::test_case;
use crate::registry::Rule;
use crate::test::test_path;
use crate::{assert_messages, settings};
- #[test_case(Rule::BooleanPositionalArgInFunctionDefinition, Path::new("FBT.py"); "FBT001")]
- #[test_case(Rule::BooleanDefaultValueInFunctionDefinition, Path::new("FBT.py"); "FBT002")]
- #[test_case(Rule::BooleanPositionalValueInFunctionCall, Path::new("FBT.py"); "FBT003")]
+ #[test_case(Rule::BooleanPositionalArgInFunctionDefinition, Path::new("FBT.py"))]
+ #[test_case(Rule::BooleanDefaultValueInFunctionDefinition, Path::new("FBT.py"))]
+ #[test_case(Rule::BooleanPositionalValueInFunctionCall, Path::new("FBT.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
let diagnostics = test_path(
diff --git a/crates/ruff/src/rules/flake8_boolean_trap/rules.rs b/crates/ruff/src/rules/flake8_boolean_trap/rules.rs
deleted file mode 100644
index f09ece952f205..0000000000000
--- a/crates/ruff/src/rules/flake8_boolean_trap/rules.rs
+++ /dev/null
@@ -1,172 +0,0 @@
-use rustpython_parser::ast::{Arguments, Constant, Expr, ExprKind};
-
-use ruff_diagnostics::Violation;
-use ruff_diagnostics::{Diagnostic, DiagnosticKind};
-use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::call_path::collect_call_path;
-
-use crate::checkers::ast::Checker;
-
-#[violation]
-pub struct BooleanPositionalArgInFunctionDefinition;
-
-impl Violation for BooleanPositionalArgInFunctionDefinition {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!("Boolean positional arg in function definition")
- }
-}
-
-#[violation]
-pub struct BooleanDefaultValueInFunctionDefinition;
-
-impl Violation for BooleanDefaultValueInFunctionDefinition {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!("Boolean default value in function definition")
- }
-}
-
-#[violation]
-pub struct BooleanPositionalValueInFunctionCall;
-
-impl Violation for BooleanPositionalValueInFunctionCall {
- #[derive_message_formats]
- fn message(&self) -> String {
- format!("Boolean positional value in function call")
- }
-}
-
-const FUNC_CALL_NAME_ALLOWLIST: &[&str] = &[
- "assertEqual",
- "assertEquals",
- "assertNotEqual",
- "assertNotEquals",
- "bytes",
- "failIfEqual",
- "failUnlessEqual",
- "float",
- "fromkeys",
- "get",
- "getattr",
- "getboolean",
- "getfloat",
- "getint",
- "index",
- "int",
- "param",
- "pop",
- "setattr",
- "setdefault",
- "str",
-];
-
-const FUNC_DEF_NAME_ALLOWLIST: &[&str] = &["__setitem__"];
-
-/// Returns `true` if an argument is allowed to use a boolean trap. To return
-/// `true`, the function name must be explicitly allowed, and the argument must
-/// be either the first or second argument in the call.
-fn allow_boolean_trap(func: &Expr) -> bool {
- if let ExprKind::Attribute { attr, .. } = &func.node {
- return FUNC_CALL_NAME_ALLOWLIST.contains(&attr.as_ref());
- }
-
- if let ExprKind::Name { id, .. } = &func.node {
- return FUNC_CALL_NAME_ALLOWLIST.contains(&id.as_ref());
- }
-
- false
-}
-
-const fn is_boolean_arg(arg: &Expr) -> bool {
- matches!(
- &arg.node,
- ExprKind::Constant {
- value: Constant::Bool(_),
- ..
- }
- )
-}
-
-fn add_if_boolean(checker: &mut Checker, arg: &Expr, kind: DiagnosticKind) {
- if is_boolean_arg(arg) {
- checker.diagnostics.push(Diagnostic::new(kind, arg.range()));
- }
-}
-
-pub fn check_positional_boolean_in_def(
- checker: &mut Checker,
- name: &str,
- decorator_list: &[Expr],
- arguments: &Arguments,
-) {
- if FUNC_DEF_NAME_ALLOWLIST.contains(&name) {
- return;
- }
-
- if decorator_list.iter().any(|expr| {
- collect_call_path(expr).map_or(false, |call_path| call_path.as_slice() == [name, "setter"])
- }) {
- return;
- }
-
- for arg in arguments.posonlyargs.iter().chain(arguments.args.iter()) {
- if arg.node.annotation.is_none() {
- continue;
- }
- let Some(expr) = &arg.node.annotation else {
- continue;
- };
-
- // check for both bool (python class) and 'bool' (string annotation)
- let hint = match &expr.node {
- ExprKind::Name { id, .. } => id == "bool",
- ExprKind::Constant {
- value: Constant::Str(value),
- ..
- } => value == "bool",
- _ => false,
- };
- if !hint {
- continue;
- }
- checker.diagnostics.push(Diagnostic::new(
- BooleanPositionalArgInFunctionDefinition,
- arg.range(),
- ));
- }
-}
-
-pub fn check_boolean_default_value_in_function_definition(
- checker: &mut Checker,
- name: &str,
- decorator_list: &[Expr],
- arguments: &Arguments,
-) {
- if FUNC_DEF_NAME_ALLOWLIST.contains(&name) {
- return;
- }
-
- if decorator_list.iter().any(|expr| {
- collect_call_path(expr).map_or(false, |call_path| call_path.as_slice() == [name, "setter"])
- }) {
- return;
- }
-
- for arg in &arguments.defaults {
- add_if_boolean(checker, arg, BooleanDefaultValueInFunctionDefinition.into());
- }
-}
-
-pub fn check_boolean_positional_value_in_function_call(
- checker: &mut Checker,
- args: &[Expr],
- func: &Expr,
-) {
- if allow_boolean_trap(func) {
- return;
- }
- for arg in args {
- add_if_boolean(checker, arg, BooleanPositionalValueInFunctionCall.into());
- }
-}
diff --git a/crates/ruff/src/rules/flake8_boolean_trap/rules/check_boolean_default_value_in_function_definition.rs b/crates/ruff/src/rules/flake8_boolean_trap/rules/check_boolean_default_value_in_function_definition.rs
new file mode 100644
index 0000000000000..610a6c8491f30
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_boolean_trap/rules/check_boolean_default_value_in_function_definition.rs
@@ -0,0 +1,80 @@
+use rustpython_parser::ast::{Arguments, Expr};
+
+use ruff_diagnostics::Violation;
+
+use ruff_macros::{derive_message_formats, violation};
+use ruff_python_ast::call_path::collect_call_path;
+
+use crate::checkers::ast::Checker;
+use crate::rules::flake8_boolean_trap::helpers::add_if_boolean;
+
+use super::super::helpers::FUNC_DEF_NAME_ALLOWLIST;
+
+/// ## What it does
+/// Checks for the use of booleans as default values in function definitions.
+///
+/// ## Why is this bad?
+/// Calling a function with a boolean default means that the keyword argument
+/// can be omitted, which makes the function call ambiguous.
+///
+/// Instead, define the relevant argument as keyword-only.
+///
+/// ## Example
+/// ```python
+/// from math import ceil, floor
+///
+///
+/// def round_number(number: float, *, up: bool = True) -> int:
+/// return ceil(number) if up else floor(number)
+///
+///
+/// round_number(1.5)
+/// round_number(1.5, up=False)
+/// ```
+///
+/// Use instead:
+/// ```python
+/// from math import ceil, floor
+///
+///
+/// def round_number(number: float, *, up: bool) -> int:
+/// return ceil(number) if up else floor(number)
+///
+///
+/// round_number(1.5, up=True)
+/// round_number(1.5, up=False)
+/// ```
+///
+/// ## References
+/// - [Python documentation](https://docs.python.org/3/reference/expressions.html#calls)
+/// - [_How to Avoid “The Boolean Trap”_ by Adam Johnson](https://adamj.eu/tech/2021/07/10/python-type-hints-how-to-avoid-the-boolean-trap/)
+#[violation]
+pub struct BooleanDefaultValueInFunctionDefinition;
+
+impl Violation for BooleanDefaultValueInFunctionDefinition {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("Boolean default value in function definition")
+ }
+}
+
+pub(crate) fn check_boolean_default_value_in_function_definition(
+ checker: &mut Checker,
+ name: &str,
+ decorator_list: &[Expr],
+ arguments: &Arguments,
+) {
+ if FUNC_DEF_NAME_ALLOWLIST.contains(&name) {
+ return;
+ }
+
+ if decorator_list.iter().any(|expr| {
+ collect_call_path(expr).map_or(false, |call_path| call_path.as_slice() == [name, "setter"])
+ }) {
+ return;
+ }
+
+ for arg in &arguments.defaults {
+ add_if_boolean(checker, arg, BooleanDefaultValueInFunctionDefinition.into());
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_boolean_trap/rules/check_boolean_positional_value_in_function_call.rs b/crates/ruff/src/rules/flake8_boolean_trap/rules/check_boolean_positional_value_in_function_call.rs
new file mode 100644
index 0000000000000..5b39f0f24a308
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_boolean_trap/rules/check_boolean_positional_value_in_function_call.rs
@@ -0,0 +1,60 @@
+use rustpython_parser::ast::Expr;
+
+use ruff_diagnostics::Violation;
+
+use ruff_macros::{derive_message_formats, violation};
+
+use crate::checkers::ast::Checker;
+use crate::rules::flake8_boolean_trap::helpers::{add_if_boolean, allow_boolean_trap};
+
+/// ## What it does
+/// Checks for boolean positional arguments in function calls.
+///
+/// ## Why is this bad?
+/// Calling a function with boolean positional arguments is confusing as the
+/// meaning of the boolean value is not clear to the caller, and to future
+/// readers of the code.
+///
+/// ## Example
+/// ```python
+/// def foo(flag: bool) -> None:
+/// ...
+///
+///
+/// foo(True)
+/// ```
+///
+/// Use instead:
+/// ```python
+/// def foo(flag: bool) -> None:
+/// ...
+///
+///
+/// foo(flag=True)
+/// ```
+///
+/// ## References
+/// - [Python documentation](https://docs.python.org/3/reference/expressions.html#calls)
+/// - [_How to Avoid “The Boolean Trap”_ by Adam Johnson](https://adamj.eu/tech/2021/07/10/python-type-hints-how-to-avoid-the-boolean-trap/)
+#[violation]
+pub struct BooleanPositionalValueInFunctionCall;
+
+impl Violation for BooleanPositionalValueInFunctionCall {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("Boolean positional value in function call")
+ }
+}
+
+pub(crate) fn check_boolean_positional_value_in_function_call(
+ checker: &mut Checker,
+ args: &[Expr],
+ func: &Expr,
+) {
+ if allow_boolean_trap(func) {
+ return;
+ }
+ for arg in args {
+ add_if_boolean(checker, arg, BooleanPositionalValueInFunctionCall.into());
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_boolean_trap/rules/check_positional_boolean_in_def.rs b/crates/ruff/src/rules/flake8_boolean_trap/rules/check_positional_boolean_in_def.rs
new file mode 100644
index 0000000000000..3a6f2d4df36eb
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_boolean_trap/rules/check_positional_boolean_in_def.rs
@@ -0,0 +1,120 @@
+use rustpython_parser::ast::{self, Arguments, Constant, Expr, Ranged};
+
+use ruff_diagnostics::Diagnostic;
+use ruff_diagnostics::Violation;
+use ruff_macros::{derive_message_formats, violation};
+use ruff_python_ast::call_path::collect_call_path;
+
+use crate::checkers::ast::Checker;
+use crate::rules::flake8_boolean_trap::helpers::FUNC_DEF_NAME_ALLOWLIST;
+
+/// ## What it does
+/// Checks for boolean positional arguments in function definitions.
+///
+/// ## Why is this bad?
+/// Calling a function with boolean positional arguments is confusing as the
+/// meaning of the boolean value is not clear to the caller, and to future
+/// readers of the code.
+///
+/// The use of a boolean will also limit the function to only two possible
+/// behaviors, which makes the function difficult to extend in the future.
+///
+/// ## Example
+/// ```python
+/// from math import ceil, floor
+///
+///
+/// def round_number(number: float, up: bool) -> int:
+/// return ceil(number) if up else floor(number)
+///
+///
+/// round_number(1.5, True) # What does `True` mean?
+/// round_number(1.5, False) # What does `False` mean?
+/// ```
+///
+/// Instead, refactor into separate implementations:
+/// ```python
+/// from math import ceil, floor
+///
+///
+/// def round_up(number: float) -> int:
+/// return ceil(number)
+///
+///
+/// def round_down(number: float) -> int:
+/// return floor(number)
+///
+///
+/// round_up(1.5)
+/// round_down(1.5)
+/// ```
+///
+/// Or, refactor to use an `Enum`:
+/// ```python
+/// from enum import Enum
+///
+///
+/// class RoundingMethod(Enum):
+/// UP = 1
+/// DOWN = 2
+///
+///
+/// def round_number(value: float, method: RoundingMethod) -> float:
+/// ...
+/// ```
+///
+/// ## References
+/// - [Python documentation](https://docs.python.org/3/reference/expressions.html#calls)
+/// - [_How to Avoid “The Boolean Trap”_ by Adam Johnson](https://adamj.eu/tech/2021/07/10/python-type-hints-how-to-avoid-the-boolean-trap/)
+#[violation]
+pub struct BooleanPositionalArgInFunctionDefinition;
+
+impl Violation for BooleanPositionalArgInFunctionDefinition {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ format!("Boolean positional arg in function definition")
+ }
+}
+
+pub(crate) fn check_positional_boolean_in_def(
+ checker: &mut Checker,
+ name: &str,
+ decorator_list: &[Expr],
+ arguments: &Arguments,
+) {
+ if FUNC_DEF_NAME_ALLOWLIST.contains(&name) {
+ return;
+ }
+
+ if decorator_list.iter().any(|expr| {
+ collect_call_path(expr).map_or(false, |call_path| call_path.as_slice() == [name, "setter"])
+ }) {
+ return;
+ }
+
+ for arg in arguments.posonlyargs.iter().chain(arguments.args.iter()) {
+ if arg.annotation.is_none() {
+ continue;
+ }
+ let Some(expr) = &arg.annotation else {
+ continue;
+ };
+
+ // check for both bool (python class) and 'bool' (string annotation)
+ let hint = match expr.as_ref() {
+ Expr::Name(name) => &name.id == "bool",
+ Expr::Constant(ast::ExprConstant {
+ value: Constant::Str(value),
+ ..
+ }) => value == "bool",
+ _ => false,
+ };
+ if !hint {
+ continue;
+ }
+ checker.diagnostics.push(Diagnostic::new(
+ BooleanPositionalArgInFunctionDefinition,
+ arg.range(),
+ ));
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_boolean_trap/rules/mod.rs b/crates/ruff/src/rules/flake8_boolean_trap/rules/mod.rs
new file mode 100644
index 0000000000000..a0e9b8bd66727
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_boolean_trap/rules/mod.rs
@@ -0,0 +1,13 @@
+pub(crate) use check_boolean_default_value_in_function_definition::{
+ check_boolean_default_value_in_function_definition, BooleanDefaultValueInFunctionDefinition,
+};
+pub(crate) use check_boolean_positional_value_in_function_call::{
+ check_boolean_positional_value_in_function_call, BooleanPositionalValueInFunctionCall,
+};
+pub(crate) use check_positional_boolean_in_def::{
+ check_positional_boolean_in_def, BooleanPositionalArgInFunctionDefinition,
+};
+
+mod check_boolean_default_value_in_function_definition;
+mod check_boolean_positional_value_in_function_call;
+mod check_positional_boolean_in_def;
diff --git a/crates/ruff/src/rules/flake8_boolean_trap/snapshots/ruff__rules__flake8_boolean_trap__tests__FBT001_FBT.py.snap b/crates/ruff/src/rules/flake8_boolean_trap/snapshots/ruff__rules__flake8_boolean_trap__tests__FBT001_FBT.py.snap
index c940e93364d0f..4f54d13871d74 100644
--- a/crates/ruff/src/rules/flake8_boolean_trap/snapshots/ruff__rules__flake8_boolean_trap__tests__FBT001_FBT.py.snap
+++ b/crates/ruff/src/rules/flake8_boolean_trap/snapshots/ruff__rules__flake8_boolean_trap__tests__FBT001_FBT.py.snap
@@ -81,12 +81,12 @@ FBT.py:19:5: FBT001 Boolean positional arg in function definition
23 | kwonly_nonvalued_nohint,
|
-FBT.py:81:19: FBT001 Boolean positional arg in function definition
+FBT.py:85:19: FBT001 Boolean positional arg in function definition
|
-81 | # FBT001: Boolean positional arg in function definition
-82 | def foo(self, value: bool) -> None:
+85 | # FBT001: Boolean positional arg in function definition
+86 | def foo(self, value: bool) -> None:
| ^^^^^^^^^^^ FBT001
-83 | pass
+87 | pass
|
diff --git a/crates/ruff/src/rules/flake8_boolean_trap/snapshots/ruff__rules__flake8_boolean_trap__tests__FBT003_FBT.py.snap b/crates/ruff/src/rules/flake8_boolean_trap/snapshots/ruff__rules__flake8_boolean_trap__tests__FBT003_FBT.py.snap
index 43058488d336c..1aeb008182e70 100644
--- a/crates/ruff/src/rules/flake8_boolean_trap/snapshots/ruff__rules__flake8_boolean_trap__tests__FBT003_FBT.py.snap
+++ b/crates/ruff/src/rules/flake8_boolean_trap/snapshots/ruff__rules__flake8_boolean_trap__tests__FBT003_FBT.py.snap
@@ -28,4 +28,12 @@ FBT.py:57:17: FBT003 Boolean positional value in function call
61 | mylist.index(True)
|
+FBT.py:69:38: FBT003 Boolean positional value in function call
+ |
+69 | os.set_blocking(0, False)
+70 | g_action.set_enabled(True)
+71 | settings.set_enable_developer_extras(True)
+ | ^^^^ FBT003
+ |
+
diff --git a/crates/ruff/src/rules/flake8_bugbear/mod.rs b/crates/ruff/src/rules/flake8_bugbear/mod.rs
index 2a090ae515bc2..2bf567630621c 100644
--- a/crates/ruff/src/rules/flake8_bugbear/mod.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/mod.rs
@@ -6,50 +6,49 @@ pub mod settings;
mod tests {
use std::path::Path;
- use crate::assert_messages;
use anyhow::Result;
-
use test_case::test_case;
+ use crate::assert_messages;
use crate::registry::Rule;
use crate::settings::Settings;
use crate::test::test_path;
- #[test_case(Rule::UnaryPrefixIncrement, Path::new("B002.py"); "B002")]
- #[test_case(Rule::AssignmentToOsEnviron, Path::new("B003.py"); "B003")]
- #[test_case(Rule::UnreliableCallableCheck, Path::new("B004.py"); "B004")]
- #[test_case(Rule::StripWithMultiCharacters, Path::new("B005.py"); "B005")]
- #[test_case(Rule::MutableArgumentDefault, Path::new("B006_B008.py"); "B006")]
- #[test_case(Rule::UnusedLoopControlVariable, Path::new("B007.py"); "B007")]
- #[test_case(Rule::FunctionCallInDefaultArgument, Path::new("B006_B008.py"); "B008")]
- #[test_case(Rule::GetAttrWithConstant, Path::new("B009_B010.py"); "B009")]
- #[test_case(Rule::SetAttrWithConstant, Path::new("B009_B010.py"); "B010")]
- #[test_case(Rule::AssertFalse, Path::new("B011.py"); "B011")]
- #[test_case(Rule::JumpStatementInFinally, Path::new("B012.py"); "B012")]
- #[test_case(Rule::RedundantTupleInExceptionHandler, Path::new("B013.py"); "B013")]
- #[test_case(Rule::DuplicateHandlerException, Path::new("B014.py"); "B014")]
- #[test_case(Rule::UselessComparison, Path::new("B015.py"); "B015")]
- #[test_case(Rule::CannotRaiseLiteral, Path::new("B016.py"); "B016")]
- #[test_case(Rule::AssertRaisesException, Path::new("B017.py"); "B017")]
- #[test_case(Rule::UselessExpression, Path::new("B018.py"); "B018")]
- #[test_case(Rule::CachedInstanceMethod, Path::new("B019.py"); "B019")]
- #[test_case(Rule::LoopVariableOverridesIterator, Path::new("B020.py"); "B020")]
- #[test_case(Rule::FStringDocstring, Path::new("B021.py"); "B021")]
- #[test_case(Rule::UselessContextlibSuppress, Path::new("B022.py"); "B022")]
- #[test_case(Rule::FunctionUsesLoopVariable, Path::new("B023.py"); "B023")]
- #[test_case(Rule::AbstractBaseClassWithoutAbstractMethod, Path::new("B024.py"); "B024")]
- #[test_case(Rule::DuplicateTryBlockException, Path::new("B025.py"); "B025")]
- #[test_case(Rule::StarArgUnpackingAfterKeywordArg, Path::new("B026.py"); "B026")]
- #[test_case(Rule::EmptyMethodWithoutAbstractDecorator, Path::new("B027.py"); "B027")]
- #[test_case(Rule::EmptyMethodWithoutAbstractDecorator, Path::new("B027.pyi"); "B027_pyi")]
- #[test_case(Rule::EmptyMethodWithoutAbstractDecorator, Path::new("B027_extended.py"); "B027_extended")]
- #[test_case(Rule::NoExplicitStacklevel, Path::new("B028.py"); "B028")]
- #[test_case(Rule::ExceptWithEmptyTuple, Path::new("B029.py"); "B029")]
- #[test_case(Rule::ExceptWithNonExceptionClasses, Path::new("B030.py"); "B030")]
- #[test_case(Rule::ReuseOfGroupbyGenerator, Path::new("B031.py"); "B031")]
- #[test_case(Rule::UnintentionalTypeAnnotation, Path::new("B032.py"); "B032")]
- #[test_case(Rule::RaiseWithoutFromInsideExcept, Path::new("B904.py"); "B904")]
- #[test_case(Rule::ZipWithoutExplicitStrict, Path::new("B905.py"); "B905")]
+ #[test_case(Rule::AbstractBaseClassWithoutAbstractMethod, Path::new("B024.py"))]
+ #[test_case(Rule::AssertFalse, Path::new("B011.py"))]
+ #[test_case(Rule::AssertRaisesException, Path::new("B017.py"))]
+ #[test_case(Rule::AssignmentToOsEnviron, Path::new("B003.py"))]
+ #[test_case(Rule::CachedInstanceMethod, Path::new("B019.py"))]
+ #[test_case(Rule::CannotRaiseLiteral, Path::new("B016.py"))]
+ #[test_case(Rule::DuplicateHandlerException, Path::new("B014.py"))]
+ #[test_case(Rule::DuplicateTryBlockException, Path::new("B025.py"))]
+ #[test_case(Rule::DuplicateValue, Path::new("B033.py"))]
+ #[test_case(Rule::EmptyMethodWithoutAbstractDecorator, Path::new("B027.py"))]
+ #[test_case(Rule::EmptyMethodWithoutAbstractDecorator, Path::new("B027.pyi"))]
+ #[test_case(Rule::ExceptWithEmptyTuple, Path::new("B029.py"))]
+ #[test_case(Rule::ExceptWithNonExceptionClasses, Path::new("B030.py"))]
+ #[test_case(Rule::FStringDocstring, Path::new("B021.py"))]
+ #[test_case(Rule::FunctionCallInDefaultArgument, Path::new("B006_B008.py"))]
+ #[test_case(Rule::FunctionUsesLoopVariable, Path::new("B023.py"))]
+ #[test_case(Rule::GetAttrWithConstant, Path::new("B009_B010.py"))]
+ #[test_case(Rule::JumpStatementInFinally, Path::new("B012.py"))]
+ #[test_case(Rule::LoopVariableOverridesIterator, Path::new("B020.py"))]
+ #[test_case(Rule::MutableArgumentDefault, Path::new("B006_B008.py"))]
+ #[test_case(Rule::NoExplicitStacklevel, Path::new("B028.py"))]
+ #[test_case(Rule::RaiseWithoutFromInsideExcept, Path::new("B904.py"))]
+ #[test_case(Rule::RedundantTupleInExceptionHandler, Path::new("B013.py"))]
+ #[test_case(Rule::ReuseOfGroupbyGenerator, Path::new("B031.py"))]
+ #[test_case(Rule::SetAttrWithConstant, Path::new("B009_B010.py"))]
+ #[test_case(Rule::StarArgUnpackingAfterKeywordArg, Path::new("B026.py"))]
+ #[test_case(Rule::StripWithMultiCharacters, Path::new("B005.py"))]
+ #[test_case(Rule::UnaryPrefixIncrement, Path::new("B002.py"))]
+ #[test_case(Rule::UnintentionalTypeAnnotation, Path::new("B032.py"))]
+ #[test_case(Rule::UnreliableCallableCheck, Path::new("B004.py"))]
+ #[test_case(Rule::UnusedLoopControlVariable, Path::new("B007.py"))]
+ #[test_case(Rule::UselessComparison, Path::new("B015.py"))]
+ #[test_case(Rule::UselessContextlibSuppress, Path::new("B022.py"))]
+ #[test_case(Rule::UselessExpression, Path::new("B018.py"))]
+ #[test_case(Rule::ZipWithoutExplicitStrict, Path::new("B905.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
let diagnostics = test_path(
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/abstract_base_class.rs b/crates/ruff/src/rules/flake8_bugbear/rules/abstract_base_class.rs
index f49db94cee7a5..59ea9cc66579a 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/abstract_base_class.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/abstract_base_class.rs
@@ -1,21 +1,17 @@
-use anyhow::{anyhow, Result};
-use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword, Stmt, StmtKind};
+use rustpython_parser::ast::{self, Constant, Expr, Keyword, Ranged, Stmt};
-use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix, Violation};
+use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::source_code::{Locator, Stylist};
-use ruff_python_ast::whitespace::indentation;
+use ruff_python_ast::helpers::identifier_range;
use ruff_python_semantic::analyze::visibility::{is_abstract, is_overload};
-use ruff_python_semantic::context::Context;
+use ruff_python_semantic::model::SemanticModel;
-use crate::autofix::actions::get_or_import_symbol;
use crate::checkers::ast::Checker;
-use crate::importer::Importer;
use crate::registry::Rule;
#[violation]
pub struct AbstractBaseClassWithoutAbstractMethod {
- pub name: String,
+ name: String,
}
impl Violation for AbstractBaseClassWithoutAbstractMethod {
@@ -25,13 +21,12 @@ impl Violation for AbstractBaseClassWithoutAbstractMethod {
format!("`{name}` is an abstract base class, but it has no abstract methods")
}
}
-
#[violation]
pub struct EmptyMethodWithoutAbstractDecorator {
- pub name: String,
+ name: String,
}
-impl AlwaysAutofixableViolation for EmptyMethodWithoutAbstractDecorator {
+impl Violation for EmptyMethodWithoutAbstractDecorator {
#[derive_message_formats]
fn message(&self) -> String {
let EmptyMethodWithoutAbstractDecorator { name } = self;
@@ -39,36 +34,31 @@ impl AlwaysAutofixableViolation for EmptyMethodWithoutAbstractDecorator {
"`{name}` is an empty method in an abstract base class, but has no abstract decorator"
)
}
-
- fn autofix_title(&self) -> String {
- "Add the `@abstractmethod` decorator".to_string()
- }
}
-fn is_abc_class(context: &Context, bases: &[Expr], keywords: &[Keyword]) -> bool {
+fn is_abc_class(model: &SemanticModel, bases: &[Expr], keywords: &[Keyword]) -> bool {
keywords.iter().any(|keyword| {
- keyword
- .node
- .arg
- .as_ref()
- .map_or(false, |arg| arg == "metaclass")
- && context
- .resolve_call_path(&keyword.node.value)
+ keyword.arg.as_ref().map_or(false, |arg| arg == "metaclass")
+ && model
+ .resolve_call_path(&keyword.value)
.map_or(false, |call_path| {
call_path.as_slice() == ["abc", "ABCMeta"]
})
}) || bases.iter().any(|base| {
- context
+ model
.resolve_call_path(base)
.map_or(false, |call_path| call_path.as_slice() == ["abc", "ABC"])
})
}
fn is_empty_body(body: &[Stmt]) -> bool {
- body.iter().all(|stmt| match &stmt.node {
- StmtKind::Pass => true,
- StmtKind::Expr { value } => match &value.node {
- ExprKind::Constant { value, .. } => {
+ body.iter().all(|stmt| match stmt {
+ Stmt::Pass(_) => true,
+ Stmt::Expr(ast::StmtExpr {
+ value,
+ range: _range,
+ }) => match value.as_ref() {
+ Expr::Constant(ast::ExprConstant { value, .. }) => {
matches!(value, Constant::Str(..) | Constant::Ellipsis)
}
_ => false,
@@ -77,29 +67,9 @@ fn is_empty_body(body: &[Stmt]) -> bool {
})
}
-fn fix_abstractmethod_missing(
- context: &Context,
- importer: &Importer,
- locator: &Locator,
- stylist: &Stylist,
- stmt: &Stmt,
-) -> Result<Fix> {
- let indent = indentation(locator, stmt).ok_or(anyhow!("Unable to detect indentation"))?;
- let (import_edit, binding) =
- get_or_import_symbol("abc", "abstractmethod", context, importer, locator)?;
- let reference_edit = Edit::insertion(
- format!(
- "@{binding}{line_ending}{indent}",
- line_ending = stylist.line_ending().as_str(),
- ),
- stmt.range().start(),
- );
- Ok(Fix::from_iter([import_edit, reference_edit]))
-}
-
/// B024
/// B027
-pub fn abstract_base_class(
+pub(crate) fn abstract_base_class(
checker: &mut Checker,
stmt: &Stmt,
name: &str,
@@ -110,7 +80,7 @@ pub fn abstract_base_class(
if bases.len() + keywords.len() != 1 {
return;
}
- if !is_abc_class(&checker.ctx, bases, keywords) {
+ if !is_abc_class(checker.semantic_model(), bases, keywords) {
return;
}
@@ -118,73 +88,53 @@ pub fn abstract_base_class(
for stmt in body {
// https://github.com/PyCQA/flake8-bugbear/issues/293
// Ignore abc's that declares a class attribute that must be set
- if let StmtKind::AnnAssign { .. } | StmtKind::Assign { .. } = &stmt.node {
+ if let Stmt::AnnAssign(_) | Stmt::Assign(_) = stmt {
has_abstract_method = true;
continue;
}
let (
- StmtKind::FunctionDef {
+ Stmt::FunctionDef(ast::StmtFunctionDef {
decorator_list,
body,
name: method_name,
..
- } | StmtKind::AsyncFunctionDef {
+ }) | Stmt::AsyncFunctionDef(ast::StmtAsyncFunctionDef {
decorator_list,
body,
name: method_name,
..
- }
- ) = &stmt.node else {
+ })
+ ) = stmt else {
continue;
};
- let has_abstract_decorator = is_abstract(&checker.ctx, decorator_list);
+ let has_abstract_decorator = is_abstract(checker.semantic_model(), decorator_list);
has_abstract_method |= has_abstract_decorator;
- if !checker
- .settings
- .rules
- .enabled(Rule::EmptyMethodWithoutAbstractDecorator)
- {
+ if !checker.enabled(Rule::EmptyMethodWithoutAbstractDecorator) {
continue;
}
if !has_abstract_decorator
&& is_empty_body(body)
- && !is_overload(&checker.ctx, decorator_list)
+ && !is_overload(checker.semantic_model(), decorator_list)
{
- let mut diagnostic = Diagnostic::new(
+ checker.diagnostics.push(Diagnostic::new(
EmptyMethodWithoutAbstractDecorator {
name: format!("{name}.{method_name}"),
},
stmt.range(),
- );
- if checker.patch(Rule::EmptyMethodWithoutAbstractDecorator) {
- diagnostic.try_set_fix(|| {
- fix_abstractmethod_missing(
- &checker.ctx,
- &checker.importer,
- checker.locator,
- checker.stylist,
- stmt,
- )
- });
- }
- checker.diagnostics.push(diagnostic);
+ ));
}
}
- if checker
- .settings
- .rules
- .enabled(Rule::AbstractBaseClassWithoutAbstractMethod)
- {
+ if checker.enabled(Rule::AbstractBaseClassWithoutAbstractMethod) {
if !has_abstract_method {
checker.diagnostics.push(Diagnostic::new(
AbstractBaseClassWithoutAbstractMethod {
name: name.to_string(),
},
- stmt.range(),
+ identifier_range(stmt, checker.locator),
));
}
}
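As a rough Python illustration of the two diagnostics handled here (class names are invented): B024 fires on an `abc.ABC` subclass with no abstract method, and B027 on an empty method in an abstract base class that has no abstract decorator. Note that with this change B027 no longer carries an autofix.

```python
import abc


class Base(abc.ABC):          # B024: ABC without any abstract method
    def concrete(self) -> int:
        return 1


class Shape(abc.ABC):
    @abc.abstractmethod
    def area(self) -> float:
        ...

    def describe(self) -> None:  # B027: empty method without an abstract decorator
        ...
```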
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/assert_false.rs b/crates/ruff/src/rules/flake8_bugbear/rules/assert_false.rs
index 801a1fae98615..817483b00eb1b 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/assert_false.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/assert_false.rs
@@ -1,9 +1,8 @@
-use ruff_text_size::TextSize;
-use rustpython_parser::ast::{Constant, Expr, ExprContext, ExprKind, Stmt, StmtKind};
+use ruff_text_size::TextRange;
+use rustpython_parser::ast::{self, Constant, Expr, ExprContext, Ranged, Stmt};
-use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit};
+use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::helpers::unparse_stmt;
use crate::checkers::ast::Checker;
use crate::registry::AsRule;
@@ -23,50 +22,42 @@ impl AlwaysAutofixableViolation for AssertFalse {
}
fn assertion_error(msg: Option<&Expr>) -> Stmt {
- Stmt::new(
- TextSize::default(),
- TextSize::default(),
- StmtKind::Raise {
- exc: Some(Box::new(Expr::new(
- TextSize::default(),
- TextSize::default(),
- ExprKind::Call {
- func: Box::new(Expr::new(
- TextSize::default(),
- TextSize::default(),
- ExprKind::Name {
- id: "AssertionError".to_string(),
- ctx: ExprContext::Load,
- },
- )),
- args: if let Some(msg) = msg {
- vec![msg.clone()]
- } else {
- vec![]
- },
- keywords: vec![],
- },
- ))),
- cause: None,
- },
- )
+ Stmt::Raise(ast::StmtRaise {
+ range: TextRange::default(),
+ exc: Some(Box::new(Expr::Call(ast::ExprCall {
+ func: Box::new(Expr::Name(ast::ExprName {
+ id: "AssertionError".into(),
+ ctx: ExprContext::Load,
+ range: TextRange::default(),
+ })),
+ args: if let Some(msg) = msg {
+ vec![msg.clone()]
+ } else {
+ vec![]
+ },
+ keywords: vec![],
+ range: TextRange::default(),
+ }))),
+ cause: None,
+ })
}
/// B011
-pub fn assert_false(checker: &mut Checker, stmt: &Stmt, test: &Expr, msg: Option<&Expr>) {
- let ExprKind::Constant {
+pub(crate) fn assert_false(checker: &mut Checker, stmt: &Stmt, test: &Expr, msg: Option<&Expr>) {
+ let Expr::Constant(ast::ExprConstant {
value: Constant::Bool(false),
..
- } = &test.node else {
+ } )= &test else {
return;
};
let mut diagnostic = Diagnostic::new(AssertFalse, test.range());
if checker.patch(diagnostic.kind.rule()) {
- diagnostic.set_fix(Edit::range_replacement(
- unparse_stmt(&assertion_error(msg), checker.stylist),
+ #[allow(deprecated)]
+ diagnostic.set_fix(Fix::suggested(Edit::range_replacement(
+ checker.generator().stmt(&assertion_error(msg)),
stmt.range(),
- ));
+ )));
}
checker.diagnostics.push(diagnostic);
}
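The fix built above replaces the assertion with a `raise AssertionError(...)` constructed from the optional message. A hypothetical before/after (function names invented); the bad variant is kept inside a function so the snippet runs as-is:

```python
def validate(value: int) -> None:
    if value < 0:
        # B011: `assert False` is stripped under `python -O`
        assert False, "value must be non-negative"


def validate_fixed(value: int) -> None:
    if value < 0:
        # Suggested replacement: raise the AssertionError explicitly.
        raise AssertionError("value must be non-negative")
```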
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/assert_raises_exception.rs b/crates/ruff/src/rules/flake8_bugbear/rules/assert_raises_exception.rs
index 74ecfb14add51..be7a22f54e7d4 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/assert_raises_exception.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/assert_raises_exception.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{ExprKind, Stmt, Withitem};
+use rustpython_parser::ast::{self, Expr, Ranged, Stmt, Withitem};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -6,7 +6,7 @@ use ruff_macros::{derive_message_formats, violation};
use crate::checkers::ast::Checker;
#[derive(Debug, PartialEq, Eq)]
-pub enum AssertionKind {
+pub(crate) enum AssertionKind {
AssertRaises,
PytestRaises,
}
@@ -50,12 +50,12 @@ impl Violation for AssertRaisesException {
}
/// B017
-pub fn assert_raises_exception(checker: &mut Checker, stmt: &Stmt, items: &[Withitem]) {
+pub(crate) fn assert_raises_exception(checker: &mut Checker, stmt: &Stmt, items: &[Withitem]) {
let Some(item) = items.first() else {
return;
};
let item_context = &item.context_expr;
- let ExprKind::Call { func, args, keywords } = &item_context.node else {
+ let Expr::Call(ast::ExprCall { func, args, keywords, range: _ }) = &item_context else {
return;
};
if args.len() != 1 {
@@ -66,7 +66,7 @@ pub fn assert_raises_exception(checker: &mut Checker, stmt: &Stmt, items: &[With
}
if !checker
- .ctx
+ .semantic_model()
.resolve_call_path(args.first().unwrap())
.map_or(false, |call_path| call_path.as_slice() == ["", "Exception"])
{
@@ -74,21 +74,18 @@ pub fn assert_raises_exception(checker: &mut Checker, stmt: &Stmt, items: &[With
}
let kind = {
- if matches!(&func.node, ExprKind::Attribute { attr, .. } if attr == "assertRaises") {
+ if matches!(func.as_ref(), Expr::Attribute(ast::ExprAttribute { attr, .. }) if attr == "assertRaises")
+ {
AssertionKind::AssertRaises
} else if checker
- .ctx
+ .semantic_model()
.resolve_call_path(func)
.map_or(false, |call_path| {
call_path.as_slice() == ["pytest", "raises"]
})
- && !keywords.iter().any(|keyword| {
- keyword
- .node
- .arg
- .as_ref()
- .map_or(false, |arg| arg == "match")
- })
+ && !keywords
+ .iter()
+ .any(|keyword| keyword.arg.as_ref().map_or(false, |arg| arg == "match"))
{
AssertionKind::PytestRaises
} else {
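Roughly, the check above flags `assertRaises` and `pytest.raises` when the single argument resolves to the bare `Exception` class and, for `pytest.raises`, no `match=` keyword is supplied. An illustrative `unittest` sketch (test names invented):

```python
import unittest


class TestParsing(unittest.TestCase):
    def test_too_broad(self) -> None:
        with self.assertRaises(Exception):  # B017: any exception would pass
            int("not a number")

    def test_specific(self) -> None:
        with self.assertRaises(ValueError):  # not flagged
            int("not a number")
```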
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/assignment_to_os_environ.rs b/crates/ruff/src/rules/flake8_bugbear/rules/assignment_to_os_environ.rs
index c5aea9ca3d8a6..fcaa3d63739c3 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/assignment_to_os_environ.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/assignment_to_os_environ.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Expr, ExprKind};
+use rustpython_parser::ast::{self, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -15,20 +15,20 @@ impl Violation for AssignmentToOsEnviron {
}
}
/// B003
-pub fn assignment_to_os_environ(checker: &mut Checker, targets: &[Expr]) {
+pub(crate) fn assignment_to_os_environ(checker: &mut Checker, targets: &[Expr]) {
if targets.len() != 1 {
return;
}
let target = &targets[0];
- let ExprKind::Attribute { value, attr, .. } = &target.node else {
+ let Expr::Attribute(ast::ExprAttribute { value, attr, .. }) = target else {
return;
};
if attr != "environ" {
return;
}
- let ExprKind::Name { id, .. } = &value.node else {
- return;
- };
+ let Expr::Name(ast::ExprName { id, .. } )= value.as_ref() else {
+ return;
+ };
if id != "os" {
return;
}
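The check only fires when a single assignment target is exactly `os.environ`; mutating individual keys is untouched. A small illustration (the variable name is invented):

```python
import os

os.environ["MY_FLAG"] = "1"    # fine: mutates the existing mapping
os.environ = {"MY_FLAG": "1"}  # B003: rebinding os.environ does not affect
                               # the real process environment
```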
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/cached_instance_method.rs b/crates/ruff/src/rules/flake8_bugbear/rules/cached_instance_method.rs
index 12cca253550b1..cb19ec0904289 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/cached_instance_method.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/cached_instance_method.rs
@@ -1,8 +1,8 @@
-use rustpython_parser::ast::{Expr, ExprKind};
+use rustpython_parser::ast::{self, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
-use ruff_python_semantic::scope::ScopeKind;
+use ruff_python_semantic::model::SemanticModel;
use crate::checkers::ast::Checker;
@@ -18,25 +18,22 @@ impl Violation for CachedInstanceMethod {
}
}
-fn is_cache_func(checker: &Checker, expr: &Expr) -> bool {
- checker
- .ctx
- .resolve_call_path(expr)
- .map_or(false, |call_path| {
- call_path.as_slice() == ["functools", "lru_cache"]
- || call_path.as_slice() == ["functools", "cache"]
- })
+fn is_cache_func(model: &SemanticModel, expr: &Expr) -> bool {
+ model.resolve_call_path(expr).map_or(false, |call_path| {
+ call_path.as_slice() == ["functools", "lru_cache"]
+ || call_path.as_slice() == ["functools", "cache"]
+ })
}
/// B019
-pub fn cached_instance_method(checker: &mut Checker, decorator_list: &[Expr]) {
- if !matches!(checker.ctx.scope().kind, ScopeKind::Class(_)) {
+pub(crate) fn cached_instance_method(checker: &mut Checker, decorator_list: &[Expr]) {
+ if !checker.semantic_model().scope().kind.is_class() {
return;
}
for decorator in decorator_list {
// TODO(charlie): This should take into account `classmethod-decorators` and
// `staticmethod-decorators`.
- if let ExprKind::Name { id, .. } = &decorator.node {
+ if let Expr::Name(ast::ExprName { id, .. }) = decorator {
if id == "classmethod" || id == "staticmethod" {
return;
}
@@ -44,9 +41,9 @@ pub fn cached_instance_method(checker: &mut Checker, decorator_list: &[Expr]) {
}
for decorator in decorator_list {
if is_cache_func(
- checker,
- match &decorator.node {
- ExprKind::Call { func, .. } => func,
+ checker.semantic_model(),
+ match decorator {
+ Expr::Call(ast::ExprCall { func, .. }) => func,
_ => decorator,
},
) {
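A sketch of the pattern B019 targets (class and method names invented): caching a bound method with `functools.lru_cache` keeps `self` alive in the cache, which can leak instances. Per the early return above, functions decorated with `classmethod` or `staticmethod` are skipped.

```python
import functools


class Repository:
    def __init__(self, url: str) -> None:
        self.url = url

    @functools.lru_cache(maxsize=None)  # B019: the cache holds a reference to self
    def fetch(self, path: str) -> str:
        return f"{self.url}/{path}"

    @staticmethod
    @functools.lru_cache(maxsize=None)  # not flagged: staticmethods are skipped
    def normalize(path: str) -> str:
        return path.strip("/")
```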
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/cannot_raise_literal.rs b/crates/ruff/src/rules/flake8_bugbear/rules/cannot_raise_literal.rs
index 9709649cb00e1..6b2df20d5b410 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/cannot_raise_literal.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/cannot_raise_literal.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Expr, ExprKind};
+use rustpython_parser::ast::{Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -16,8 +16,8 @@ impl Violation for CannotRaiseLiteral {
}
/// B016
-pub fn cannot_raise_literal(checker: &mut Checker, expr: &Expr) {
- let ExprKind::Constant { .. } = &expr.node else {
+pub(crate) fn cannot_raise_literal(checker: &mut Checker, expr: &Expr) {
+ let Expr::Constant ( _) = expr else {
return;
};
checker
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/duplicate_exceptions.rs b/crates/ruff/src/rules/flake8_bugbear/rules/duplicate_exceptions.rs
index 3c6def1b6bb02..95eed78d56089 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/duplicate_exceptions.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/duplicate_exceptions.rs
@@ -1,21 +1,20 @@
use itertools::Itertools;
-use ruff_text_size::TextSize;
+use ruff_text_size::TextRange;
use rustc_hash::{FxHashMap, FxHashSet};
-use rustpython_parser::ast::{Excepthandler, ExcepthandlerKind, Expr, ExprContext, ExprKind};
+use rustpython_parser::ast::{self, Excepthandler, Expr, ExprContext, Ranged};
use ruff_diagnostics::{AlwaysAutofixableViolation, Violation};
-use ruff_diagnostics::{Diagnostic, Edit};
+use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::call_path;
use ruff_python_ast::call_path::CallPath;
-use ruff_python_ast::helpers::unparse_expr;
use crate::checkers::ast::Checker;
use crate::registry::{AsRule, Rule};
#[violation]
pub struct DuplicateTryBlockException {
- pub name: String,
+ name: String,
}
impl Violation for DuplicateTryBlockException {
@@ -49,14 +48,12 @@ impl AlwaysAutofixableViolation for DuplicateHandlerException {
}
fn type_pattern(elts: Vec<&Expr>) -> Expr {
- Expr::new(
- TextSize::default(),
- TextSize::default(),
- ExprKind::Tuple {
- elts: elts.into_iter().cloned().collect(),
- ctx: ExprContext::Load,
- },
- )
+ ast::ExprTuple {
+ elts: elts.into_iter().cloned().collect(),
+ ctx: ExprContext::Load,
+ range: TextRange::default(),
+ }
+ .into()
}
fn duplicate_handler_exceptions<'a>(
@@ -78,11 +75,7 @@ fn duplicate_handler_exceptions<'a>(
}
}
- if checker
- .settings
- .rules
- .enabled(Rule::DuplicateHandlerException)
- {
+ if checker.enabled(Rule::DuplicateHandlerException) {
// TODO(charlie): Handle "BaseException" and redundant exception aliases.
if !duplicates.is_empty() {
let mut diagnostic = Diagnostic::new(
@@ -96,14 +89,15 @@ fn duplicate_handler_exceptions<'a>(
expr.range(),
);
if checker.patch(diagnostic.kind.rule()) {
- diagnostic.set_fix(Edit::range_replacement(
+ #[allow(deprecated)]
+ diagnostic.set_fix(Fix::suggested(Edit::range_replacement(
if unique_elts.len() == 1 {
- unparse_expr(unique_elts[0], checker.stylist)
+ checker.generator().expr(unique_elts[0])
} else {
- unparse_expr(&type_pattern(unique_elts), checker.stylist)
+ checker.generator().expr(&type_pattern(unique_elts))
},
expr.range(),
- ));
+ )));
}
checker.diagnostics.push(diagnostic);
}
@@ -112,15 +106,15 @@ fn duplicate_handler_exceptions<'a>(
seen
}
-pub fn duplicate_exceptions(checker: &mut Checker, handlers: &[Excepthandler]) {
+pub(crate) fn duplicate_exceptions(checker: &mut Checker, handlers: &[Excepthandler]) {
let mut seen: FxHashSet<CallPath> = FxHashSet::default();
let mut duplicates: FxHashMap<CallPath, Vec<&Expr>> = FxHashMap::default();
for handler in handlers {
- let ExcepthandlerKind::ExceptHandler { type_: Some(type_), .. } = &handler.node else {
+ let Excepthandler::ExceptHandler(ast::ExcepthandlerExceptHandler { type_: Some(type_), .. }) = handler else {
continue;
};
- match &type_.node {
- ExprKind::Attribute { .. } | ExprKind::Name { .. } => {
+ match type_.as_ref() {
+ Expr::Attribute(_) | Expr::Name(_) => {
if let Some(call_path) = call_path::collect_call_path(type_) {
if seen.contains(&call_path) {
duplicates.entry(call_path).or_default().push(type_);
@@ -129,7 +123,7 @@ pub fn duplicate_exceptions(checker: &mut Checker, handlers: &[Excepthandler]) {
}
}
}
- ExprKind::Tuple { elts, .. } => {
+ Expr::Tuple(ast::ExprTuple { elts, .. }) => {
for (name, expr) in duplicate_handler_exceptions(checker, type_, elts) {
if seen.contains(&name) {
duplicates.entry(name).or_default().push(expr);
@@ -142,11 +136,7 @@ pub fn duplicate_exceptions(checker: &mut Checker, handlers: &[Excepthandler]) {
}
}
- if checker
- .settings
- .rules
- .enabled(Rule::DuplicateTryBlockException)
- {
+ if checker.enabled(Rule::DuplicateTryBlockException) {
for (name, exprs) in duplicates {
for expr in exprs {
checker.diagnostics.push(Diagnostic::new(
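Illustration of the two duplicate-exception diagnostics (function names invented): B014 reports a duplicate name inside one handler tuple and suggests collapsing it, while B025 reports the same exception handled by multiple `except` blocks of one `try`.

```python
def parse(text: str) -> int:
    try:
        return int(text)
    except (ValueError, ValueError):  # B014: duplicate inside the tuple
        return 0                      # suggested fix: `except ValueError:`


def load(text: str) -> int:
    try:
        return int(text)
    except ValueError:
        return 0
    except ValueError:                # B025: duplicate try-block exception
        return -1
```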
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/duplicate_value.rs b/crates/ruff/src/rules/flake8_bugbear/rules/duplicate_value.rs
new file mode 100644
index 0000000000000..e66da4623c283
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/duplicate_value.rs
@@ -0,0 +1,57 @@
+use rustc_hash::FxHashSet;
+use rustpython_parser::ast::{self, Expr, Ranged};
+
+use ruff_diagnostics::{Diagnostic, Violation};
+use ruff_macros::{derive_message_formats, violation};
+use ruff_python_ast::comparable::ComparableExpr;
+
+use crate::checkers::ast::Checker;
+
+/// ## What it does
+/// Checks for set literals that contain duplicate items.
+///
+/// ## Why is this bad?
+/// In Python, sets are unordered collections of unique elements. Including a
+/// duplicate item in a set literal is redundant, as the duplicate item will be
+/// replaced with a single item at runtime.
+///
+/// ## Example
+/// ```python
+/// {1, 2, 3, 1}
+/// ```
+///
+/// Use instead:
+/// ```python
+/// {1, 2, 3}
+/// ```
+#[violation]
+pub struct DuplicateValue {
+ value: String,
+}
+
+impl Violation for DuplicateValue {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ let DuplicateValue { value } = self;
+ format!("Sets should not contain duplicate item `{value}`")
+ }
+}
+
+/// B033
+pub(crate) fn duplicate_value(checker: &mut Checker, elts: &Vec<Expr>) {
+ let mut seen_values: FxHashSet<ComparableExpr> = FxHashSet::default();
+ for elt in elts {
+ if let Expr::Constant(ast::ExprConstant { value, .. }) = elt {
+ let comparable_value: ComparableExpr = elt.into();
+
+ if !seen_values.insert(comparable_value) {
+ checker.diagnostics.push(Diagnostic::new(
+ DuplicateValue {
+ value: checker.generator().constant(value),
+ },
+ elt.range(),
+ ));
+ }
+ };
+ }
+}
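Note that the loop above only inspects `Expr::Constant` elements, so only literal duplicates are reported; duplicated names or calls are left alone. For instance:

```python
ALLOWED_CODES = {200, 201, 204, 200}  # B033: duplicate item `200`

x = 1
values = {x, x}                       # not flagged: the elements are names,
                                      # not constants
```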
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/except_with_empty_tuple.rs b/crates/ruff/src/rules/flake8_bugbear/rules/except_with_empty_tuple.rs
index cf183851ecfe9..9ee4188a46ec2 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/except_with_empty_tuple.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/except_with_empty_tuple.rs
@@ -1,5 +1,5 @@
-use rustpython_parser::ast::Excepthandler;
-use rustpython_parser::ast::{ExcepthandlerKind, ExprKind};
+use rustpython_parser::ast::{self, Ranged};
+use rustpython_parser::ast::{Excepthandler, Expr};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -17,12 +17,12 @@ impl Violation for ExceptWithEmptyTuple {
}
/// B029
-pub fn except_with_empty_tuple(checker: &mut Checker, excepthandler: &Excepthandler) {
- let ExcepthandlerKind::ExceptHandler { type_, .. } = &excepthandler.node;
+pub(crate) fn except_with_empty_tuple(checker: &mut Checker, excepthandler: &Excepthandler) {
+ let Excepthandler::ExceptHandler(ast::ExcepthandlerExceptHandler { type_, .. }) = excepthandler;
let Some(type_) = type_ else {
return;
};
- let ExprKind::Tuple { elts, .. } = &type_.node else {
+ let Expr::Tuple(ast::ExprTuple { elts, .. }) = type_.as_ref() else {
return;
};
if elts.is_empty() {
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/except_with_non_exception_classes.rs b/crates/ruff/src/rules/flake8_bugbear/rules/except_with_non_exception_classes.rs
index dbdce82222720..4bfab07f94220 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/except_with_non_exception_classes.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/except_with_non_exception_classes.rs
@@ -1,6 +1,6 @@
use std::collections::VecDeque;
-use rustpython_parser::ast::{Excepthandler, ExcepthandlerKind, Expr, ExprKind};
+use rustpython_parser::ast::{self, Excepthandler, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -17,19 +17,20 @@ impl Violation for ExceptWithNonExceptionClasses {
}
}
-/// Given an [`Expr`], flatten any [`ExprKind::Starred`] expressions.
+/// Given an [`Expr`], flatten any [`Expr::Starred`] expressions.
/// This should leave any unstarred iterables alone (subsequently raising a
/// warning for B029).
fn flatten_starred_iterables(expr: &Expr) -> Vec<&Expr> {
- let ExprKind::Tuple { elts, .. } = &expr.node else {
+ let Expr::Tuple(ast::ExprTuple { elts, .. } )= expr else {
return vec![expr];
};
let mut flattened_exprs: Vec<&Expr> = Vec::with_capacity(elts.len());
let mut exprs_to_process: VecDeque<&Expr> = elts.iter().collect();
while let Some(expr) = exprs_to_process.pop_front() {
- match &expr.node {
- ExprKind::Starred { value, .. } => match &value.node {
- ExprKind::Tuple { elts, .. } | ExprKind::List { elts, .. } => {
+ match expr {
+ Expr::Starred(ast::ExprStarred { value, .. }) => match value.as_ref() {
+ Expr::Tuple(ast::ExprTuple { elts, .. })
+ | Expr::List(ast::ExprList { elts, .. }) => {
exprs_to_process.append(&mut elts.iter().collect());
}
_ => flattened_exprs.push(value),
@@ -41,18 +42,18 @@ fn flatten_starred_iterables(expr: &Expr) -> Vec<&Expr> {
}
/// B030
-pub fn except_with_non_exception_classes(checker: &mut Checker, excepthandler: &Excepthandler) {
- let ExcepthandlerKind::ExceptHandler { type_, .. } = &excepthandler.node;
+pub(crate) fn except_with_non_exception_classes(
+ checker: &mut Checker,
+ excepthandler: &Excepthandler,
+) {
+ let Excepthandler::ExceptHandler(ast::ExcepthandlerExceptHandler { type_, .. }) = excepthandler;
let Some(type_) = type_ else {
return;
};
for expr in flatten_starred_iterables(type_) {
if !matches!(
- &expr.node,
- ExprKind::Subscript { .. }
- | ExprKind::Attribute { .. }
- | ExprKind::Name { .. }
- | ExprKind::Call { .. },
+ expr,
+ Expr::Subscript(_) | Expr::Attribute(_) | Expr::Name(_) | Expr::Call(_),
) {
checker
.diagnostics
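The helper above unpacks starred tuples and lists inside the `except` clause before checking each element, so a starred group of names passes, while an entry that is not a name, attribute, call, or subscript is reported. Illustrative sketch; the second handler is deliberately wrong and the function is never called:

```python
def lookup(mapping, key):
    try:
        return mapping[key]
    except (ValueError, *(KeyError, IndexError)):  # starred tuple is flattened; all names
        return None
    except ("KeyError",):                          # B030: a string is not an exception class
        return None
```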
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/f_string_docstring.rs b/crates/ruff/src/rules/flake8_bugbear/rules/f_string_docstring.rs
index 423f7818ad089..5453b71569f7e 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/f_string_docstring.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/f_string_docstring.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{ExprKind, Stmt, StmtKind};
+use rustpython_parser::ast::{self, Expr, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -13,21 +13,20 @@ impl Violation for FStringDocstring {
#[derive_message_formats]
fn message(&self) -> String {
format!(
- "f-string used as docstring. This will be interpreted by python as a joined string \
- rather than a docstring."
+ "f-string used as docstring. Python will interpret this as a joined string, rather than a docstring."
)
}
}
/// B021
-pub fn f_string_docstring(checker: &mut Checker, body: &[Stmt]) {
+pub(crate) fn f_string_docstring(checker: &mut Checker, body: &[Stmt]) {
let Some(stmt) = body.first() else {
return;
};
- let StmtKind::Expr { value } = &stmt.node else {
+ let Stmt::Expr(ast::StmtExpr { value, range: _ }) = stmt else {
return;
};
- let ExprKind::JoinedStr { .. } = value.node else {
+ let Expr::JoinedStr ( _) = value.as_ref() else {
return;
};
checker.diagnostics.push(Diagnostic::new(
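In Python terms, the check looks for a body whose first statement is an f-string, which the interpreter treats as an ordinary expression rather than a docstring (so `__doc__` stays `None`). Illustrative only:

```python
API_VERSION = 2


def describe() -> str:
    f"""Returns a description for API v{API_VERSION}."""  # B021: not a docstring
    return f"API v{API_VERSION}"


def describe_fixed() -> str:
    """Return a description for API v2."""
    return f"API v{API_VERSION}"


assert describe.__doc__ is None
assert describe_fixed.__doc__ is not None
```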
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/function_call_argument_default.rs b/crates/ruff/src/rules/flake8_bugbear/rules/function_call_argument_default.rs
index 4d775e60f247e..a36b571c12fad 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/function_call_argument_default.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/function_call_argument_default.rs
@@ -1,14 +1,14 @@
use ruff_text_size::TextRange;
-use rustpython_parser::ast::{Arguments, Constant, Expr, ExprKind};
+use rustpython_parser::ast::{self, Arguments, Expr, Ranged};
use ruff_diagnostics::Violation;
use ruff_diagnostics::{Diagnostic, DiagnosticKind};
use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::call_path::from_qualified_name;
-use ruff_python_ast::call_path::{compose_call_path, CallPath};
+use ruff_python_ast::call_path::{compose_call_path, from_qualified_name, CallPath};
use ruff_python_ast::visitor;
use ruff_python_ast::visitor::Visitor;
use ruff_python_semantic::analyze::typing::is_immutable_func;
+use ruff_python_semantic::model::SemanticModel;
use crate::checkers::ast::Checker;
use crate::rules::flake8_bugbear::rules::mutable_argument_default::is_mutable_func;
@@ -16,7 +16,7 @@ use crate::rules::flake8_bugbear::rules::mutable_argument_default::is_mutable_fu
/// ## What it does
/// Checks for function calls in default function arguments.
///
-/// ## Why is it bad?
+/// ## Why is this bad?
/// Any function call that's used in a default argument will only be performed
/// once, at definition time. The returned value will then be reused by all
/// calls to the function, which can lead to unexpected behaviour.
@@ -29,6 +29,7 @@ use crate::rules::flake8_bugbear::rules::mutable_argument_default::is_mutable_fu
/// def create_list() -> list[int]:
/// return [1, 2, 3]
///
+///
/// def mutable_default(arg: list[int] = create_list()) -> list[int]:
/// arg.append(4)
/// return arg
@@ -49,6 +50,7 @@ use crate::rules::flake8_bugbear::rules::mutable_argument_default::is_mutable_fu
/// ```python
/// I_KNOW_THIS_IS_SHARED_STATE = create_list()
///
+///
/// def mutable_default(arg: list[int] = I_KNOW_THIS_IS_SHARED_STATE) -> list[int]:
/// arg.append(4)
/// return arg
@@ -71,9 +73,19 @@ impl Violation for FunctionCallInDefaultArgument {
}
struct ArgumentDefaultVisitor<'a> {
- checker: &'a Checker<'a>,
- diagnostics: Vec<(DiagnosticKind, TextRange)>,
+ model: &'a SemanticModel<'a>,
extend_immutable_calls: Vec<CallPath<'a>>,
+ diagnostics: Vec<(DiagnosticKind, TextRange)>,
+}
+
+impl<'a> ArgumentDefaultVisitor<'a> {
+ fn new(model: &'a SemanticModel<'a>, extend_immutable_calls: Vec<CallPath<'a>>) -> Self {
+ Self {
+ model,
+ extend_immutable_calls,
+ diagnostics: Vec::new(),
+ }
+ }
}
impl<'a, 'b> Visitor<'b> for ArgumentDefaultVisitor<'b>
@@ -81,11 +93,10 @@ where
'b: 'a,
{
fn visit_expr(&mut self, expr: &'b Expr) {
- match &expr.node {
- ExprKind::Call { func, args, .. } => {
- if !is_mutable_func(self.checker, func)
- && !is_immutable_func(&self.checker.ctx, func, &self.extend_immutable_calls)
- && !is_nan_or_infinity(func, args)
+ match expr {
+ Expr::Call(ast::ExprCall { func, .. }) => {
+ if !is_mutable_func(self.model, func)
+ && !is_immutable_func(self.model, func, &self.extend_immutable_calls)
{
self.diagnostics.push((
FunctionCallInDefaultArgument {
@@ -97,37 +108,14 @@ where
}
visitor::walk_expr(self, expr);
}
- ExprKind::Lambda { .. } => {}
+ Expr::Lambda(_) => {}
_ => visitor::walk_expr(self, expr),
}
}
}
-fn is_nan_or_infinity(expr: &Expr, args: &[Expr]) -> bool {
- let ExprKind::Name { id, .. } = &expr.node else {
- return false;
- };
- if id != "float" {
- return false;
- }
- let Some(arg) = args.first() else {
- return false;
- };
- let ExprKind::Constant {
- value: Constant::Str(value),
- ..
- } = &arg.node else {
- return false;
- };
- let lowercased = value.to_lowercase();
- matches!(
- lowercased.as_str(),
- "nan" | "+nan" | "-nan" | "inf" | "+inf" | "-inf" | "infinity" | "+infinity" | "-infinity"
- )
-}
-
/// B008
-pub fn function_call_argument_default(checker: &mut Checker, arguments: &Arguments) {
+pub(crate) fn function_call_argument_default(checker: &mut Checker, arguments: &Arguments) {
// Map immutable calls to (module, member) format.
let extend_immutable_calls: Vec<CallPath> = checker
.settings
@@ -137,11 +125,8 @@ pub fn function_call_argument_default(checker: &mut Checker, arguments: &Argumen
.map(|target| from_qualified_name(target))
.collect();
let diagnostics = {
- let mut visitor = ArgumentDefaultVisitor {
- checker,
- diagnostics: vec![],
- extend_immutable_calls,
- };
+ let mut visitor =
+ ArgumentDefaultVisitor::new(checker.semantic_model(), extend_immutable_calls);
for expr in arguments
.defaults
.iter()
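A hedged sketch of what the visitor flags (helper names invented): a call in a default value runs once, at definition time, unless the callee is a known mutable constructor, resolves as immutable, or is listed in the `extend-immutable-calls` setting.

```python
from __future__ import annotations


def create_list() -> list[int]:
    return [1, 2, 3]


def append_default(arg: list[int] = create_list()) -> list[int]:  # B008
    arg.append(4)
    return arg


def append_fixed(arg: list[int] | None = None) -> list[int]:
    if arg is None:
        arg = create_list()  # evaluated on every call instead
    arg.append(4)
    return arg
```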
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/function_uses_loop_variable.rs b/crates/ruff/src/rules/flake8_bugbear/rules/function_uses_loop_variable.rs
index 1651bf2ab9732..d6d061534fe39 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/function_uses_loop_variable.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/function_uses_loop_variable.rs
@@ -1,6 +1,5 @@
-use ruff_text_size::TextRange;
use rustc_hash::FxHashSet;
-use rustpython_parser::ast::{Comprehension, Expr, ExprContext, ExprKind, Stmt, StmtKind};
+use rustpython_parser::ast::{self, Comprehension, Expr, ExprContext, Ranged, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -13,7 +12,7 @@ use crate::checkers::ast::Checker;
#[violation]
pub struct FunctionUsesLoopVariable {
- pub name: String,
+ name: String,
}
impl Violation for FunctionUsesLoopVariable {
@@ -27,18 +26,18 @@ impl Violation for FunctionUsesLoopVariable {
#[derive(Default)]
struct LoadedNamesVisitor<'a> {
// Tuple of: name, defining expression, and defining range.
- loaded: Vec<(&'a str, &'a Expr, TextRange)>,
+ loaded: Vec<(&'a str, &'a Expr)>,
// Tuple of: name, defining expression, and defining range.
- stored: Vec<(&'a str, &'a Expr, TextRange)>,
+ stored: Vec<(&'a str, &'a Expr)>,
}
/// `Visitor` to collect all used identifiers in a statement.
impl<'a> Visitor<'a> for LoadedNamesVisitor<'a> {
fn visit_expr(&mut self, expr: &'a Expr) {
- match &expr.node {
- ExprKind::Name { id, ctx } => match ctx {
- ExprContext::Load => self.loaded.push((id, expr, expr.range())),
- ExprContext::Store => self.stored.push((id, expr, expr.range())),
+ match expr {
+ Expr::Name(ast::ExprName { id, ctx, range: _ }) => match ctx {
+ ExprContext::Load => self.loaded.push((id, expr)),
+ ExprContext::Store => self.stored.push((id, expr)),
ExprContext::Del => {}
},
_ => visitor::walk_expr(self, expr),
@@ -48,7 +47,7 @@ impl<'a> Visitor<'a> for LoadedNamesVisitor<'a> {
#[derive(Default)]
struct SuspiciousVariablesVisitor<'a> {
- names: Vec<(&'a str, &'a Expr, TextRange)>,
+ names: Vec<(&'a str, &'a Expr)>,
safe_functions: Vec<&'a Expr>,
}
@@ -56,9 +55,9 @@ struct SuspiciousVariablesVisitor<'a> {
/// functions, but not bound as arguments).
impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
- match &stmt.node {
- StmtKind::FunctionDef { args, body, .. }
- | StmtKind::AsyncFunctionDef { args, body, .. } => {
+ match stmt {
+ Stmt::FunctionDef(ast::StmtFunctionDef { args, body, .. })
+ | Stmt::AsyncFunctionDef(ast::StmtAsyncFunctionDef { args, body, .. }) => {
// Collect all loaded variable names.
let mut visitor = LoadedNamesVisitor::default();
visitor.visit_body(body);
@@ -76,9 +75,12 @@ impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
);
return;
}
- StmtKind::Return { value: Some(value) } => {
+ Stmt::Return(ast::StmtReturn {
+ value: Some(value),
+ range: _,
+ }) => {
// Mark `return lambda: x` as safe.
- if matches!(value.node, ExprKind::Lambda { .. }) {
+ if value.is_lambda_expr() {
self.safe_functions.push(value);
}
}
@@ -88,43 +90,53 @@ impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
}
fn visit_expr(&mut self, expr: &'a Expr) {
- match &expr.node {
- ExprKind::Call {
+ match expr {
+ Expr::Call(ast::ExprCall {
func,
args,
keywords,
- } => {
- if let ExprKind::Name { id, .. } = &func.node {
- if id == "filter" || id == "reduce" || id == "map" {
- for arg in args {
- if matches!(arg.node, ExprKind::Lambda { .. }) {
- self.safe_functions.push(arg);
+ range: _,
+ }) => {
+ match func.as_ref() {
+ Expr::Name(ast::ExprName { id, .. }) => {
+ let id = id.as_str();
+ if id == "filter" || id == "reduce" || id == "map" {
+ for arg in args {
+ if matches!(arg, Expr::Lambda(_)) {
+ self.safe_functions.push(arg);
+ }
}
}
}
- }
- if let ExprKind::Attribute { value, attr, .. } = &func.node {
- if attr == "reduce" {
- if let ExprKind::Name { id, .. } = &value.node {
- if id == "functools" {
- for arg in args {
- if matches!(arg.node, ExprKind::Lambda { .. }) {
- self.safe_functions.push(arg);
+ Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => {
+ if attr == "reduce" {
+ if let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() {
+ if id == "functools" {
+ for arg in args {
+ if arg.is_lambda_expr() {
+ self.safe_functions.push(arg);
+ }
}
}
}
}
}
+ _ => {}
}
+
for keyword in keywords {
- if keyword.node.arg.as_ref().map_or(false, |arg| arg == "key")
- && matches!(keyword.node.value.node, ExprKind::Lambda { .. })
+ if keyword.arg.as_ref().map_or(false, |arg| arg == "key")
+ && matches!(keyword.value, Expr::Lambda(_))
{
- self.safe_functions.push(&keyword.node.value);
+ self.safe_functions.push(&keyword.value);
}
}
}
- ExprKind::Lambda { args, body } => {
+ Expr::Lambda(ast::ExprLambda {
+ args,
+ body,
+ range: _,
+ }) => {
if !self.safe_functions.contains(&expr) {
// Collect all loaded variable names.
let mut visitor = LoadedNamesVisitor::default();
@@ -159,14 +171,14 @@ struct NamesFromAssignmentsVisitor<'a> {
/// `Visitor` to collect all names used in an assignment expression.
impl<'a> Visitor<'a> for NamesFromAssignmentsVisitor<'a> {
fn visit_expr(&mut self, expr: &'a Expr) {
- match &expr.node {
- ExprKind::Name { id, .. } => {
+ match expr {
+ Expr::Name(ast::ExprName { id, .. }) => {
self.names.insert(id.as_str());
}
- ExprKind::Starred { value, .. } => {
+ Expr::Starred(ast::ExprStarred { value, .. }) => {
self.visit_expr(value);
}
- ExprKind::List { elts, .. } | ExprKind::Tuple { elts, .. } => {
+ Expr::List(ast::ExprList { elts, .. }) | Expr::Tuple(ast::ExprTuple { elts, .. }) => {
for expr in elts {
self.visit_expr(expr);
}
@@ -184,26 +196,23 @@ struct AssignedNamesVisitor<'a> {
/// `Visitor` to collect all used identifiers in a statement.
impl<'a> Visitor<'a> for AssignedNamesVisitor<'a> {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
- if matches!(
- &stmt.node,
- StmtKind::FunctionDef { .. } | StmtKind::AsyncFunctionDef { .. }
- ) {
+ if matches!(stmt, Stmt::FunctionDef(_) | Stmt::AsyncFunctionDef(_)) {
// Don't recurse.
return;
}
- match &stmt.node {
- StmtKind::Assign { targets, .. } => {
+ match stmt {
+ Stmt::Assign(ast::StmtAssign { targets, .. }) => {
let mut visitor = NamesFromAssignmentsVisitor::default();
for expr in targets {
visitor.visit_expr(expr);
}
self.names.extend(visitor.names);
}
- StmtKind::AugAssign { target, .. }
- | StmtKind::AnnAssign { target, .. }
- | StmtKind::For { target, .. }
- | StmtKind::AsyncFor { target, .. } => {
+ Stmt::AugAssign(ast::StmtAugAssign { target, .. })
+ | Stmt::AnnAssign(ast::StmtAnnAssign { target, .. })
+ | Stmt::For(ast::StmtFor { target, .. })
+ | Stmt::AsyncFor(ast::StmtAsyncFor { target, .. }) => {
let mut visitor = NamesFromAssignmentsVisitor::default();
visitor.visit_expr(target);
self.names.extend(visitor.names);
@@ -215,7 +224,7 @@ impl<'a> Visitor<'a> for AssignedNamesVisitor<'a> {
}
fn visit_expr(&mut self, expr: &'a Expr) {
- if matches!(&expr.node, ExprKind::Lambda { .. }) {
+ if matches!(expr, Expr::Lambda(_)) {
// Don't recurse.
return;
}
@@ -233,7 +242,7 @@ impl<'a> Visitor<'a> for AssignedNamesVisitor<'a> {
}
/// B023
-pub fn function_uses_loop_variable<'a>(checker: &mut Checker<'a>, node: &Node<'a>) {
+pub(crate) fn function_uses_loop_variable<'a>(checker: &mut Checker<'a>, node: &Node<'a>) {
// Identify any "suspicious" variables. These are defined as variables that are
// referenced in a function or lambda body, but aren't bound as arguments.
let suspicious_variables = {
@@ -258,7 +267,7 @@ pub fn function_uses_loop_variable<'a>(checker: &mut Checker<'a>, node: &Node<'a
// If a variable was used in a function or lambda body, and assigned in the
// loop, flag it.
- for (name, expr, range) in suspicious_variables {
+ for (name, expr) in suspicious_variables {
if reassigned_in_loop.contains(name) {
if !checker.flake8_bugbear_seen.contains(&expr) {
checker.flake8_bugbear_seen.push(expr);
@@ -266,7 +275,7 @@ pub fn function_uses_loop_variable<'a>(checker: &mut Checker<'a>, node: &Node<'a
FunctionUsesLoopVariable {
name: name.to_string(),
},
- range,
+ expr.range(),
));
}
}
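An illustration of the "suspicious variable" idea above: a closure created in a loop reads the loop variable only when it is called; lambdas passed to `map`/`filter`/`functools.reduce` or as a `key=` argument are treated as safe by the visitor.

```python
import functools

handlers = []
for tag in ["a", "b", "c"]:
    handlers.append(lambda: tag)       # B023: `tag` is looked up when called

print([h() for h in handlers])         # prints ['c', 'c', 'c'], not ['a', 'b', 'c']

fixed = []
for tag in ["a", "b", "c"]:
    fixed.append(lambda tag=tag: tag)  # bind the current value instead

total = functools.reduce(lambda acc, n: acc + n, [1, 2, 3])  # treated as safe
```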
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/getattr_with_constant.rs b/crates/ruff/src/rules/flake8_bugbear/rules/getattr_with_constant.rs
index 22019ae69f0f8..ff57ce600fa3c 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/getattr_with_constant.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/getattr_with_constant.rs
@@ -1,9 +1,8 @@
-use ruff_text_size::TextSize;
-use rustpython_parser::ast::{Constant, Expr, ExprContext, ExprKind};
+use ruff_text_size::TextRange;
+use rustpython_parser::ast::{self, Constant, Expr, ExprContext, Ranged};
-use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit};
+use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::helpers::unparse_expr;
use ruff_python_stdlib::identifiers::{is_identifier, is_mangled_private};
use crate::checkers::ast::Checker;
@@ -26,20 +25,23 @@ impl AlwaysAutofixableViolation for GetAttrWithConstant {
}
}
fn attribute(value: &Expr, attr: &str) -> Expr {
- Expr::new(
- TextSize::default(),
- TextSize::default(),
- ExprKind::Attribute {
- value: Box::new(value.clone()),
- attr: attr.to_string(),
- ctx: ExprContext::Load,
- },
- )
+ ast::ExprAttribute {
+ value: Box::new(value.clone()),
+ attr: attr.into(),
+ ctx: ExprContext::Load,
+ range: TextRange::default(),
+ }
+ .into()
}
/// B009
-pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
- let ExprKind::Name { id, .. } = &func.node else {
+pub(crate) fn getattr_with_constant(
+ checker: &mut Checker,
+ expr: &Expr,
+ func: &Expr,
+ args: &[Expr],
+) {
+ let Expr::Name(ast::ExprName { id, .. } )= func else {
return;
};
if id != "getattr" {
@@ -48,10 +50,10 @@ pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
let [obj, arg] = args else {
return;
};
- let ExprKind::Constant {
+ let Expr::Constant(ast::ExprConstant {
value: Constant::Str(value),
..
- } = &arg.node else {
+ } )= arg else {
return;
};
if !is_identifier(value) {
@@ -62,12 +64,11 @@ pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
}
let mut diagnostic = Diagnostic::new(GetAttrWithConstant, expr.range());
-
if checker.patch(diagnostic.kind.rule()) {
- diagnostic.set_fix(Edit::range_replacement(
- unparse_expr(&attribute(obj, value), checker.stylist),
+ diagnostic.set_fix(Fix::suggested(Edit::range_replacement(
+ checker.generator().expr(&attribute(obj, value)),
expr.range(),
- ));
+ )));
}
checker.diagnostics.push(diagnostic);
}
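The fix path above rewrites the call into a plain attribute access when the attribute name is a constant, valid, non-mangled identifier; dynamic or invalid names are left alone. Sketch (the class is invented):

```python
class Config:
    timeout = 30


getattr(Config, "timeout")  # B009: the fix rewrites this to `Config.timeout`
Config.timeout              # equivalent, and what the fix produces

attr = "timeout"
getattr(Config, attr)       # not flagged: the attribute name is dynamic
```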
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/jump_statement_in_finally.rs b/crates/ruff/src/rules/flake8_bugbear/rules/jump_statement_in_finally.rs
index 1ce2d401cd467..df656cafedf31 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/jump_statement_in_finally.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/jump_statement_in_finally.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Stmt, StmtKind};
+use rustpython_parser::ast::{self, Ranged, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -7,7 +7,7 @@ use crate::checkers::ast::Checker;
#[violation]
pub struct JumpStatementInFinally {
- pub name: String,
+ name: String,
}
impl Violation for JumpStatementInFinally {
@@ -23,34 +23,31 @@ fn walk_stmt(checker: &mut Checker, body: &[Stmt], f: fn(&Stmt) -> bool) {
if f(stmt) {
checker.diagnostics.push(Diagnostic::new(
JumpStatementInFinally {
- name: match &stmt.node {
- StmtKind::Break { .. } => "break".to_string(),
- StmtKind::Continue { .. } => "continue".to_string(),
- StmtKind::Return { .. } => "return".to_string(),
- _ => unreachable!(
- "Expected StmtKind::Break | StmtKind::Continue | StmtKind::Return"
- ),
- },
+ name: match stmt {
+ Stmt::Break(_) => "break",
+ Stmt::Continue(_) => "continue",
+ Stmt::Return(_) => "return",
+ _ => unreachable!("Expected Stmt::Break | Stmt::Continue | Stmt::Return"),
+ }
+ .to_owned(),
},
stmt.range(),
));
}
- match &stmt.node {
- StmtKind::While { body, .. }
- | StmtKind::For { body, .. }
- | StmtKind::AsyncFor { body, .. } => {
- walk_stmt(checker, body, |stmt| {
- matches!(stmt.node, StmtKind::Return { .. })
- });
+ match stmt {
+ Stmt::While(ast::StmtWhile { body, .. })
+ | Stmt::For(ast::StmtFor { body, .. })
+ | Stmt::AsyncFor(ast::StmtAsyncFor { body, .. }) => {
+ walk_stmt(checker, body, Stmt::is_return_stmt);
}
- StmtKind::If { body, .. }
- | StmtKind::Try { body, .. }
- | StmtKind::TryStar { body, .. }
- | StmtKind::With { body, .. }
- | StmtKind::AsyncWith { body, .. } => {
+ Stmt::If(ast::StmtIf { body, .. })
+ | Stmt::Try(ast::StmtTry { body, .. })
+ | Stmt::TryStar(ast::StmtTryStar { body, .. })
+ | Stmt::With(ast::StmtWith { body, .. })
+ | Stmt::AsyncWith(ast::StmtAsyncWith { body, .. }) => {
walk_stmt(checker, body, f);
}
- StmtKind::Match { cases, .. } => {
+ Stmt::Match(ast::StmtMatch { cases, .. }) => {
for case in cases {
walk_stmt(checker, &case.body, f);
}
@@ -61,11 +58,8 @@ fn walk_stmt(checker: &mut Checker, body: &[Stmt], f: fn(&Stmt) -> bool) {
}
/// B012
-pub fn jump_statement_in_finally(checker: &mut Checker, finalbody: &[Stmt]) {
+pub(crate) fn jump_statement_in_finally(checker: &mut Checker, finalbody: &[Stmt]) {
walk_stmt(checker, finalbody, |stmt| {
- matches!(
- stmt.node,
- StmtKind::Break | StmtKind::Continue | StmtKind::Return { .. }
- )
+ matches!(stmt, Stmt::Break(_) | Stmt::Continue(_) | Stmt::Return(_))
});
}
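In Python terms, the walk above reports `break`, `continue`, and `return` inside a `finally` block, since they silently discard any in-flight exception; nested `while`/`for` bodies are only searched for `return`. For example (the function is illustrative and never called):

```python
def read_count(path: str) -> int:
    f = open(path)
    try:
        return int(f.read())
    finally:
        f.close()
        return -1  # B012: discards any exception raised in the try block
```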
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/loop_variable_overrides_iterator.rs b/crates/ruff/src/rules/flake8_bugbear/rules/loop_variable_overrides_iterator.rs
index 493b1afb83672..f8d37590dd2ac 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/loop_variable_overrides_iterator.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/loop_variable_overrides_iterator.rs
@@ -1,5 +1,5 @@
use rustc_hash::FxHashMap;
-use rustpython_parser::ast::{Expr, ExprKind};
+use rustpython_parser::ast::{self, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -10,7 +10,7 @@ use crate::checkers::ast::Checker;
#[violation]
pub struct LoopVariableOverridesIterator {
- pub name: String,
+ name: String,
}
impl Violation for LoopVariableOverridesIterator {
@@ -31,22 +31,26 @@ where
'b: 'a,
{
fn visit_expr(&mut self, expr: &'b Expr) {
- match &expr.node {
- ExprKind::Name { id, .. } => {
+ match expr {
+ Expr::Name(ast::ExprName { id, .. }) => {
self.names.insert(id, expr);
}
- ExprKind::ListComp { generators, .. }
- | ExprKind::DictComp { generators, .. }
- | ExprKind::SetComp { generators, .. }
- | ExprKind::GeneratorExp { generators, .. } => {
+ Expr::ListComp(ast::ExprListComp { generators, .. })
+ | Expr::DictComp(ast::ExprDictComp { generators, .. })
+ | Expr::SetComp(ast::ExprSetComp { generators, .. })
+ | Expr::GeneratorExp(ast::ExprGeneratorExp { generators, .. }) => {
for comp in generators {
self.visit_expr(&comp.iter);
}
}
- ExprKind::Lambda { args, body } => {
+ Expr::Lambda(ast::ExprLambda {
+ args,
+ body,
+ range: _,
+ }) => {
visitor::walk_expr(self, body);
for arg in &args.args {
- self.names.remove(arg.node.arg.as_str());
+ self.names.remove(arg.arg.as_str());
}
}
_ => visitor::walk_expr(self, expr),
@@ -55,7 +59,7 @@ where
}
/// B020
-pub fn loop_variable_overrides_iterator(checker: &mut Checker, target: &Expr, iter: &Expr) {
+pub(crate) fn loop_variable_overrides_iterator(checker: &mut Checker, target: &Expr, iter: &Expr) {
let target_names = {
let mut target_finder = NameFinder::default();
target_finder.visit_expr(target);
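The two `NameFinder` passes here collect the names bound by the loop target and the names used in the iterable; any overlap is reported. Minimal illustration:

```python
items = ["a", "b", "c"]

for items in items:  # B020: the target `items` overrides the iterable
    print(items)

print(items)         # prints "c": the original list binding is gone
```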
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/mod.rs b/crates/ruff/src/rules/flake8_bugbear/rules/mod.rs
index 86684d7e95291..f350160a57944 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/mod.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/mod.rs
@@ -1,53 +1,64 @@
-pub use abstract_base_class::{
+pub(crate) use abstract_base_class::{
abstract_base_class, AbstractBaseClassWithoutAbstractMethod,
EmptyMethodWithoutAbstractDecorator,
};
-pub use assert_false::{assert_false, AssertFalse};
-pub use assert_raises_exception::{assert_raises_exception, AssertRaisesException};
-pub use assignment_to_os_environ::{assignment_to_os_environ, AssignmentToOsEnviron};
-pub use cached_instance_method::{cached_instance_method, CachedInstanceMethod};
-pub use cannot_raise_literal::{cannot_raise_literal, CannotRaiseLiteral};
-pub use duplicate_exceptions::{
+pub(crate) use assert_false::{assert_false, AssertFalse};
+pub(crate) use assert_raises_exception::{assert_raises_exception, AssertRaisesException};
+pub(crate) use assignment_to_os_environ::{assignment_to_os_environ, AssignmentToOsEnviron};
+pub(crate) use cached_instance_method::{cached_instance_method, CachedInstanceMethod};
+pub(crate) use cannot_raise_literal::{cannot_raise_literal, CannotRaiseLiteral};
+pub(crate) use duplicate_exceptions::{
duplicate_exceptions, DuplicateHandlerException, DuplicateTryBlockException,
};
-pub use except_with_empty_tuple::{except_with_empty_tuple, ExceptWithEmptyTuple};
-pub use except_with_non_exception_classes::{
+pub(crate) use duplicate_value::{duplicate_value, DuplicateValue};
+pub(crate) use except_with_empty_tuple::{except_with_empty_tuple, ExceptWithEmptyTuple};
+pub(crate) use except_with_non_exception_classes::{
except_with_non_exception_classes, ExceptWithNonExceptionClasses,
};
-pub use f_string_docstring::{f_string_docstring, FStringDocstring};
-pub use function_call_argument_default::{
+pub(crate) use f_string_docstring::{f_string_docstring, FStringDocstring};
+pub(crate) use function_call_argument_default::{
function_call_argument_default, FunctionCallInDefaultArgument,
};
-pub use function_uses_loop_variable::{function_uses_loop_variable, FunctionUsesLoopVariable};
-pub use getattr_with_constant::{getattr_with_constant, GetAttrWithConstant};
-pub use jump_statement_in_finally::{jump_statement_in_finally, JumpStatementInFinally};
-pub use loop_variable_overrides_iterator::{
+pub(crate) use function_uses_loop_variable::{
+ function_uses_loop_variable, FunctionUsesLoopVariable,
+};
+pub(crate) use getattr_with_constant::{getattr_with_constant, GetAttrWithConstant};
+pub(crate) use jump_statement_in_finally::{jump_statement_in_finally, JumpStatementInFinally};
+pub(crate) use loop_variable_overrides_iterator::{
loop_variable_overrides_iterator, LoopVariableOverridesIterator,
};
-pub use mutable_argument_default::{mutable_argument_default, MutableArgumentDefault};
-pub use no_explicit_stacklevel::{no_explicit_stacklevel, NoExplicitStacklevel};
-pub use raise_without_from_inside_except::{
+pub(crate) use mutable_argument_default::{mutable_argument_default, MutableArgumentDefault};
+pub(crate) use no_explicit_stacklevel::{no_explicit_stacklevel, NoExplicitStacklevel};
+pub(crate) use raise_without_from_inside_except::{
raise_without_from_inside_except, RaiseWithoutFromInsideExcept,
};
-pub use redundant_tuple_in_exception_handler::{
+pub(crate) use redundant_tuple_in_exception_handler::{
redundant_tuple_in_exception_handler, RedundantTupleInExceptionHandler,
};
-pub use reuse_of_groupby_generator::{reuse_of_groupby_generator, ReuseOfGroupbyGenerator};
-pub use setattr_with_constant::{setattr_with_constant, SetAttrWithConstant};
-pub use star_arg_unpacking_after_keyword_arg::{
+pub(crate) use reuse_of_groupby_generator::{reuse_of_groupby_generator, ReuseOfGroupbyGenerator};
+pub(crate) use setattr_with_constant::{setattr_with_constant, SetAttrWithConstant};
+pub(crate) use star_arg_unpacking_after_keyword_arg::{
star_arg_unpacking_after_keyword_arg, StarArgUnpackingAfterKeywordArg,
};
-pub use strip_with_multi_characters::{strip_with_multi_characters, StripWithMultiCharacters};
-pub use unary_prefix_increment::{unary_prefix_increment, UnaryPrefixIncrement};
-pub use unintentional_type_annotation::{
+pub(crate) use strip_with_multi_characters::{
+ strip_with_multi_characters, StripWithMultiCharacters,
+};
+pub(crate) use unary_prefix_increment::{unary_prefix_increment, UnaryPrefixIncrement};
+pub(crate) use unintentional_type_annotation::{
unintentional_type_annotation, UnintentionalTypeAnnotation,
};
-pub use unreliable_callable_check::{unreliable_callable_check, UnreliableCallableCheck};
-pub use unused_loop_control_variable::{unused_loop_control_variable, UnusedLoopControlVariable};
-pub use useless_comparison::{useless_comparison, UselessComparison};
-pub use useless_contextlib_suppress::{useless_contextlib_suppress, UselessContextlibSuppress};
-pub use useless_expression::{useless_expression, UselessExpression};
-pub use zip_without_explicit_strict::{zip_without_explicit_strict, ZipWithoutExplicitStrict};
+pub(crate) use unreliable_callable_check::{unreliable_callable_check, UnreliableCallableCheck};
+pub(crate) use unused_loop_control_variable::{
+ unused_loop_control_variable, UnusedLoopControlVariable,
+};
+pub(crate) use useless_comparison::{useless_comparison, UselessComparison};
+pub(crate) use useless_contextlib_suppress::{
+ useless_contextlib_suppress, UselessContextlibSuppress,
+};
+pub(crate) use useless_expression::{useless_expression, UselessExpression};
+pub(crate) use zip_without_explicit_strict::{
+ zip_without_explicit_strict, ZipWithoutExplicitStrict,
+};
mod abstract_base_class;
mod assert_false;
@@ -56,6 +67,7 @@ mod assignment_to_os_environ;
mod cached_instance_method;
mod cannot_raise_literal;
mod duplicate_exceptions;
+mod duplicate_value;
mod except_with_empty_tuple;
mod except_with_non_exception_classes;
mod f_string_docstring;
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/mutable_argument_default.rs b/crates/ruff/src/rules/flake8_bugbear/rules/mutable_argument_default.rs
index 600468c80488d..19698f8120e5e 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/mutable_argument_default.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/mutable_argument_default.rs
@@ -1,8 +1,9 @@
-use rustpython_parser::ast::{Arguments, Expr, ExprKind};
+use rustpython_parser::ast::{self, Arguments, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_semantic::analyze::typing::is_immutable_annotation;
+use ruff_python_semantic::model::SemanticModel;
use crate::checkers::ast::Checker;
@@ -25,32 +26,29 @@ const MUTABLE_FUNCS: &[&[&str]] = &[
&["collections", "deque"],
];
-pub fn is_mutable_func(checker: &Checker, func: &Expr) -> bool {
- checker
- .ctx
- .resolve_call_path(func)
- .map_or(false, |call_path| {
- MUTABLE_FUNCS
- .iter()
- .any(|target| call_path.as_slice() == *target)
- })
+pub(crate) fn is_mutable_func(model: &SemanticModel, func: &Expr) -> bool {
+ model.resolve_call_path(func).map_or(false, |call_path| {
+ MUTABLE_FUNCS
+ .iter()
+ .any(|target| call_path.as_slice() == *target)
+ })
}
-fn is_mutable_expr(checker: &Checker, expr: &Expr) -> bool {
- match &expr.node {
- ExprKind::List { .. }
- | ExprKind::Dict { .. }
- | ExprKind::Set { .. }
- | ExprKind::ListComp { .. }
- | ExprKind::DictComp { .. }
- | ExprKind::SetComp { .. } => true,
- ExprKind::Call { func, .. } => is_mutable_func(checker, func),
+fn is_mutable_expr(model: &SemanticModel, expr: &Expr) -> bool {
+ match expr {
+ Expr::List(_)
+ | Expr::Dict(_)
+ | Expr::Set(_)
+ | Expr::ListComp(_)
+ | Expr::DictComp(_)
+ | Expr::SetComp(_) => true,
+ Expr::Call(ast::ExprCall { func, .. }) => is_mutable_func(model, func),
_ => false,
}
}
/// B006
-pub fn mutable_argument_default(checker: &mut Checker, arguments: &Arguments) {
+pub(crate) fn mutable_argument_default(checker: &mut Checker, arguments: &Arguments) {
// Scan in reverse order to right-align zip().
for (arg, default) in arguments
.kwonlyargs
@@ -66,12 +64,10 @@ pub fn mutable_argument_default(checker: &mut Checker, arguments: &Arguments) {
.zip(arguments.defaults.iter().rev()),
)
{
- if is_mutable_expr(checker, default)
- && !arg
- .node
- .annotation
- .as_ref()
- .map_or(false, |expr| is_immutable_annotation(&checker.ctx, expr))
+ if is_mutable_expr(checker.semantic_model(), default)
+ && !arg.annotation.as_ref().map_or(false, |expr| {
+ is_immutable_annotation(checker.semantic_model(), expr)
+ })
{
checker
.diagnostics
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/no_explicit_stacklevel.rs b/crates/ruff/src/rules/flake8_bugbear/rules/no_explicit_stacklevel.rs
index 266bb7ef454e0..a7d20c2801f15 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/no_explicit_stacklevel.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/no_explicit_stacklevel.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Expr, Keyword};
+use rustpython_parser::ast::{Expr, Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -38,14 +38,14 @@ impl Violation for NoExplicitStacklevel {
}
/// B028
-pub fn no_explicit_stacklevel(
+pub(crate) fn no_explicit_stacklevel(
checker: &mut Checker,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
) {
if !checker
- .ctx
+ .semantic_model()
.resolve_call_path(func)
.map_or(false, |call_path| {
call_path.as_slice() == ["warnings", "warn"]
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/raise_without_from_inside_except.rs b/crates/ruff/src/rules/flake8_bugbear/rules/raise_without_from_inside_except.rs
index 60ff0b6b99ed4..ccb0d84c553da 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/raise_without_from_inside_except.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/raise_without_from_inside_except.rs
@@ -1,9 +1,9 @@
-use rustpython_parser::ast::{ExprKind, Stmt};
+use rustpython_parser::ast::{self, Expr, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::RaiseStatementVisitor;
-use ruff_python_ast::visitor;
+use ruff_python_ast::statement_visitor::StatementVisitor;
use ruff_python_stdlib::str::is_lower;
use crate::checkers::ast::Checker;
@@ -22,18 +22,18 @@ impl Violation for RaiseWithoutFromInsideExcept {
}
/// B904
-pub fn raise_without_from_inside_except(checker: &mut Checker, body: &[Stmt]) {
+pub(crate) fn raise_without_from_inside_except(checker: &mut Checker, body: &[Stmt]) {
let raises = {
let mut visitor = RaiseStatementVisitor::default();
- visitor::walk_body(&mut visitor, body);
+ visitor.visit_body(body);
visitor.raises
};
for (range, exc, cause) in raises {
if cause.is_none() {
if let Some(exc) = exc {
- match &exc.node {
- ExprKind::Name { id, .. } if is_lower(id) => {}
+ match exc {
+ Expr::Name(ast::ExprName { id, .. }) if is_lower(id) => {}
_ => {
checker
.diagnostics
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/redundant_tuple_in_exception_handler.rs b/crates/ruff/src/rules/flake8_bugbear/rules/redundant_tuple_in_exception_handler.rs
index 21412249a5f3b..9b0bbe6f29f45 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/redundant_tuple_in_exception_handler.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/redundant_tuple_in_exception_handler.rs
@@ -1,15 +1,14 @@
-use rustpython_parser::ast::{Excepthandler, ExcepthandlerKind, ExprKind};
+use rustpython_parser::ast::{self, Excepthandler, Expr, Ranged};
-use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit};
+use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::helpers::unparse_expr;
use crate::checkers::ast::Checker;
use crate::registry::AsRule;
#[violation]
pub struct RedundantTupleInExceptionHandler {
- pub name: String,
+ name: String,
}
impl AlwaysAutofixableViolation for RedundantTupleInExceptionHandler {
@@ -29,12 +28,15 @@ impl AlwaysAutofixableViolation for RedundantTupleInExceptionHandler {
}
/// B013
-pub fn redundant_tuple_in_exception_handler(checker: &mut Checker, handlers: &[Excepthandler]) {
+pub(crate) fn redundant_tuple_in_exception_handler(
+ checker: &mut Checker,
+ handlers: &[Excepthandler],
+) {
for handler in handlers {
- let ExcepthandlerKind::ExceptHandler { type_: Some(type_), .. } = &handler.node else {
+ let Excepthandler::ExceptHandler(ast::ExcepthandlerExceptHandler { type_: Some(type_), .. }) = handler else {
continue;
};
- let ExprKind::Tuple { elts, .. } = &type_.node else {
+ let Expr::Tuple(ast::ExprTuple { elts, .. }) = type_.as_ref() else {
continue;
};
let [elt] = &elts[..] else {
@@ -42,15 +44,16 @@ pub fn redundant_tuple_in_exception_handler(checker: &mut Checker, handlers: &[E
};
let mut diagnostic = Diagnostic::new(
RedundantTupleInExceptionHandler {
- name: unparse_expr(elt, checker.stylist),
+ name: checker.generator().expr(elt),
},
type_.range(),
);
if checker.patch(diagnostic.kind.rule()) {
- diagnostic.set_fix(Edit::range_replacement(
- unparse_expr(elt, checker.stylist),
+ #[allow(deprecated)]
+ diagnostic.set_fix(Fix::automatic(Edit::range_replacement(
+ checker.generator().expr(elt),
type_.range(),
- ));
+ )));
}
checker.diagnostics.push(diagnostic);
}
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/reuse_of_groupby_generator.rs b/crates/ruff/src/rules/flake8_bugbear/rules/reuse_of_groupby_generator.rs
index 3e53f7d205978..618af8019568b 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/reuse_of_groupby_generator.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/reuse_of_groupby_generator.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Comprehension, Expr, ExprKind, Stmt, StmtKind};
+use rustpython_parser::ast::{self, Comprehension, Expr, Ranged, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -10,7 +10,7 @@ use crate::checkers::ast::Checker;
/// Checks for multiple usage of the generator returned from
/// `itertools.groupby()`.
///
-/// ## Why is it bad?
+/// ## Why is this bad?
/// Using the generator more than once will do nothing on the second usage.
/// If that data is needed later, it should be stored as a list.
///
@@ -84,7 +84,7 @@ impl<'a> GroupNameFinder<'a> {
}
fn name_matches(&self, expr: &Expr) -> bool {
- if let ExprKind::Name { id, .. } = &expr.node {
+ if let Expr::Name(ast::ExprName { id, .. }) = expr {
id == self.group_name
} else {
false
@@ -112,10 +112,10 @@ where
if self.overridden {
return;
}
- match &stmt.node {
- StmtKind::For {
+ match stmt {
+ Stmt::For(ast::StmtFor {
target, iter, body, ..
- } => {
+ }) => {
if self.name_matches(target) {
self.overridden = true;
} else {
@@ -138,15 +138,20 @@ where
self.nested = false;
}
}
- StmtKind::While { body, .. } => {
+ Stmt::While(ast::StmtWhile { body, .. }) => {
self.nested = true;
visitor::walk_body(self, body);
self.nested = false;
}
- StmtKind::If { test, body, orelse } => {
+ Stmt::If(ast::StmtIf {
+ test,
+ body,
+ orelse,
+ range: _,
+ }) => {
// Determine whether we're on an `if` arm (as opposed to an `elif`).
let is_if_arm = !self.parent_ifs.iter().any(|parent| {
- if let StmtKind::If { orelse, .. } = &parent.node {
+ if let Stmt::If(ast::StmtIf { orelse, .. }) = parent {
orelse.len() == 1 && &orelse[0] == stmt
} else {
false
@@ -166,12 +171,12 @@ where
let has_else = orelse
.first()
- .map_or(false, |expr| !matches!(expr.node, StmtKind::If { .. }));
+ .map_or(false, |expr| !matches!(expr, Stmt::If(_)));
self.parent_ifs.push(stmt);
if has_else {
- // There's no `StmtKind::Else`; instead, the `else` contents are directly on
- // the `orelse` of the `StmtKind::If` node. We want to add a new counter for
+ // There's no `Stmt::Else`; instead, the `else` contents are directly on
+ // the `orelse` of the `Stmt::If` node. We want to add a new counter for
// the `orelse` branch, but first, we need to visit the `if` body manually.
self.visit_expr(test);
self.visit_body(body);
@@ -193,7 +198,11 @@ where
}
}
}
- StmtKind::Match { subject, cases } => {
+ Stmt::Match(ast::StmtMatch {
+ subject,
+ cases,
+ range: _,
+ }) => {
self.counter_stack.push(Vec::with_capacity(cases.len()));
self.visit_expr(subject);
for match_case in cases {
@@ -207,14 +216,14 @@ where
self.increment_usage_count(max_count);
}
}
- StmtKind::Assign { targets, value, .. } => {
+ Stmt::Assign(ast::StmtAssign { targets, value, .. }) => {
if targets.iter().any(|target| self.name_matches(target)) {
self.overridden = true;
} else {
self.visit_expr(value);
}
}
- StmtKind::AnnAssign { target, value, .. } => {
+ Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => {
if self.name_matches(target) {
self.overridden = true;
} else if let Some(expr) = value {
@@ -241,7 +250,7 @@ where
}
fn visit_expr(&mut self, expr: &'a Expr) {
- if let ExprKind::NamedExpr { target, .. } = &expr.node {
+ if let Expr::NamedExpr(ast::ExprNamedExpr { target, .. }) = expr {
if self.name_matches(target) {
self.overridden = true;
}
@@ -250,8 +259,17 @@ where
return;
}
- match &expr.node {
- ExprKind::ListComp { elt, generators } | ExprKind::SetComp { elt, generators } => {
+ match expr {
+ Expr::ListComp(ast::ExprListComp {
+ elt,
+ generators,
+ range: _,
+ })
+ | Expr::SetComp(ast::ExprSetComp {
+ elt,
+ generators,
+ range: _,
+ }) => {
for comprehension in generators {
self.visit_comprehension(comprehension);
}
@@ -261,11 +279,12 @@ where
self.nested = false;
}
}
- ExprKind::DictComp {
+ Expr::DictComp(ast::ExprDictComp {
key,
value,
generators,
- } => {
+ range: _,
+ }) => {
for comprehension in generators {
self.visit_comprehension(comprehension);
}
@@ -301,16 +320,16 @@ where
}
/// B031
-pub fn reuse_of_groupby_generator(
+pub(crate) fn reuse_of_groupby_generator(
checker: &mut Checker,
target: &Expr,
body: &[Stmt],
iter: &Expr,
) {
- let ExprKind::Call { func, .. } = &iter.node else {
+ let Expr::Call(ast::ExprCall { func, .. }) = &iter else {
return;
};
- let ExprKind::Tuple { elts, .. } = &target.node else {
+ let Expr::Tuple(ast::ExprTuple { elts, .. }) = target else {
// Ignore any `groupby()` invocation that isn't unpacked
return;
};
@@ -318,12 +337,12 @@ pub fn reuse_of_groupby_generator(
return;
}
// We have an invocation of groupby which is a simple unpacking
- let ExprKind::Name { id: group_name, .. } = &elts[1].node else {
+ let Expr::Name(ast::ExprName { id: group_name, .. }) = &elts[1] else {
return;
};
// Check if the function call is `itertools.groupby`
if !checker
- .ctx
+ .semantic_model()
.resolve_call_path(func)
.map_or(false, |call_path| {
call_path.as_slice() == ["itertools", "groupby"]
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/setattr_with_constant.rs b/crates/ruff/src/rules/flake8_bugbear/rules/setattr_with_constant.rs
index 705c1e03a3e72..9229339bf3de0 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/setattr_with_constant.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/setattr_with_constant.rs
@@ -1,10 +1,9 @@
-use ruff_text_size::TextSize;
-use rustpython_parser::ast::{Constant, Expr, ExprContext, ExprKind, Stmt, StmtKind};
+use ruff_text_size::TextRange;
+use rustpython_parser::ast::{self, Constant, Expr, ExprContext, Ranged, Stmt};
-use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit};
+use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::helpers::unparse_stmt;
-use ruff_python_ast::source_code::Stylist;
+use ruff_python_ast::source_code::Generator;
use ruff_python_stdlib::identifiers::{is_identifier, is_mangled_private};
use crate::checkers::ast::Checker;
@@ -27,30 +26,29 @@ impl AlwaysAutofixableViolation for SetAttrWithConstant {
}
}
-fn assignment(obj: &Expr, name: &str, value: &Expr, stylist: &Stylist) -> String {
- let stmt = Stmt::new(
- TextSize::default(),
- TextSize::default(),
- StmtKind::Assign {
- targets: vec![Expr::new(
- TextSize::default(),
- TextSize::default(),
- ExprKind::Attribute {
- value: Box::new(obj.clone()),
- attr: name.to_string(),
- ctx: ExprContext::Store,
- },
- )],
- value: Box::new(value.clone()),
- type_comment: None,
- },
- );
- unparse_stmt(&stmt, stylist)
+fn assignment(obj: &Expr, name: &str, value: &Expr, generator: Generator) -> String {
+ let stmt = Stmt::Assign(ast::StmtAssign {
+ targets: vec![Expr::Attribute(ast::ExprAttribute {
+ value: Box::new(obj.clone()),
+ attr: name.into(),
+ ctx: ExprContext::Store,
+ range: TextRange::default(),
+ })],
+ value: Box::new(value.clone()),
+ type_comment: None,
+ range: TextRange::default(),
+ });
+ generator.stmt(&stmt)
}
/// B010
-pub fn setattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
- let ExprKind::Name { id, .. } = &func.node else {
+pub(crate) fn setattr_with_constant(
+ checker: &mut Checker,
+ expr: &Expr,
+ func: &Expr,
+ args: &[Expr],
+) {
+ let Expr::Name(ast::ExprName { id, .. }) = func else {
return;
};
if id != "setattr" {
@@ -59,10 +57,10 @@ pub fn setattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
let [obj, name, value] = args else {
return;
};
- let ExprKind::Constant {
+ let Expr::Constant(ast::ExprConstant {
value: Constant::Str(name),
..
- } = &name.node else {
+ }) = name else {
return;
};
if !is_identifier(name) {
@@ -73,16 +71,19 @@ pub fn setattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
}
// We can only replace a `setattr` call (which is an `Expr`) with an assignment
// (which is a `Stmt`) if the `Expr` is already being used as a `Stmt`
- // (i.e., it's directly within an `StmtKind::Expr`).
- if let StmtKind::Expr { value: child } = &checker.ctx.current_stmt().node {
+ // (i.e., it's directly within an `Stmt::Expr`).
+ if let Stmt::Expr(ast::StmtExpr {
+ value: child,
+ range: _,
+ }) = checker.semantic_model().stmt()
+ {
if expr == child.as_ref() {
let mut diagnostic = Diagnostic::new(SetAttrWithConstant, expr.range());
-
if checker.patch(diagnostic.kind.rule()) {
- diagnostic.set_fix(Edit::range_replacement(
- assignment(obj, name, value, checker.stylist),
+ diagnostic.set_fix(Fix::suggested(Edit::range_replacement(
+ assignment(obj, name, value, checker.generator()),
expr.range(),
- ));
+ )));
}
checker.diagnostics.push(diagnostic);
}
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/star_arg_unpacking_after_keyword_arg.rs b/crates/ruff/src/rules/flake8_bugbear/rules/star_arg_unpacking_after_keyword_arg.rs
index e4cc0733b7aff..287f67035f6d4 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/star_arg_unpacking_after_keyword_arg.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/star_arg_unpacking_after_keyword_arg.rs
@@ -7,7 +7,7 @@
//! by the unpacked sequence, and this change of ordering can surprise and
//! mislead readers.
-use rustpython_parser::ast::{Expr, ExprKind, Keyword};
+use rustpython_parser::ast::{Expr, Keyword, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -25,7 +25,7 @@ impl Violation for StarArgUnpackingAfterKeywordArg {
}
/// B026
-pub fn star_arg_unpacking_after_keyword_arg(
+pub(crate) fn star_arg_unpacking_after_keyword_arg(
checker: &mut Checker,
args: &[Expr],
keywords: &[Keyword],
@@ -34,7 +34,7 @@ pub fn star_arg_unpacking_after_keyword_arg(
return;
};
for arg in args {
- let ExprKind::Starred { .. } = arg.node else {
+ let Expr::Starred(_) = arg else {
continue;
};
if arg.start() <= keyword.start() {
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/strip_with_multi_characters.rs b/crates/ruff/src/rules/flake8_bugbear/rules/strip_with_multi_characters.rs
index 551ca2eea2b3d..1fd78729c94a3 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/strip_with_multi_characters.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/strip_with_multi_characters.rs
@@ -1,5 +1,5 @@
use itertools::Itertools;
-use rustpython_parser::ast::{Constant, Expr, ExprKind};
+use rustpython_parser::ast::{self, Constant, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -17,8 +17,13 @@ impl Violation for StripWithMultiCharacters {
}
/// B005
-pub fn strip_with_multi_characters(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
- let ExprKind::Attribute { attr, .. } = &func.node else {
+pub(crate) fn strip_with_multi_characters(
+ checker: &mut Checker,
+ expr: &Expr,
+ func: &Expr,
+ args: &[Expr],
+) {
+ let Expr::Attribute(ast::ExprAttribute { attr, .. }) = func else {
return;
};
if !matches!(attr.as_str(), "strip" | "lstrip" | "rstrip") {
@@ -28,10 +33,10 @@ pub fn strip_with_multi_characters(checker: &mut Checker, expr: &Expr, func: &Ex
return;
}
- let ExprKind::Constant {
+ let Expr::Constant(ast::ExprConstant {
value: Constant::Str(value),
..
- } = &args[0].node else {
+ }) = &args[0] else {
return;
};
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/unary_prefix_increment.rs b/crates/ruff/src/rules/flake8_bugbear/rules/unary_prefix_increment.rs
index 066b7cf67753e..12f70b9044775 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/unary_prefix_increment.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/unary_prefix_increment.rs
@@ -17,7 +17,7 @@
//! n += 1
//! ```
-use rustpython_parser::ast::{Expr, ExprKind, Unaryop};
+use rustpython_parser::ast::{self, Expr, Ranged, Unaryop};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -35,11 +35,16 @@ impl Violation for UnaryPrefixIncrement {
}
/// B002
-pub fn unary_prefix_increment(checker: &mut Checker, expr: &Expr, op: &Unaryop, operand: &Expr) {
+pub(crate) fn unary_prefix_increment(
+ checker: &mut Checker,
+ expr: &Expr,
+ op: Unaryop,
+ operand: &Expr,
+) {
if !matches!(op, Unaryop::UAdd) {
return;
}
- let ExprKind::UnaryOp { op, .. } = &operand.node else {
+ let Expr::UnaryOp(ast::ExprUnaryOp { op, .. }) = operand else {
return;
};
if !matches!(op, Unaryop::UAdd) {
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/unintentional_type_annotation.rs b/crates/ruff/src/rules/flake8_bugbear/rules/unintentional_type_annotation.rs
index d8b5b8793c164..0c5cad9903b19 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/unintentional_type_annotation.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/unintentional_type_annotation.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Expr, ExprKind, Stmt};
+use rustpython_parser::ast::{self, Expr, Ranged, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -34,7 +34,7 @@ impl Violation for UnintentionalTypeAnnotation {
}
/// B032
-pub fn unintentional_type_annotation(
+pub(crate) fn unintentional_type_annotation(
checker: &mut Checker,
target: &Expr,
value: Option<&Expr>,
@@ -43,16 +43,16 @@ pub fn unintentional_type_annotation(
if value.is_some() {
return;
}
- match &target.node {
- ExprKind::Subscript { value, .. } => {
- if matches!(&value.node, ExprKind::Name { .. }) {
+ match target {
+ Expr::Subscript(ast::ExprSubscript { value, .. }) => {
+ if value.is_name_expr() {
checker
.diagnostics
.push(Diagnostic::new(UnintentionalTypeAnnotation, stmt.range()));
}
}
- ExprKind::Attribute { value, .. } => {
- if let ExprKind::Name { id, .. } = &value.node {
+ Expr::Attribute(ast::ExprAttribute { value, .. }) => {
+ if let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() {
if id != "self" {
checker
.diagnostics
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/unreliable_callable_check.rs b/crates/ruff/src/rules/flake8_bugbear/rules/unreliable_callable_check.rs
index 570b2f1a5a731..ec1cff463538d 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/unreliable_callable_check.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/unreliable_callable_check.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Constant, Expr, ExprKind};
+use rustpython_parser::ast::{self, Constant, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -19,8 +19,13 @@ impl Violation for UnreliableCallableCheck {
}
/// B004
-pub fn unreliable_callable_check(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
- let ExprKind::Name { id, .. } = &func.node else {
+pub(crate) fn unreliable_callable_check(
+ checker: &mut Checker,
+ expr: &Expr,
+ func: &Expr,
+ args: &[Expr],
+) {
+ let Expr::Name(ast::ExprName { id, .. }) = func else {
return;
};
if id != "getattr" && id != "hasattr" {
@@ -29,10 +34,10 @@ pub fn unreliable_callable_check(checker: &mut Checker, expr: &Expr, func: &Expr
if args.len() < 2 {
return;
};
- let ExprKind::Constant {
+ let Expr::Constant(ast::ExprConstant {
value: Constant::Str(s),
..
- } = &args[1].node else
+ }) = &args[1] else
{
return;
};
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/unused_loop_control_variable.rs b/crates/ruff/src/rules/flake8_bugbear/rules/unused_loop_control_variable.rs
index 7e53161e5d4f2..ad64d880b6925 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/unused_loop_control_variable.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/unused_loop_control_variable.rs
@@ -19,12 +19,11 @@
//! ```
use rustc_hash::FxHashMap;
-use rustpython_parser::ast::{Expr, ExprKind, Stmt};
+use rustpython_parser::ast::{self, Expr, Ranged, Stmt};
use serde::{Deserialize, Serialize};
-use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Violation};
+use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::types::RefEquality;
use ruff_python_ast::visitor::Visitor;
use ruff_python_ast::{helpers, visitor};
@@ -32,7 +31,7 @@ use crate::checkers::ast::Checker;
use crate::registry::AsRule;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, result_like::BoolLike)]
-pub enum Certainty {
+enum Certainty {
Certain,
Uncertain,
}
@@ -40,15 +39,15 @@ pub enum Certainty {
#[violation]
pub struct UnusedLoopControlVariable {
/// The name of the loop control variable.
- pub name: String,
+ name: String,
/// The name to which the variable should be renamed, if it can be
/// safely renamed.
- pub rename: Option<String>,
+ rename: Option<String>,
/// Whether the variable is certain to be unused in the loop body, or
/// merely suspect. A variable _may_ be used, but undetectably
/// so, if the loop incorporates by magic control flow (e.g.,
/// `locals()`).
- pub certainty: Certainty,
+ certainty: Certainty,
}
impl Violation for UnusedLoopControlVariable {
@@ -66,22 +65,16 @@ impl Violation for UnusedLoopControlVariable {
}
}
- fn autofix_title_formatter(&self) -> Option<fn(&Self) -> String> {
- let UnusedLoopControlVariable {
- certainty, rename, ..
- } = self;
- if certainty.to_bool() && rename.is_some() {
- Some(|UnusedLoopControlVariable { name, rename, .. }| {
- let rename = rename.as_ref().unwrap();
- format!("Rename unused `{name}` to `{rename}`")
- })
- } else {
- None
- }
+ fn autofix_title(&self) -> Option<String> {
+ let UnusedLoopControlVariable { rename, name, .. } = self;
+
+ rename
+ .as_ref()
+ .map(|rename| format!("Rename unused `{name}` to `{rename}`"))
}
}
-/// Identify all `ExprKind::Name` nodes in an AST.
+/// Identify all `Expr::Name` nodes in an AST.
struct NameFinder<'a> {
/// A map from identifier to defining expression.
names: FxHashMap<&'a str, &'a Expr>,
@@ -100,7 +93,7 @@ where
'b: 'a,
{
fn visit_expr(&mut self, expr: &'a Expr) {
- if let ExprKind::Name { id, .. } = &expr.node {
+ if let Expr::Name(ast::ExprName { id, .. }) = expr {
self.names.insert(id, expr);
}
visitor::walk_expr(self, expr);
@@ -108,12 +101,7 @@ where
}
/// B007
-pub fn unused_loop_control_variable(
- checker: &mut Checker,
- stmt: &Stmt,
- target: &Expr,
- body: &[Stmt],
-) {
+pub(crate) fn unused_loop_control_variable(checker: &mut Checker, target: &Expr, body: &[Stmt]) {
let control_names = {
let mut finder = NameFinder::new();
finder.visit_expr(target);
@@ -141,7 +129,7 @@ pub fn unused_loop_control_variable(
// Avoid fixing any variables that _may_ be used, but undetectably so.
let certainty = Certainty::from(!helpers::uses_magic_variable_access(body, |id| {
- checker.ctx.is_builtin(id)
+ checker.semantic_model().is_builtin(id)
}));
// Attempt to rename the variable by prepending an underscore, but avoid
@@ -164,21 +152,18 @@ pub fn unused_loop_control_variable(
);
if let Some(rename) = rename {
if certainty.into() && checker.patch(diagnostic.kind.rule()) {
- // Find the `BindingKind::LoopVar` corresponding to the name.
- let scope = checker.ctx.scope();
- let binding = scope.bindings_for_name(name).find_map(|index| {
- let binding = &checker.ctx.bindings[*index];
- binding
- .source
- .as_ref()
- .and_then(|source| (source == &RefEquality(stmt)).then_some(binding))
- });
- if let Some(binding) = binding {
- if binding.kind.is_loop_var() {
- if !binding.used() {
- diagnostic.set_fix(Edit::range_replacement(rename, expr.range()));
- }
- }
+ // Avoid fixing if the variable, or any future bindings to the variable, are
+ // used _after_ the loop.
+ let scope = checker.semantic_model().scope();
+ if scope
+ .bindings_for_name(name)
+ .map(|binding_id| &checker.semantic_model().bindings[binding_id])
+ .all(|binding| !binding.is_used())
+ {
+ diagnostic.set_fix(Fix::suggested(Edit::range_replacement(
+ rename,
+ expr.range(),
+ )));
}
}
}
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/useless_comparison.rs b/crates/ruff/src/rules/flake8_bugbear/rules/useless_comparison.rs
index 4d4dc2fe6323c..746acc8ec9bef 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/useless_comparison.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/useless_comparison.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Expr, ExprKind};
+use rustpython_parser::ast::{Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -19,8 +19,8 @@ impl Violation for UselessComparison {
}
/// B015
-pub fn useless_comparison(checker: &mut Checker, expr: &Expr) {
- if matches!(expr.node, ExprKind::Compare { .. }) {
+pub(crate) fn useless_comparison(checker: &mut Checker, expr: &Expr) {
+ if matches!(expr, Expr::Compare(_)) {
checker
.diagnostics
.push(Diagnostic::new(UselessComparison, expr.range()));
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/useless_contextlib_suppress.rs b/crates/ruff/src/rules/flake8_bugbear/rules/useless_contextlib_suppress.rs
index 7b618caa9a504..456d47af047c2 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/useless_contextlib_suppress.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/useless_contextlib_suppress.rs
@@ -1,9 +1,10 @@
-use rustpython_parser::ast::Expr;
+use rustpython_parser::ast::{Expr, Ranged};
-use crate::checkers::ast::Checker;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
+use crate::checkers::ast::Checker;
+
#[violation]
pub struct UselessContextlibSuppress;
@@ -18,10 +19,15 @@ impl Violation for UselessContextlibSuppress {
}
/// B022
-pub fn useless_contextlib_suppress(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
+pub(crate) fn useless_contextlib_suppress(
+ checker: &mut Checker,
+ expr: &Expr,
+ func: &Expr,
+ args: &[Expr],
+) {
if args.is_empty()
&& checker
- .ctx
+ .semantic_model()
.resolve_call_path(func)
.map_or(false, |call_path| {
call_path.as_slice() == ["contextlib", "suppress"]
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/useless_expression.rs b/crates/ruff/src/rules/flake8_bugbear/rules/useless_expression.rs
index 3ff2acaec7a3f..00376dc8b04db 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/useless_expression.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/useless_expression.rs
@@ -1,4 +1,4 @@
-use rustpython_parser::ast::{Constant, Expr, ExprKind};
+use rustpython_parser::ast::{self, Constant, Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
@@ -7,7 +7,7 @@ use ruff_python_ast::helpers::contains_effect;
use crate::checkers::ast::Checker;
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
-pub enum Kind {
+pub(crate) enum Kind {
Expression,
Attribute,
}
@@ -34,29 +34,29 @@ impl Violation for UselessExpression {
}
/// B018
-pub fn useless_expression(checker: &mut Checker, value: &Expr) {
+pub(crate) fn useless_expression(checker: &mut Checker, value: &Expr) {
// Ignore comparisons, as they're handled by `useless_comparison`.
- if matches!(value.node, ExprKind::Compare { .. }) {
+ if matches!(value, Expr::Compare(_)) {
return;
}
// Ignore strings, to avoid false positives with docstrings.
if matches!(
- value.node,
- ExprKind::JoinedStr { .. }
- | ExprKind::Constant {
+ value,
+ Expr::JoinedStr(_)
+ | Expr::Constant(ast::ExprConstant {
value: Constant::Str(..) | Constant::Ellipsis,
..
- }
+ })
) {
return;
}
// Ignore statements that have side effects.
- if contains_effect(value, |id| checker.ctx.is_builtin(id)) {
+ if contains_effect(value, |id| checker.semantic_model().is_builtin(id)) {
// Flag attributes as useless expressions, even if they're attached to calls or other
// expressions.
- if matches!(value.node, ExprKind::Attribute { .. }) {
+ if matches!(value, Expr::Attribute(_)) {
checker.diagnostics.push(Diagnostic::new(
UselessExpression {
kind: Kind::Attribute,
diff --git a/crates/ruff/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs b/crates/ruff/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs
index ec3c3db340046..3bbc2efbaced6 100644
--- a/crates/ruff/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs
@@ -1,8 +1,11 @@
-use rustpython_parser::ast::{Expr, ExprKind, Keyword};
+use rustpython_parser::ast::{self, Expr, Keyword, Ranged};
-use crate::checkers::ast::Checker;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
+use ruff_python_ast::helpers::is_const_none;
+use ruff_python_semantic::model::SemanticModel;
+
+use crate::checkers::ast::Checker;
#[violation]
pub struct ZipWithoutExplicitStrict;
@@ -15,22 +18,22 @@ impl Violation for ZipWithoutExplicitStrict {
}
/// B905
-pub fn zip_without_explicit_strict(
+pub(crate) fn zip_without_explicit_strict(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
+ args: &[Expr],
kwargs: &[Keyword],
) {
- if let ExprKind::Name { id, .. } = &func.node {
+ if let Expr::Name(ast::ExprName { id, .. }) = func {
if id == "zip"
- && checker.ctx.is_builtin("zip")
- && !kwargs.iter().any(|keyword| {
- keyword
- .node
- .arg
- .as_ref()
- .map_or(false, |name| name == "strict")
- })
+ && checker.semantic_model().is_builtin("zip")
+ && !kwargs
+ .iter()
+ .any(|keyword| keyword.arg.as_ref().map_or(false, |name| name == "strict"))
+ && !args
+ .iter()
+ .any(|arg| is_infinite_iterator(arg, checker.semantic_model()))
{
checker
.diagnostics
@@ -38,3 +41,40 @@ pub fn zip_without_explicit_strict(
}
}
}
+
+/// Return `true` if the [`Expr`] appears to be an infinite iterator (e.g., a call to
+/// `itertools.cycle` or similar).
+fn is_infinite_iterator(arg: &Expr, model: &SemanticModel) -> bool {
+ let Expr::Call(ast::ExprCall { func, args, keywords, .. }) = &arg else {
+ return false;
+ };
+
+ return model
+ .resolve_call_path(func)
+ .map_or(false, |call_path| match call_path.as_slice() {
+ ["itertools", "cycle" | "count"] => true,
+ ["itertools", "repeat"] => {
+ // Ex) `itertools.repeat(1)`
+ if keywords.is_empty() && args.len() == 1 {
+ return true;
+ }
+
+ // Ex) `itertools.repeat(1, None)`
+ if args.len() == 2 && is_const_none(&args[1]) {
+ return true;
+ }
+
+ // Ex) `itertools.repeat(1, times=None)`
+ for keyword in keywords {
+ if keyword.arg.as_ref().map_or(false, |name| name == "times") {
+ if is_const_none(&keyword.value) {
+ return true;
+ }
+ }
+ }
+
+ false
+ }
+ _ => false,
+ });
+}
diff --git a/crates/ruff/src/rules/flake8_bugbear/settings.rs b/crates/ruff/src/rules/flake8_bugbear/settings.rs
index 566aa1519d2db..4de75f4d782b4 100644
--- a/crates/ruff/src/rules/flake8_bugbear/settings.rs
+++ b/crates/ruff/src/rules/flake8_bugbear/settings.rs
@@ -1,18 +1,18 @@
//! Settings for the `flake8-bugbear` plugin.
-use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use ruff_macros::{CacheKey, ConfigurationOptions};
+use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
- Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, JsonSchema,
+ Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8BugbearOptions"
)]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"[]"#,
diff --git a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B006_B006_B008.py.snap b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B006_B006_B008.py.snap
index 2cb0b33275e85..ff021329cc4bf 100644
--- a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B006_B006_B008.py.snap
+++ b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B006_B006_B008.py.snap
@@ -1,113 +1,113 @@
---
source: crates/ruff/src/rules/flake8_bugbear/mod.rs
---
-B006_B008.py:62:25: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:63:25: B006 Do not use mutable data structures for argument defaults
|
-62 | def this_is_wrong(value=[1, 2, 3]):
+63 | def this_is_wrong(value=[1, 2, 3]):
| ^^^^^^^^^ B006
-63 | ...
+64 | ...
|
-B006_B008.py:66:30: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:67:30: B006 Do not use mutable data structures for argument defaults
|
-66 | def this_is_also_wrong(value={}):
+67 | def this_is_also_wrong(value={}):
| ^^ B006
-67 | ...
+68 | ...
|
-B006_B008.py:70:20: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:71:20: B006 Do not use mutable data structures for argument defaults
|
-70 | def and_this(value=set()):
+71 | def and_this(value=set()):
| ^^^^^ B006
-71 | ...
+72 | ...
|
-B006_B008.py:74:20: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:75:20: B006 Do not use mutable data structures for argument defaults
|
-74 | def this_too(value=collections.OrderedDict()):
+75 | def this_too(value=collections.OrderedDict()):
| ^^^^^^^^^^^^^^^^^^^^^^^^^ B006
-75 | ...
+76 | ...
|
-B006_B008.py:78:32: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:79:32: B006 Do not use mutable data structures for argument defaults
|
-78 | async def async_this_too(value=collections.defaultdict()):
+79 | async def async_this_too(value=collections.defaultdict()):
| ^^^^^^^^^^^^^^^^^^^^^^^^^ B006
-79 | ...
+80 | ...
|
-B006_B008.py:82:26: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:83:26: B006 Do not use mutable data structures for argument defaults
|
-82 | def dont_forget_me(value=collections.deque()):
+83 | def dont_forget_me(value=collections.deque()):
| ^^^^^^^^^^^^^^^^^^^ B006
-83 | ...
+84 | ...
|
-B006_B008.py:87:46: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:88:46: B006 Do not use mutable data structures for argument defaults
|
-87 | # N.B. we're also flagging the function call in the comprehension
-88 | def list_comprehension_also_not_okay(default=[i**2 for i in range(3)]):
+88 | # N.B. we're also flagging the function call in the comprehension
+89 | def list_comprehension_also_not_okay(default=[i**2 for i in range(3)]):
| ^^^^^^^^^^^^^^^^^^^^^^^^ B006
-89 | pass
+90 | pass
|
-B006_B008.py:91:46: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:92:46: B006 Do not use mutable data structures for argument defaults
|
-91 | def dict_comprehension_also_not_okay(default={i: i**2 for i in range(3)}):
+92 | def dict_comprehension_also_not_okay(default={i: i**2 for i in range(3)}):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ B006
-92 | pass
+93 | pass
|
-B006_B008.py:95:45: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:96:45: B006 Do not use mutable data structures for argument defaults
|
-95 | def set_comprehension_also_not_okay(default={i**2 for i in range(3)}):
+96 | def set_comprehension_also_not_okay(default={i**2 for i in range(3)}):
| ^^^^^^^^^^^^^^^^^^^^^^^^ B006
-96 | pass
+97 | pass
|
-B006_B008.py:99:33: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:100:33: B006 Do not use mutable data structures for argument defaults
|
- 99 | def kwonlyargs_mutable(*, value=[]):
+100 | def kwonlyargs_mutable(*, value=[]):
| ^^ B006
-100 | ...
+101 | ...
|
-B006_B008.py:192:20: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:218:20: B006 Do not use mutable data structures for argument defaults
|
-192 | # B006 and B008
-193 | # We should handle arbitrary nesting of these B008.
-194 | def nested_combo(a=[float(3), dt.datetime.now()]):
+218 | # B006 and B008
+219 | # We should handle arbitrary nesting of these B008.
+220 | def nested_combo(a=[float(3), dt.datetime.now()]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B006
-195 | pass
+221 | pass
|
-B006_B008.py:225:27: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:251:27: B006 Do not use mutable data structures for argument defaults
|
-225 | def mutable_annotations(
-226 | a: list[int] | None = [],
+251 | def mutable_annotations(
+252 | a: list[int] | None = [],
| ^^ B006
-227 | b: Optional[Dict[int, int]] = {},
-228 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
+253 | b: Optional[Dict[int, int]] = {},
+254 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
-B006_B008.py:226:35: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:252:35: B006 Do not use mutable data structures for argument defaults
|
-226 | def mutable_annotations(
-227 | a: list[int] | None = [],
-228 | b: Optional[Dict[int, int]] = {},
+252 | def mutable_annotations(
+253 | a: list[int] | None = [],
+254 | b: Optional[Dict[int, int]] = {},
| ^^ B006
-229 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
-230 | ):
+255 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
+256 | ):
|
-B006_B008.py:227:62: B006 Do not use mutable data structures for argument defaults
+B006_B008.py:253:62: B006 Do not use mutable data structures for argument defaults
|
-227 | a: list[int] | None = [],
-228 | b: Optional[Dict[int, int]] = {},
-229 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
+253 | a: list[int] | None = [],
+254 | b: Optional[Dict[int, int]] = {},
+255 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^ B006
-230 | ):
-231 | pass
+256 | ):
+257 | pass
|
diff --git a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B007_B007.py.snap b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B007_B007.py.snap
index 3960383665ace..f263105c6083e 100644
--- a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B007_B007.py.snap
+++ b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B007_B007.py.snap
@@ -1,7 +1,7 @@
---
source: crates/ruff/src/rules/flake8_bugbear/mod.rs
---
-B007.py:6:5: B007 [*] Loop control variable `i` not used within loop body
+B007.py:6:5: B007 Loop control variable `i` not used within loop body
|
6 | print(i) # name no longer defined on Python 3; no warning yet
7 |
@@ -31,7 +31,7 @@ B007.py:18:13: B007 [*] Loop control variable `k` not used within loop body
20 20 |
21 21 |
-B007.py:30:5: B007 [*] Loop control variable `i` not used within loop body
+B007.py:30:5: B007 Loop control variable `i` not used within loop body
|
30 | for i, (j, (k, l)) in strange_generator(): # i, k not used
| ^ B007
@@ -65,6 +65,7 @@ B007.py:34:10: B007 Loop control variable `bar` may not be used within loop body
36 | if foo:
37 | print(FMT.format(**locals()))
|
+ = help: Rename unused `bar` to `_bar`
B007.py:38:10: B007 Loop control variable `bar` may not be used within loop body
|
@@ -75,6 +76,7 @@ B007.py:38:10: B007 Loop control variable `bar` may not be used within loop body
41 | if foo:
42 | print(FMT.format(**globals()))
|
+ = help: Rename unused `bar` to `_bar`
B007.py:42:10: B007 Loop control variable `bar` may not be used within loop body
|
@@ -85,6 +87,7 @@ B007.py:42:10: B007 Loop control variable `bar` may not be used within loop body
45 | if foo:
46 | print(FMT.format(**vars()))
|
+ = help: Rename unused `bar` to `_bar`
B007.py:46:10: B007 Loop control variable `bar` may not be used within loop body
|
@@ -94,6 +97,7 @@ B007.py:46:10: B007 Loop control variable `bar` may not be used within loop body
| ^^^ B007
49 | print(FMT.format(foo=foo, bar=eval("bar")))
|
+ = help: Rename unused `bar` to `_bar`
B007.py:52:14: B007 [*] Loop control variable `bar` not used within loop body
|
@@ -116,7 +120,7 @@ B007.py:52:14: B007 [*] Loop control variable `bar` not used within loop body
54 54 | break
55 55 |
-B007.py:59:14: B007 [*] Loop control variable `bar` not used within loop body
+B007.py:59:14: B007 Loop control variable `bar` not used within loop body
|
59 | def f():
60 | # Unfixable due to usage of `bar` outside of loop.
@@ -148,10 +152,10 @@ B007.py:68:14: B007 [*] Loop control variable `bar` not used within loop body
70 70 | break
71 71 |
-B007.py:77:14: B007 [*] Loop control variable `bar` not used within loop body
+B007.py:77:14: B007 Loop control variable `bar` not used within loop body
|
77 | def f():
-78 | # Fixable.
+78 | # Unfixable.
79 | for foo, bar, baz in (["1", "2", "3"],):
| ^^^ B007
80 | if foo or baz:
@@ -159,23 +163,24 @@ B007.py:77:14: B007 [*] Loop control variable `bar` not used within loop body
|
= help: Rename unused `bar` to `_bar`
-ℹ Suggested fix
-74 74 |
-75 75 | def f():
-76 76 | # Fixable.
-77 |- for foo, bar, baz in (["1", "2", "3"],):
- 77 |+ for foo, _bar, baz in (["1", "2", "3"],):
-78 78 | if foo or baz:
-79 79 | break
-80 80 |
-
-B007.py:87:5: B007 Loop control variable `line_` not used within loop body
- |
-87 | # Unfixable due to trailing underscore (`_line_` wouldn't be considered an ignorable
-88 | # variable name).
-89 | for line_ in range(self.header_lines):
- | ^^^^^ B007
-90 | fp.readline()
+B007.py:88:14: B007 Loop control variable `bar` not used within loop body
|
+88 | def f():
+89 | # Unfixable (false negative) due to usage of `bar` outside of loop.
+90 | for foo, bar, baz in (["1", "2", "3"],):
+ | ^^^ B007
+91 | if foo or baz:
+92 | break
+ |
+ = help: Rename unused `bar` to `_bar`
+
+B007.py:98:5: B007 Loop control variable `line_` not used within loop body
+ |
+ 98 | # Unfixable due to trailing underscore (`_line_` wouldn't be considered an ignorable
+ 99 | # variable name).
+100 | for line_ in range(self.header_lines):
+ | ^^^^^ B007
+101 | fp.readline()
+ |
diff --git a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B008_B006_B008.py.snap b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B008_B006_B008.py.snap
index 7ec11c3ab1900..514e2b2f56f0e 100644
--- a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B008_B006_B008.py.snap
+++ b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B008_B006_B008.py.snap
@@ -1,114 +1,83 @@
---
source: crates/ruff/src/rules/flake8_bugbear/mod.rs
---
-B006_B008.py:87:61: B008 Do not perform function call `range` in argument defaults
+B006_B008.py:88:61: B008 Do not perform function call `range` in argument defaults
|
-87 | # N.B. we're also flagging the function call in the comprehension
-88 | def list_comprehension_also_not_okay(default=[i**2 for i in range(3)]):
+88 | # N.B. we're also flagging the function call in the comprehension
+89 | def list_comprehension_also_not_okay(default=[i**2 for i in range(3)]):
| ^^^^^^^^ B008
-89 | pass
+90 | pass
|
-B006_B008.py:91:64: B008 Do not perform function call `range` in argument defaults
+B006_B008.py:92:64: B008 Do not perform function call `range` in argument defaults
|
-91 | def dict_comprehension_also_not_okay(default={i: i**2 for i in range(3)}):
+92 | def dict_comprehension_also_not_okay(default={i: i**2 for i in range(3)}):
| ^^^^^^^^ B008
-92 | pass
+93 | pass
|
-B006_B008.py:95:60: B008 Do not perform function call `range` in argument defaults
+B006_B008.py:96:60: B008 Do not perform function call `range` in argument defaults
|
-95 | def set_comprehension_also_not_okay(default={i**2 for i in range(3)}):
+96 | def set_comprehension_also_not_okay(default={i**2 for i in range(3)}):
| ^^^^^^^^ B008
-96 | pass
+97 | pass
|
-B006_B008.py:111:39: B008 Do not perform function call `time.time` in argument defaults
+B006_B008.py:112:39: B008 Do not perform function call `time.time` in argument defaults
|
-111 | # B008
-112 | # Flag function calls as default args (including if they are part of a sub-expression)
-113 | def in_fact_all_calls_are_wrong(value=time.time()):
+112 | # B008
+113 | # Flag function calls as default args (including if they are part of a sub-expression)
+114 | def in_fact_all_calls_are_wrong(value=time.time()):
| ^^^^^^^^^^^ B008
-114 | ...
+115 | ...
|
-B006_B008.py:115:12: B008 Do not perform function call `dt.datetime.now` in argument defaults
+B006_B008.py:116:12: B008 Do not perform function call `dt.datetime.now` in argument defaults
|
-115 | def f(when=dt.datetime.now() + dt.timedelta(days=7)):
+116 | def f(when=dt.datetime.now() + dt.timedelta(days=7)):
| ^^^^^^^^^^^^^^^^^ B008
-116 | pass
+117 | pass
|
-B006_B008.py:119:30: B008 Do not perform function call in argument defaults
+B006_B008.py:120:30: B008 Do not perform function call in argument defaults
|
-119 | def can_even_catch_lambdas(a=(lambda x: x)()):
+120 | def can_even_catch_lambdas(a=(lambda x: x)()):
| ^^^^^^^^^^^^^^^ B008
-120 | ...
+121 | ...
|
-B006_B008.py:157:34: B008 Do not perform function call `float` in argument defaults
+B006_B008.py:218:31: B008 Do not perform function call `dt.datetime.now` in argument defaults
|
-157 | def float_infinity_literal(value=float("1e999")):
- | ^^^^^^^^^^^^^^ B008
-158 | pass
- |
-
-B006_B008.py:162:30: B008 Do not perform function call `float` in argument defaults
- |
-162 | # But don't allow standard floats
-163 | def float_int_is_wrong(value=float(3)):
- | ^^^^^^^^ B008
-164 | pass
- |
-
-B006_B008.py:166:45: B008 Do not perform function call `float` in argument defaults
- |
-166 | def float_str_not_inf_or_nan_is_wrong(value=float("3.14")):
- | ^^^^^^^^^^^^^ B008
-167 | pass
- |
-
-B006_B008.py:192:21: B008 Do not perform function call `float` in argument defaults
- |
-192 | # B006 and B008
-193 | # We should handle arbitrary nesting of these B008.
-194 | def nested_combo(a=[float(3), dt.datetime.now()]):
- | ^^^^^^^^ B008
-195 | pass
- |
-
-B006_B008.py:192:31: B008 Do not perform function call `dt.datetime.now` in argument defaults
- |
-192 | # B006 and B008
-193 | # We should handle arbitrary nesting of these B008.
-194 | def nested_combo(a=[float(3), dt.datetime.now()]):
+218 | # B006 and B008
+219 | # We should handle arbitrary nesting of these B008.
+220 | def nested_combo(a=[float(3), dt.datetime.now()]):
| ^^^^^^^^^^^^^^^^^ B008
-195 | pass
+221 | pass
|
-B006_B008.py:198:22: B008 Do not perform function call `map` in argument defaults
+B006_B008.py:224:22: B008 Do not perform function call `map` in argument defaults
|
-198 | # Don't flag nested B006 since we can't guarantee that
-199 | # it isn't made mutable by the outer operation.
-200 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])):
+224 | # Don't flag nested B006 since we can't guarantee that
+225 | # it isn't made mutable by the outer operation.
+226 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B008
-201 | pass
+227 | pass
|
-B006_B008.py:203:19: B008 Do not perform function call `random.randint` in argument defaults
+B006_B008.py:229:19: B008 Do not perform function call `random.randint` in argument defaults
|
-203 | # B008-ception.
-204 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
+229 | # B008-ception.
+230 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B008
-205 | pass
+231 | pass
|
-B006_B008.py:203:37: B008 Do not perform function call `dt.datetime.now` in argument defaults
+B006_B008.py:229:37: B008 Do not perform function call `dt.datetime.now` in argument defaults
|
-203 | # B008-ception.
-204 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
+229 | # B008-ception.
+230 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
| ^^^^^^^^^^^^^^^^^ B008
-205 | pass
+231 | pass
|
diff --git a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B013_B013.py.snap b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B013_B013.py.snap
index da61eecc72a3c..698bf0d0f33dd 100644
--- a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B013_B013.py.snap
+++ b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B013_B013.py.snap
@@ -1,5 +1,6 @@
---
source: crates/ruff/src/rules/flake8_bugbear/mod.rs
+assertion_line: 57
---
B013.py:3:8: B013 [*] A length-one tuple literal is redundant. Write `except ValueError` instead of `except (ValueError,)`.
|
@@ -12,7 +13,7 @@ B013.py:3:8: B013 [*] A length-one tuple literal is redundant. Write `except Val
|
= help: Replace with `except ValueError`
-ℹ Suggested fix
+ℹ Fix
1 1 | try:
2 2 | pass
3 |-except (ValueError,):
diff --git a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B021_B021.py.snap b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B021_B021.py.snap
index 77d34cb658b53..e313e6d8ae032 100644
--- a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B021_B021.py.snap
+++ b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B021_B021.py.snap
@@ -1,7 +1,7 @@
---
source: crates/ruff/src/rules/flake8_bugbear/mod.rs
---
-B021.py:1:1: B021 f-string used as docstring. This will be interpreted by python as a joined string rather than a docstring.
+B021.py:1:1: B021 f-string used as docstring. Python will interpret this as a joined string, rather than a docstring.
|
1 | / f"""
2 | | Should emit:
@@ -12,63 +12,63 @@ B021.py:1:1: B021 f-string used as docstring. This will be interpreted by python
6 | VARIABLE = "world"
|
-B021.py:14:5: B021 f-string used as docstring. This will be interpreted by python as a joined string rather than a docstring.
+B021.py:14:5: B021 f-string used as docstring. Python will interpret this as a joined string, rather than a docstring.
|
14 | def foo2():
15 | f"""hello {VARIABLE}!"""
| ^^^^^^^^^^^^^^^^^^^^^^^^ B021
|
-B021.py:22:5: B021 f-string used as docstring. This will be interpreted by python as a joined string rather than a docstring.
+B021.py:22:5: B021 f-string used as docstring. Python will interpret this as a joined string, rather than a docstring.
|
22 | class bar2:
23 | f"""hello {VARIABLE}!"""
| ^^^^^^^^^^^^^^^^^^^^^^^^ B021
|
-B021.py:30:5: B021 f-string used as docstring. This will be interpreted by python as a joined string rather than a docstring.
+B021.py:30:5: B021 f-string used as docstring. Python will interpret this as a joined string, rather than a docstring.
|
30 | def foo2():
31 | f"""hello {VARIABLE}!"""
| ^^^^^^^^^^^^^^^^^^^^^^^^ B021
|
-B021.py:38:5: B021 f-string used as docstring. This will be interpreted by python as a joined string rather than a docstring.
+B021.py:38:5: B021 f-string used as docstring. Python will interpret this as a joined string, rather than a docstring.
|
38 | class bar2:
39 | f"""hello {VARIABLE}!"""
| ^^^^^^^^^^^^^^^^^^^^^^^^ B021
|
-B021.py:46:5: B021 f-string used as docstring. This will be interpreted by python as a joined string rather than a docstring.
+B021.py:46:5: B021 f-string used as docstring. Python will interpret this as a joined string, rather than a docstring.
|
46 | def foo2():
47 | f"hello {VARIABLE}!"
| ^^^^^^^^^^^^^^^^^^^^ B021
|
-B021.py:54:5: B021 f-string used as docstring. This will be interpreted by python as a joined string rather than a docstring.
+B021.py:54:5: B021 f-string used as docstring. Python will interpret this as a joined string, rather than a docstring.
|
54 | class bar2:
55 | f"hello {VARIABLE}!"
| ^^^^^^^^^^^^^^^^^^^^ B021
|
-B021.py:62:5: B021 f-string used as docstring. This will be interpreted by python as a joined string rather than a docstring.
+B021.py:62:5: B021 f-string used as docstring. Python will interpret this as a joined string, rather than a docstring.
|
62 | def foo2():
63 | f"hello {VARIABLE}!"
| ^^^^^^^^^^^^^^^^^^^^ B021
|
-B021.py:70:5: B021 f-string used as docstring. This will be interpreted by python as a joined string rather than a docstring.
+B021.py:70:5: B021 f-string used as docstring. Python will interpret this as a joined string, rather than a docstring.
|
70 | class bar2:
71 | f"hello {VARIABLE}!"
| ^^^^^^^^^^^^^^^^^^^^ B021
|
-B021.py:74:5: B021 f-string used as docstring. This will be interpreted by python as a joined string rather than a docstring.
+B021.py:74:5: B021 f-string used as docstring. Python will interpret this as a joined string, rather than a docstring.
|
74 | def baz():
75 | f"""I'm probably a docstring: {VARIABLE}!"""
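The reworded B021 message reflects how CPython treats the literal: an f-string in docstring position is a joined-string expression, so it is evaluated and discarded rather than stored on `__doc__`. A minimal sketch (names illustrative):

```python
VARIABLE = "world"


def flagged():
    f"""hello {VARIABLE}!"""  # B021: evaluated as an expression, not kept as __doc__


def fixed():
    """hello world!"""  # plain string literal, picked up as the docstring


assert flagged.__doc__ is None
assert fixed.__doc__ == "hello world!"
```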
diff --git a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B024_B024.py.snap b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B024_B024.py.snap
index 72378202ef6ac..35db4324c0396 100644
--- a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B024_B024.py.snap
+++ b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B024_B024.py.snap
@@ -1,52 +1,52 @@
---
source: crates/ruff/src/rules/flake8_bugbear/mod.rs
---
-B024.py:18:1: B024 `Base_1` is an abstract base class, but it has no abstract methods
+B024.py:18:7: B024 `Base_1` is an abstract base class, but it has no abstract methods
|
-18 | / class Base_1(ABC): # error
-19 | | def method(self):
-20 | | foo()
- | |_____________^ B024
+18 | class Base_1(ABC): # error
+ | ^^^^^^ B024
+19 | def method(self):
+20 | foo()
|
-B024.py:71:1: B024 `MetaBase_1` is an abstract base class, but it has no abstract methods
+B024.py:71:7: B024 `MetaBase_1` is an abstract base class, but it has no abstract methods
|
-71 | / class MetaBase_1(metaclass=ABCMeta): # error
-72 | | def method(self):
-73 | | foo()
- | |_____________^ B024
+71 | class MetaBase_1(metaclass=ABCMeta): # error
+ | ^^^^^^^^^^ B024
+72 | def method(self):
+73 | foo()
|
-B024.py:82:1: B024 `abc_Base_1` is an abstract base class, but it has no abstract methods
+B024.py:82:7: B024 `abc_Base_1` is an abstract base class, but it has no abstract methods
|
-82 | / class abc_Base_1(abc.ABC): # error
-83 | | def method(self):
-84 | | foo()
- | |_____________^ B024
+82 | class abc_Base_1(abc.ABC): # error
+ | ^^^^^^^^^^ B024
+83 | def method(self):
+84 | foo()
|
-B024.py:87:1: B024 `abc_Base_2` is an abstract base class, but it has no abstract methods
+B024.py:87:7: B024 `abc_Base_2` is an abstract base class, but it has no abstract methods
|
-87 | / class abc_Base_2(metaclass=abc.ABCMeta): # error
-88 | | def method(self):
-89 | | foo()
- | |_____________^ B024
+87 | class abc_Base_2(metaclass=abc.ABCMeta): # error
+ | ^^^^^^^^^^ B024
+88 | def method(self):
+89 | foo()
|
-B024.py:92:1: B024 `notabc_Base_1` is an abstract base class, but it has no abstract methods
+B024.py:92:7: B024 `notabc_Base_1` is an abstract base class, but it has no abstract methods
|
-92 | / class notabc_Base_1(notabc.ABC): # error
-93 | | def method(self):
-94 | | foo()
- | |_____________^ B024
+92 | class notabc_Base_1(notabc.ABC): # error
+ | ^^^^^^^^^^^^^ B024
+93 | def method(self):
+94 | foo()
|
-B024.py:141:1: B024 `abc_set_class_variable_4` is an abstract base class, but it has no abstract methods
+B024.py:141:7: B024 `abc_set_class_variable_4` is an abstract base class, but it has no abstract methods
|
-141 | # this doesn't actually declare a class variable, it's just an expression
-142 | / class abc_set_class_variable_4(ABC): # error
-143 | | foo
- | |_______^ B024
+141 | # this doesn't actually declare a class variable, it's just an expression
+142 | class abc_set_class_variable_4(ABC): # error
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ B024
+143 | foo
|
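The B024 ranges above now underline the class name rather than the whole class body. A minimal sketch of what the rule flags and the usual remedy (class and method names are illustrative):

```python
from abc import ABC, abstractmethod


class Base(ABC):  # B024: inherits from ABC but declares no abstract methods
    def method(self):
        return 42


class FixedBase(ABC):  # not flagged: at least one method is marked abstract
    @abstractmethod
    def method(self):
        ...
```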
diff --git a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B027_B027.py.snap b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B027_B027.py.snap
index 31830c9e92b34..0cc5957de16a2 100644
--- a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B027_B027.py.snap
+++ b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B027_B027.py.snap
@@ -1,96 +1,56 @@
---
source: crates/ruff/src/rules/flake8_bugbear/mod.rs
---
-B027.py:13:5: B027 [*] `AbstractClass.empty_1` is an empty method in an abstract base class, but has no abstract decorator
+B027.py:18:5: B027 `AbstractClass.empty_1` is an empty method in an abstract base class, but has no abstract decorator
|
-13 | class AbstractClass(ABC):
-14 | def empty_1(self): # error
+18 | class AbstractClass(ABC):
+19 | def empty_1(self): # error
| _____^
-15 | | ...
+20 | | ...
| |___________^ B027
-16 |
-17 | def empty_2(self): # error
+21 |
+22 | def empty_2(self): # error
|
- = help: Add the `@abstractmethod` decorator
-ℹ Suggested fix
-10 10 |
-11 11 |
-12 12 | class AbstractClass(ABC):
- 13 |+ @notabstract
-13 14 | def empty_1(self): # error
-14 15 | ...
-15 16 |
-
-B027.py:16:5: B027 [*] `AbstractClass.empty_2` is an empty method in an abstract base class, but has no abstract decorator
+B027.py:21:5: B027 `AbstractClass.empty_2` is an empty method in an abstract base class, but has no abstract decorator
|
-16 | ...
-17 |
-18 | def empty_2(self): # error
+21 | ...
+22 |
+23 | def empty_2(self): # error
| _____^
-19 | | pass
+24 | | pass
| |____________^ B027
-20 |
-21 | def empty_3(self): # error
+25 |
+26 | def empty_3(self): # error
|
- = help: Add the `@abstractmethod` decorator
-
-ℹ Suggested fix
-13 13 | def empty_1(self): # error
-14 14 | ...
-15 15 |
- 16 |+ @notabstract
-16 17 | def empty_2(self): # error
-17 18 | pass
-18 19 |
-B027.py:19:5: B027 [*] `AbstractClass.empty_3` is an empty method in an abstract base class, but has no abstract decorator
+B027.py:24:5: B027 `AbstractClass.empty_3` is an empty method in an abstract base class, but has no abstract decorator
|
-19 | pass
-20 |
-21 | def empty_3(self): # error
+24 | pass
+25 |
+26 | def empty_3(self): # error
| _____^
-22 | | """docstring"""
-23 | | ...
+27 | | """docstring"""
+28 | | ...
| |___________^ B027
-24 |
-25 | def empty_4(self): # error
+29 |
+30 | def empty_4(self): # error
|
- = help: Add the `@abstractmethod` decorator
-ℹ Suggested fix
-16 16 | def empty_2(self): # error
-17 17 | pass
-18 18 |
- 19 |+ @notabstract
-19 20 | def empty_3(self): # error
-20 21 | """docstring"""
-21 22 | ...
-
-B027.py:23:5: B027 [*] `AbstractClass.empty_4` is an empty method in an abstract base class, but has no abstract decorator
+B027.py:28:5: B027 `AbstractClass.empty_4` is an empty method in an abstract base class, but has no abstract decorator
|
-23 | ...
-24 |
-25 | def empty_4(self): # error
+28 | ...
+29 |
+30 | def empty_4(self): # error
| _____^
-26 | | """multiple ellipsis/pass"""
-27 | | ...
-28 | | pass
-29 | | ...
-30 | | pass
+31 | | """multiple ellipsis/pass"""
+32 | | ...
+33 | | pass
+34 | | ...
+35 | | pass
| |____________^ B027
-31 |
-32 | @notabstract
+36 |
+37 | @notabstract
|
- = help: Add the `@abstractmethod` decorator
-
-ℹ Suggested fix
-20 20 | """docstring"""
-21 21 | ...
-22 22 |
- 23 |+ @notabstract
-23 24 | def empty_4(self): # error
-24 25 | """multiple ellipsis/pass"""
-25 26 | ...
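B027 targets empty methods inside an abstract base class that lack an abstract decorator; the updated snapshot above also drops the previous suggested-fix output. A minimal sketch of the flagged pattern next to the decorated form (names illustrative):

```python
from abc import ABC, abstractmethod


class AbstractClass(ABC):
    def empty_1(self):  # B027: empty body with no abstract decorator
        ...

    @abstractmethod
    def empty_2(self):  # not flagged: explicitly marked abstract
        ...
```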
diff --git a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B027_B027_extended.py.snap b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B027_B027_extended.py.snap
deleted file mode 100644
index 2ee081c6f5c54..0000000000000
--- a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B027_B027_extended.py.snap
+++ /dev/null
@@ -1,122 +0,0 @@
----
-source: crates/ruff/src/rules/flake8_bugbear/mod.rs
----
-B027_extended.py:9:5: B027 [*] `AbstractClass.empty_1` is an empty method in an abstract base class, but has no abstract decorator
- |
- 9 | class AbstractClass(ABC):
-10 | def empty_1(self): # error
- | _____^
-11 | | ...
- | |___________^ B027
-12 |
-13 | def empty_2(self): # error
- |
- = help: Add the `@abstractmethod` decorator
-
-ℹ Suggested fix
-2 2 | Should emit:
-3 3 | B027 - on lines 13, 16, 19, 23
-4 4 | """
-5 |-from abc import ABC
- 5 |+from abc import ABC, abstractmethod
-6 6 |
-7 7 |
-8 8 | class AbstractClass(ABC):
- 9 |+ @abstractmethod
-9 10 | def empty_1(self): # error
-10 11 | ...
-11 12 |
-
-B027_extended.py:12:5: B027 [*] `AbstractClass.empty_2` is an empty method in an abstract base class, but has no abstract decorator
- |
-12 | ...
-13 |
-14 | def empty_2(self): # error
- | _____^
-15 | | pass
- | |____________^ B027
-16 |
-17 | def body_1(self):
- |
- = help: Add the `@abstractmethod` decorator
-
-ℹ Suggested fix
-2 2 | Should emit:
-3 3 | B027 - on lines 13, 16, 19, 23
-4 4 | """
-5 |-from abc import ABC
- 5 |+from abc import ABC, abstractmethod
-6 6 |
-7 7 |
-8 8 | class AbstractClass(ABC):
-9 9 | def empty_1(self): # error
-10 10 | ...
-11 11 |
- 12 |+ @abstractmethod
-12 13 | def empty_2(self): # error
-13 14 | pass
-14 15 |
-
-B027_extended.py:25:9: B027 [*] `InnerAbstractClass.empty_1` is an empty method in an abstract base class, but has no abstract decorator
- |
-25 | def foo():
-26 | class InnerAbstractClass(ABC):
-27 | def empty_1(self): # error
- | _________^
-28 | | ...
- | |_______________^ B027
-29 |
-30 | def empty_2(self): # error
- |
- = help: Add the `@abstractmethod` decorator
-
-ℹ Suggested fix
-2 2 | Should emit:
-3 3 | B027 - on lines 13, 16, 19, 23
-4 4 | """
-5 |-from abc import ABC
- 5 |+from abc import ABC, abstractmethod
-6 6 |
-7 7 |
-8 8 | class AbstractClass(ABC):
---------------------------------------------------------------------------------
-22 22 |
-23 23 | def foo():
-24 24 | class InnerAbstractClass(ABC):
- 25 |+ @abstractmethod
-25 26 | def empty_1(self): # error
-26 27 | ...
-27 28 |
-
-B027_extended.py:28:9: B027 [*] `InnerAbstractClass.empty_2` is an empty method in an abstract base class, but has no abstract decorator
- |
-28 | ...
-29 |
-30 | def empty_2(self): # error
- | _________^
-31 | | pass
- | |________________^ B027
-32 |
-33 | def body_1(self):
- |
- = help: Add the `@abstractmethod` decorator
-
-ℹ Suggested fix
-2 2 | Should emit:
-3 3 | B027 - on lines 13, 16, 19, 23
-4 4 | """
-5 |-from abc import ABC
- 5 |+from abc import ABC, abstractmethod
-6 6 |
-7 7 |
-8 8 | class AbstractClass(ABC):
---------------------------------------------------------------------------------
-25 25 | def empty_1(self): # error
-26 26 | ...
-27 27 |
- 28 |+ @abstractmethod
-28 29 | def empty_2(self): # error
-29 30 | pass
-30 31 |
-
-
diff --git a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B033_B033.py.snap b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B033_B033.py.snap
new file mode 100644
index 0000000000000..a71540f42f781
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B033_B033.py.snap
@@ -0,0 +1,23 @@
+---
+source: crates/ruff/src/rules/flake8_bugbear/mod.rs
+---
+B033.py:4:35: B033 Sets should not contain duplicate item `"value1"`
+ |
+4 | # Errors.
+5 | ###
+6 | incorrect_set = {"value1", 23, 5, "value1"}
+ | ^^^^^^^^ B033
+7 | incorrect_set = {1, 1}
+ |
+
+B033.py:5:21: B033 Sets should not contain duplicate item `1`
+ |
+5 | ###
+6 | incorrect_set = {"value1", 23, 5, "value1"}
+7 | incorrect_set = {1, 1}
+ | ^ B033
+8 |
+9 | ###
+ |
+
+
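The new B033 snapshot exercises duplicate items in set literals. A small sketch of the two flagged lines and their deduplicated equivalents:

```python
# Flagged by B033: "value1" and 1 each appear twice in a set literal.
incorrect_set = {"value1", 23, 5, "value1"}
incorrect_set = {1, 1}

# Duplicates are collapsed at runtime anyway, so the literals can simply
# omit them.
correct_set = {"value1", 23, 5}
correct_set = {1}
```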
diff --git a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B905_B905.py.snap b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B905_B905.py.snap
index d18f3715463d7..76dcda7de4bb9 100644
--- a/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B905_B905.py.snap
+++ b/crates/ruff/src/rules/flake8_bugbear/snapshots/ruff__rules__flake8_bugbear__tests__B905_B905.py.snap
@@ -1,70 +1,88 @@
---
source: crates/ruff/src/rules/flake8_bugbear/mod.rs
---
-B905.py:1:1: B905 `zip()` without an explicit `strict=` parameter
+B905.py:4:1: B905 `zip()` without an explicit `strict=` parameter
|
-1 | zip()
+4 | # Errors
+5 | zip()
| ^^^^^ B905
-2 | zip(range(3))
-3 | zip("a", "b")
+6 | zip(range(3))
+7 | zip("a", "b")
|
-B905.py:2:1: B905 `zip()` without an explicit `strict=` parameter
+B905.py:5:1: B905 `zip()` without an explicit `strict=` parameter
|
-2 | zip()
-3 | zip(range(3))
+5 | # Errors
+6 | zip()
+7 | zip(range(3))
| ^^^^^^^^^^^^^ B905
-4 | zip("a", "b")
-5 | zip("a", "b", *zip("c"))
+8 | zip("a", "b")
+9 | zip("a", "b", *zip("c"))
|
-B905.py:3:1: B905 `zip()` without an explicit `strict=` parameter
- |
-3 | zip()
-4 | zip(range(3))
-5 | zip("a", "b")
- | ^^^^^^^^^^^^^ B905
-6 | zip("a", "b", *zip("c"))
-7 | zip(zip("a"), strict=False)
- |
+B905.py:6:1: B905 `zip()` without an explicit `strict=` parameter
+ |
+ 6 | zip()
+ 7 | zip(range(3))
+ 8 | zip("a", "b")
+ | ^^^^^^^^^^^^^ B905
+ 9 | zip("a", "b", *zip("c"))
+10 | zip(zip("a"), strict=False)
+ |
-B905.py:4:1: B905 `zip()` without an explicit `strict=` parameter
- |
-4 | zip(range(3))
-5 | zip("a", "b")
-6 | zip("a", "b", *zip("c"))
- | ^^^^^^^^^^^^^^^^^^^^^^^^ B905
-7 | zip(zip("a"), strict=False)
-8 | zip(zip("a", strict=True))
- |
+B905.py:7:1: B905 `zip()` without an explicit `strict=` parameter
+ |
+ 7 | zip(range(3))
+ 8 | zip("a", "b")
+ 9 | zip("a", "b", *zip("c"))
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ B905
+10 | zip(zip("a"), strict=False)
+11 | zip(zip("a", strict=True))
+ |
-B905.py:4:16: B905 `zip()` without an explicit `strict=` parameter
- |
-4 | zip(range(3))
-5 | zip("a", "b")
-6 | zip("a", "b", *zip("c"))
- | ^^^^^^^^ B905
-7 | zip(zip("a"), strict=False)
-8 | zip(zip("a", strict=True))
- |
+B905.py:7:16: B905 `zip()` without an explicit `strict=` parameter
+ |
+ 7 | zip(range(3))
+ 8 | zip("a", "b")
+ 9 | zip("a", "b", *zip("c"))
+ | ^^^^^^^^ B905
+10 | zip(zip("a"), strict=False)
+11 | zip(zip("a", strict=True))
+ |
-B905.py:5:5: B905 `zip()` without an explicit `strict=` parameter
- |
-5 | zip("a", "b")
-6 | zip("a", "b", *zip("c"))
-7 | zip(zip("a"), strict=False)
- | ^^^^^^^^ B905
-8 | zip(zip("a", strict=True))
- |
+B905.py:8:5: B905 `zip()` without an explicit `strict=` parameter
+ |
+ 8 | zip("a", "b")
+ 9 | zip("a", "b", *zip("c"))
+10 | zip(zip("a"), strict=False)
+ | ^^^^^^^^ B905
+11 | zip(zip("a", strict=True))
+ |
-B905.py:6:1: B905 `zip()` without an explicit `strict=` parameter
+B905.py:9:1: B905 `zip()` without an explicit `strict=` parameter
|
- 6 | zip("a", "b", *zip("c"))
- 7 | zip(zip("a"), strict=False)
- 8 | zip(zip("a", strict=True))
+ 9 | zip("a", "b", *zip("c"))
+10 | zip(zip("a"), strict=False)
+11 | zip(zip("a", strict=True))
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ B905
- 9 |
-10 | zip(range(3), strict=True)
+12 |
+13 | # OK
+ |
+
+B905.py:24:1: B905 `zip()` without an explicit `strict=` parameter
+ |
+24 | # Errors (limited iterators).
+25 | zip([1, 2, 3], repeat(1, 1))
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B905
+26 | zip([1, 2, 3], repeat(1, times=4))
+ |
+
+B905.py:25:1: B905 `zip()` without an explicit `strict=` parameter
+ |
+25 | # Errors (limited iterators).
+26 | zip([1, 2, 3], repeat(1, 1))
+27 | zip([1, 2, 3], repeat(1, times=4))
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B905
|
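The added B905 cases cover `itertools.repeat` with an explicit count, which produces a finite iterator. A minimal sketch of a flagged call and the explicit `strict=` form (the keyword requires Python 3.10+; lengths here are chosen to match, illustratively):

```python
from itertools import repeat

# Flagged by B905: no explicit strict= keyword.
pairs = list(zip([1, 2, 3], repeat(1, 3)))

# Preferred: state the intent explicitly (Python 3.10+).
pairs_strict = list(zip([1, 2, 3], repeat(1, 3), strict=True))
```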
diff --git a/crates/ruff/src/rules/flake8_builtins/helpers.rs b/crates/ruff/src/rules/flake8_builtins/helpers.rs
new file mode 100644
index 0000000000000..1f1eb0f3baffd
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_builtins/helpers.rs
@@ -0,0 +1,45 @@
+use rustpython_parser::ast::{Excepthandler, Expr, Ranged, Stmt};
+
+use ruff_python_ast::helpers::identifier_range;
+use ruff_python_ast::source_code::Locator;
+use ruff_python_stdlib::builtins::BUILTINS;
+use ruff_text_size::TextRange;
+
+pub(super) fn shadows_builtin(name: &str, ignorelist: &[String]) -> bool {
+ BUILTINS.contains(&name) && ignorelist.iter().all(|ignore| ignore != name)
+}
+
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub(crate) enum AnyShadowing<'a> {
+ Expression(&'a Expr),
+ Statement(&'a Stmt),
+ ExceptHandler(&'a Excepthandler),
+}
+
+impl AnyShadowing<'_> {
+ pub(crate) fn range(self, locator: &Locator) -> TextRange {
+ match self {
+ AnyShadowing::Expression(expr) => expr.range(),
+ AnyShadowing::Statement(stmt) => identifier_range(stmt, locator),
+ AnyShadowing::ExceptHandler(handler) => handler.range(),
+ }
+ }
+}
+
+impl<'a> From<&'a Stmt> for AnyShadowing<'a> {
+ fn from(value: &'a Stmt) -> Self {
+ AnyShadowing::Statement(value)
+ }
+}
+
+impl<'a> From<&'a Expr> for AnyShadowing<'a> {
+ fn from(value: &'a Expr) -> Self {
+ AnyShadowing::Expression(value)
+ }
+}
+
+impl<'a> From<&'a Excepthandler> for AnyShadowing<'a> {
+ fn from(value: &'a Excepthandler) -> Self {
+ AnyShadowing::ExceptHandler(value)
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_builtins/mod.rs b/crates/ruff/src/rules/flake8_builtins/mod.rs
index c05849db95473..9321db78d7654 100644
--- a/crates/ruff/src/rules/flake8_builtins/mod.rs
+++ b/crates/ruff/src/rules/flake8_builtins/mod.rs
@@ -1,24 +1,23 @@
//! Rules from [flake8-builtins](https://pypi.org/project/flake8-builtins/).
+pub(crate) mod helpers;
pub(crate) mod rules;
pub mod settings;
-pub(crate) mod types;
#[cfg(test)]
mod tests {
use std::path::Path;
- use crate::assert_messages;
use anyhow::Result;
-
use test_case::test_case;
+ use crate::assert_messages;
use crate::registry::Rule;
use crate::settings::Settings;
use crate::test::test_path;
- #[test_case(Rule::BuiltinVariableShadowing, Path::new("A001.py"); "A001")]
- #[test_case(Rule::BuiltinArgumentShadowing, Path::new("A002.py"); "A002")]
- #[test_case(Rule::BuiltinAttributeShadowing, Path::new("A003.py"); "A003")]
+ #[test_case(Rule::BuiltinVariableShadowing, Path::new("A001.py"))]
+ #[test_case(Rule::BuiltinArgumentShadowing, Path::new("A002.py"))]
+ #[test_case(Rule::BuiltinAttributeShadowing, Path::new("A003.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
let diagnostics = test_path(
@@ -29,9 +28,9 @@ mod tests {
Ok(())
}
- #[test_case(Rule::BuiltinVariableShadowing, Path::new("A001.py"); "A001")]
- #[test_case(Rule::BuiltinArgumentShadowing, Path::new("A002.py"); "A002")]
- #[test_case(Rule::BuiltinAttributeShadowing, Path::new("A003.py"); "A003")]
+ #[test_case(Rule::BuiltinVariableShadowing, Path::new("A001.py"))]
+ #[test_case(Rule::BuiltinArgumentShadowing, Path::new("A002.py"))]
+ #[test_case(Rule::BuiltinAttributeShadowing, Path::new("A003.py"))]
fn builtins_ignorelist(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(
"{}_{}_builtins_ignorelist",
diff --git a/crates/ruff/src/rules/flake8_builtins/rules.rs b/crates/ruff/src/rules/flake8_builtins/rules.rs
deleted file mode 100644
index 9c87506e808d5..0000000000000
--- a/crates/ruff/src/rules/flake8_builtins/rules.rs
+++ /dev/null
@@ -1,198 +0,0 @@
-use rustpython_parser::ast::Located;
-
-use ruff_diagnostics::Violation;
-use ruff_diagnostics::{Diagnostic, DiagnosticKind};
-use ruff_macros::{derive_message_formats, violation};
-use ruff_python_stdlib::builtins::BUILTINS;
-
-use super::types::ShadowingType;
-
-/// ## What it does
-/// Checks for variable (and function) assignments that use the same name
-/// as a builtin.
-///
-/// ## Why is this bad?
-/// Reusing a builtin name for the name of a variable increases the
-/// difficulty of reading and maintaining the code, and can cause
-/// non-obvious errors, as readers may mistake the variable for the
-/// builtin and vice versa.
-///
-/// Builtins can be marked as exceptions to this rule via the
-/// [`flake8-builtins.builtins-ignorelist`] configuration option.
-///
-/// ## Options
-///
-/// - `flake8-builtins.builtins-ignorelist`
-///
-/// ## Example
-/// ```python
-/// def find_max(list_of_lists):
-/// max = 0
-/// for flat_list in list_of_lists:
-/// for value in flat_list:
-/// max = max(max, value) # TypeError: 'int' object is not callable
-/// return max
-/// ```
-///
-/// Use instead:
-/// ```python
-/// def find_max(list_of_lists):
-/// result = 0
-/// for flat_list in list_of_lists:
-/// for value in flat_list:
-/// result = max(result, value)
-/// return result
-/// ```
-///
-/// - [_Why is it a bad idea to name a variable `id` in Python?_](https://stackoverflow.com/questions/77552/id-is-a-bad-variable-name-in-python)
-#[violation]
-pub struct BuiltinVariableShadowing {
- pub name: String,
-}
-
-impl Violation for BuiltinVariableShadowing {
- #[derive_message_formats]
- fn message(&self) -> String {
- let BuiltinVariableShadowing { name } = self;
- format!("Variable `{name}` is shadowing a python builtin")
- }
-}
-
-/// ## What it does
-/// Checks for any function arguments that use the same name as a builtin.
-///
-/// ## Why is this bad?
-/// Reusing a builtin name for the name of an argument increases the
-/// difficulty of reading and maintaining the code, and can cause
-/// non-obvious errors, as readers may mistake the argument for the
-/// builtin and vice versa.
-///
-/// Builtins can be marked as exceptions to this rule via the
-/// [`flake8-builtins.builtins-ignorelist`] configuration option.
-///
-/// ## Options
-///
-/// - `flake8-builtins.builtins-ignorelist`
-///
-/// ## Example
-/// ```python
-/// def remove_duplicates(list, list2):
-/// result = set()
-/// for value in list:
-/// result.add(value)
-/// for value in list2:
-/// result.add(value)
-/// return list(result) # TypeError: 'list' object is not callable
-/// ```
-///
-/// Use instead:
-/// ```python
-/// def remove_duplicates(list1, list2):
-/// result = set()
-/// for value in list1:
-/// result.add(value)
-/// for value in list2:
-/// result.add(value)
-/// return list(result)
-/// ```
-///
-/// ## References
-/// - [_Is it bad practice to use a built-in function name as an attribute or method identifier?_](https://stackoverflow.com/questions/9109333/is-it-bad-practice-to-use-a-built-in-function-name-as-an-attribute-or-method-ide)
-/// - [_Why is it a bad idea to name a variable `id` in Python?_](https://stackoverflow.com/questions/77552/id-is-a-bad-variable-name-in-python)
-#[violation]
-pub struct BuiltinArgumentShadowing {
- pub name: String,
-}
-
-impl Violation for BuiltinArgumentShadowing {
- #[derive_message_formats]
- fn message(&self) -> String {
- let BuiltinArgumentShadowing { name } = self;
- format!("Argument `{name}` is shadowing a python builtin")
- }
-}
-
-/// ## What it does
-/// Checks for any class attributes that use the same name as a builtin.
-///
-/// ## Why is this bad?
-/// Reusing a builtin name for the name of an attribute increases the
-/// difficulty of reading and maintaining the code, and can cause
-/// non-obvious errors, as readers may mistake the attribute for the
-/// builtin and vice versa.
-///
-/// Builtins can be marked as exceptions to this rule via the
-/// [`flake8-builtins.builtins-ignorelist`] configuration option, or
-/// converted to the appropriate dunder method.
-///
-/// ## Options
-///
-/// - `flake8-builtins.builtins-ignorelist`
-///
-/// ## Example
-/// ```python
-/// class Shadow:
-/// def int():
-/// return 0
-/// ```
-///
-/// Use instead:
-/// ```python
-/// class Shadow:
-/// def to_int():
-/// return 0
-/// ```
-///
-/// Or:
-/// ```python
-/// class Shadow:
-/// # Callable as `int(shadow)`
-/// def __int__():
-/// return 0
-/// ```
-///
-/// ## References
-/// - [_Is it bad practice to use a built-in function name as an attribute or method identifier?_](https://stackoverflow.com/questions/9109333/is-it-bad-practice-to-use-a-built-in-function-name-as-an-attribute-or-method-ide)
-/// - [_Why is it a bad idea to name a variable `id` in Python?_](https://stackoverflow.com/questions/77552/id-is-a-bad-variable-name-in-python)
-#[violation]
-pub struct BuiltinAttributeShadowing {
- pub name: String,
-}
-
-impl Violation for BuiltinAttributeShadowing {
- #[derive_message_formats]
- fn message(&self) -> String {
- let BuiltinAttributeShadowing { name } = self;
- format!("Class attribute `{name}` is shadowing a python builtin")
- }
-}
-
-/// Check builtin name shadowing.
-pub fn builtin_shadowing<T>(
- name: &str,
- located: &Located<T>,
- node_type: ShadowingType,
- ignorelist: &[String],
-) -> Option<Diagnostic> {
- if BUILTINS.contains(&name) && !ignorelist.contains(&name.to_string()) {
- Some(Diagnostic::new::<DiagnosticKind>(
- match node_type {
- ShadowingType::Variable => BuiltinVariableShadowing {
- name: name.to_string(),
- }
- .into(),
- ShadowingType::Argument => BuiltinArgumentShadowing {
- name: name.to_string(),
- }
- .into(),
- ShadowingType::Attribute => BuiltinAttributeShadowing {
- name: name.to_string(),
- }
- .into(),
- },
- located.range(),
- ))
- } else {
- None
- }
-}
diff --git a/crates/ruff/src/rules/flake8_builtins/rules/builtin_argument_shadowing.rs b/crates/ruff/src/rules/flake8_builtins/rules/builtin_argument_shadowing.rs
new file mode 100644
index 0000000000000..e6aea40d3d1ec
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_builtins/rules/builtin_argument_shadowing.rs
@@ -0,0 +1,78 @@
+use rustpython_parser::ast::{Arg, Ranged};
+
+use ruff_diagnostics::Diagnostic;
+use ruff_diagnostics::Violation;
+use ruff_macros::{derive_message_formats, violation};
+
+use crate::checkers::ast::Checker;
+
+use super::super::helpers::shadows_builtin;
+
+/// ## What it does
+/// Checks for any function arguments that use the same name as a builtin.
+///
+/// ## Why is this bad?
+/// Reusing a builtin name for the name of an argument increases the
+/// difficulty of reading and maintaining the code, and can cause
+/// non-obvious errors, as readers may mistake the argument for the
+/// builtin and vice versa.
+///
+/// Builtins can be marked as exceptions to this rule via the
+/// [`flake8-builtins.builtins-ignorelist`] configuration option.
+///
+/// ## Options
+///
+/// - `flake8-builtins.builtins-ignorelist`
+///
+/// ## Example
+/// ```python
+/// def remove_duplicates(list, list2):
+/// result = set()
+/// for value in list:
+/// result.add(value)
+/// for value in list2:
+/// result.add(value)
+/// return list(result) # TypeError: 'list' object is not callable
+/// ```
+///
+/// Use instead:
+/// ```python
+/// def remove_duplicates(list1, list2):
+/// result = set()
+/// for value in list1:
+/// result.add(value)
+/// for value in list2:
+/// result.add(value)
+/// return list(result)
+/// ```
+///
+/// ## References
+/// - [_Is it bad practice to use a built-in function name as an attribute or method identifier?_](https://stackoverflow.com/questions/9109333/is-it-bad-practice-to-use-a-built-in-function-name-as-an-attribute-or-method-ide)
+/// - [_Why is it a bad idea to name a variable `id` in Python?_](https://stackoverflow.com/questions/77552/id-is-a-bad-variable-name-in-python)
+#[violation]
+pub struct BuiltinArgumentShadowing {
+ name: String,
+}
+
+impl Violation for BuiltinArgumentShadowing {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ let BuiltinArgumentShadowing { name } = self;
+ format!("Argument `{name}` is shadowing a Python builtin")
+ }
+}
+
+/// A002
+pub(crate) fn builtin_argument_shadowing(checker: &mut Checker, argument: &Arg) {
+ if shadows_builtin(
+ argument.arg.as_str(),
+ &checker.settings.flake8_builtins.builtins_ignorelist,
+ ) {
+ checker.diagnostics.push(Diagnostic::new(
+ BuiltinArgumentShadowing {
+ name: argument.arg.to_string(),
+ },
+ argument.range(),
+ ));
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_builtins/rules/builtin_attribute_shadowing.rs b/crates/ruff/src/rules/flake8_builtins/rules/builtin_attribute_shadowing.rs
new file mode 100644
index 0000000000000..701fa4edd3c93
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_builtins/rules/builtin_attribute_shadowing.rs
@@ -0,0 +1,78 @@
+use ruff_diagnostics::Diagnostic;
+use ruff_diagnostics::Violation;
+use ruff_macros::{derive_message_formats, violation};
+
+use crate::checkers::ast::Checker;
+
+use super::super::helpers::{shadows_builtin, AnyShadowing};
+
+/// ## What it does
+/// Checks for any class attributes that use the same name as a builtin.
+///
+/// ## Why is this bad?
+/// Reusing a builtin name for the name of an attribute increases the
+/// difficulty of reading and maintaining the code, and can cause
+/// non-obvious errors, as readers may mistake the attribute for the
+/// builtin and vice versa.
+///
+/// Builtins can be marked as exceptions to this rule via the
+/// [`flake8-builtins.builtins-ignorelist`] configuration option, or
+/// converted to the appropriate dunder method.
+///
+/// ## Options
+///
+/// - `flake8-builtins.builtins-ignorelist`
+///
+/// ## Example
+/// ```python
+/// class Shadow:
+/// def int():
+/// return 0
+/// ```
+///
+/// Use instead:
+/// ```python
+/// class Shadow:
+/// def to_int():
+/// return 0
+/// ```
+///
+/// Or:
+/// ```python
+/// class Shadow:
+/// # Callable as `int(shadow)`
+/// def __int__():
+/// return 0
+/// ```
+///
+/// ## References
+/// - [_Is it bad practice to use a built-in function name as an attribute or method identifier?_](https://stackoverflow.com/questions/9109333/is-it-bad-practice-to-use-a-built-in-function-name-as-an-attribute-or-method-ide)
+/// - [_Why is it a bad idea to name a variable `id` in Python?_](https://stackoverflow.com/questions/77552/id-is-a-bad-variable-name-in-python)
+#[violation]
+pub struct BuiltinAttributeShadowing {
+ name: String,
+}
+
+impl Violation for BuiltinAttributeShadowing {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ let BuiltinAttributeShadowing { name } = self;
+ format!("Class attribute `{name}` is shadowing a Python builtin")
+ }
+}
+
+/// A003
+pub(crate) fn builtin_attribute_shadowing(
+ checker: &mut Checker,
+ name: &str,
+ shadowing: AnyShadowing,
+) {
+ if shadows_builtin(name, &checker.settings.flake8_builtins.builtins_ignorelist) {
+ checker.diagnostics.push(Diagnostic::new(
+ BuiltinAttributeShadowing {
+ name: name.to_string(),
+ },
+ shadowing.range(checker.locator),
+ ));
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_builtins/rules/builtin_variable_shadowing.rs b/crates/ruff/src/rules/flake8_builtins/rules/builtin_variable_shadowing.rs
new file mode 100644
index 0000000000000..a965af53cafb1
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_builtins/rules/builtin_variable_shadowing.rs
@@ -0,0 +1,74 @@
+use ruff_diagnostics::Diagnostic;
+use ruff_diagnostics::Violation;
+use ruff_macros::{derive_message_formats, violation};
+
+use crate::checkers::ast::Checker;
+
+use super::super::helpers::{shadows_builtin, AnyShadowing};
+
+/// ## What it does
+/// Checks for variable (and function) assignments that use the same name
+/// as a builtin.
+///
+/// ## Why is this bad?
+/// Reusing a builtin name for the name of a variable increases the
+/// difficulty of reading and maintaining the code, and can cause
+/// non-obvious errors, as readers may mistake the variable for the
+/// builtin and vice versa.
+///
+/// Builtins can be marked as exceptions to this rule via the
+/// [`flake8-builtins.builtins-ignorelist`] configuration option.
+///
+/// ## Options
+///
+/// - `flake8-builtins.builtins-ignorelist`
+///
+/// ## Example
+/// ```python
+/// def find_max(list_of_lists):
+/// max = 0
+/// for flat_list in list_of_lists:
+/// for value in flat_list:
+/// max = max(max, value) # TypeError: 'int' object is not callable
+/// return max
+/// ```
+///
+/// Use instead:
+/// ```python
+/// def find_max(list_of_lists):
+/// result = 0
+/// for flat_list in list_of_lists:
+/// for value in flat_list:
+/// result = max(result, value)
+/// return result
+/// ```
+///
+/// - [_Why is it a bad idea to name a variable `id` in Python?_](https://stackoverflow.com/questions/77552/id-is-a-bad-variable-name-in-python)
+#[violation]
+pub struct BuiltinVariableShadowing {
+ name: String,
+}
+
+impl Violation for BuiltinVariableShadowing {
+ #[derive_message_formats]
+ fn message(&self) -> String {
+ let BuiltinVariableShadowing { name } = self;
+ format!("Variable `{name}` is shadowing a Python builtin")
+ }
+}
+
+/// A001
+pub(crate) fn builtin_variable_shadowing(
+ checker: &mut Checker,
+ name: &str,
+ shadowing: AnyShadowing,
+) {
+ if shadows_builtin(name, &checker.settings.flake8_builtins.builtins_ignorelist) {
+ checker.diagnostics.push(Diagnostic::new(
+ BuiltinVariableShadowing {
+ name: name.to_string(),
+ },
+ shadowing.range(checker.locator),
+ ));
+ }
+}
diff --git a/crates/ruff/src/rules/flake8_builtins/rules/mod.rs b/crates/ruff/src/rules/flake8_builtins/rules/mod.rs
new file mode 100644
index 0000000000000..f9b8c3c3d7d48
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_builtins/rules/mod.rs
@@ -0,0 +1,9 @@
+pub(crate) use builtin_argument_shadowing::{builtin_argument_shadowing, BuiltinArgumentShadowing};
+pub(crate) use builtin_attribute_shadowing::{
+ builtin_attribute_shadowing, BuiltinAttributeShadowing,
+};
+pub(crate) use builtin_variable_shadowing::{builtin_variable_shadowing, BuiltinVariableShadowing};
+
+mod builtin_argument_shadowing;
+mod builtin_attribute_shadowing;
+mod builtin_variable_shadowing;
diff --git a/crates/ruff/src/rules/flake8_builtins/rules/rules.rs b/crates/ruff/src/rules/flake8_builtins/rules/rules.rs
new file mode 100644
index 0000000000000..8b137891791fe
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_builtins/rules/rules.rs
@@ -0,0 +1 @@
+
diff --git a/crates/ruff/src/rules/flake8_builtins/settings.rs b/crates/ruff/src/rules/flake8_builtins/settings.rs
index f5d53cbdc8122..2a512626b9d90 100644
--- a/crates/ruff/src/rules/flake8_builtins/settings.rs
+++ b/crates/ruff/src/rules/flake8_builtins/settings.rs
@@ -1,18 +1,18 @@
//! Settings for the `flake8-builtins` plugin.
-use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use ruff_macros::{CacheKey, ConfigurationOptions};
+use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
- Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, JsonSchema,
+ Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8BuiltinsOptions"
)]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"[]"#,
diff --git a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A001_A001.py.snap b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A001_A001.py.snap
index 41016ae017733..d65e5fa47670a 100644
--- a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A001_A001.py.snap
+++ b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A001_A001.py.snap
@@ -1,7 +1,7 @@
---
source: crates/ruff/src/rules/flake8_builtins/mod.rs
---
-A001.py:1:1: A001 Variable `sum` is shadowing a python builtin
+A001.py:1:1: A001 Variable `sum` is shadowing a Python builtin
|
1 | import some as sum
| ^^^^^^^^^^^^^^^^^^ A001
@@ -9,7 +9,7 @@ A001.py:1:1: A001 Variable `sum` is shadowing a python builtin
3 | from directory import new as dir
|
-A001.py:2:1: A001 Variable `int` is shadowing a python builtin
+A001.py:2:1: A001 Variable `int` is shadowing a Python builtin
|
2 | import some as sum
3 | from some import other as int
@@ -17,7 +17,7 @@ A001.py:2:1: A001 Variable `int` is shadowing a python builtin
4 | from directory import new as dir
|
-A001.py:3:1: A001 Variable `dir` is shadowing a python builtin
+A001.py:3:1: A001 Variable `dir` is shadowing a Python builtin
|
3 | import some as sum
4 | from some import other as int
@@ -27,7 +27,7 @@ A001.py:3:1: A001 Variable `dir` is shadowing a python builtin
7 | print = 1
|
-A001.py:5:1: A001 Variable `print` is shadowing a python builtin
+A001.py:5:1: A001 Variable `print` is shadowing a Python builtin
|
5 | from directory import new as dir
6 |
@@ -37,7 +37,7 @@ A001.py:5:1: A001 Variable `print` is shadowing a python builtin
9 | (complex := 3)
|
-A001.py:6:1: A001 Variable `copyright` is shadowing a python builtin
+A001.py:6:1: A001 Variable `copyright` is shadowing a Python builtin
|
6 | print = 1
7 | copyright: 'annotation' = 2
@@ -46,7 +46,7 @@ A001.py:6:1: A001 Variable `copyright` is shadowing a python builtin
9 | float = object = 4
|
-A001.py:7:2: A001 Variable `complex` is shadowing a python builtin
+A001.py:7:2: A001 Variable `complex` is shadowing a Python builtin
|
7 | print = 1
8 | copyright: 'annotation' = 2
@@ -56,7 +56,7 @@ A001.py:7:2: A001 Variable `complex` is shadowing a python builtin
11 | min, max = 5, 6
|
-A001.py:8:1: A001 Variable `float` is shadowing a python builtin
+A001.py:8:1: A001 Variable `float` is shadowing a Python builtin
|
8 | copyright: 'annotation' = 2
9 | (complex := 3)
@@ -65,7 +65,7 @@ A001.py:8:1: A001 Variable `float` is shadowing a python builtin
11 | min, max = 5, 6
|
-A001.py:8:9: A001 Variable `object` is shadowing a python builtin
+A001.py:8:9: A001 Variable `object` is shadowing a Python builtin
|
8 | copyright: 'annotation' = 2
9 | (complex := 3)
@@ -74,7 +74,7 @@ A001.py:8:9: A001 Variable `object` is shadowing a python builtin
11 | min, max = 5, 6
|
-A001.py:9:1: A001 Variable `min` is shadowing a python builtin
+A001.py:9:1: A001 Variable `min` is shadowing a Python builtin
|
9 | (complex := 3)
10 | float = object = 4
@@ -84,7 +84,7 @@ A001.py:9:1: A001 Variable `min` is shadowing a python builtin
13 | id = 4
|
-A001.py:9:6: A001 Variable `max` is shadowing a python builtin
+A001.py:9:6: A001 Variable `max` is shadowing a Python builtin
|
9 | (complex := 3)
10 | float = object = 4
@@ -94,7 +94,7 @@ A001.py:9:6: A001 Variable `max` is shadowing a python builtin
13 | id = 4
|
-A001.py:11:1: A001 Variable `id` is shadowing a python builtin
+A001.py:11:1: A001 Variable `id` is shadowing a Python builtin
|
11 | min, max = 5, 6
12 |
@@ -104,29 +104,25 @@ A001.py:11:1: A001 Variable `id` is shadowing a python builtin
15 | def bytes():
|
-A001.py:13:1: A001 Variable `bytes` is shadowing a python builtin
+A001.py:13:5: A001 Variable `bytes` is shadowing a Python builtin
|
-13 | id = 4
-14 |
-15 | / def bytes():
-16 | | pass
- | |________^ A001
-17 |
-18 | class slice:
+13 | id = 4
+14 |
+15 | def bytes():
+ | ^^^^^ A001
+16 | pass
|
-A001.py:16:1: A001 Variable `slice` is shadowing a python builtin
+A001.py:16:7: A001 Variable `slice` is shadowing a Python builtin
|
-16 | pass
-17 |
-18 | / class slice:
-19 | | pass
- | |________^ A001
-20 |
-21 | try:
+16 | pass
+17 |
+18 | class slice:
+ | ^^^^^ A001
+19 | pass
|
-A001.py:21:1: A001 Variable `ValueError` is shadowing a python builtin
+A001.py:21:1: A001 Variable `ValueError` is shadowing a Python builtin
|
21 | try:
22 | ...
@@ -137,7 +133,7 @@ A001.py:21:1: A001 Variable `ValueError` is shadowing a python builtin
26 | for memoryview, *bytearray in []:
|
-A001.py:24:5: A001 Variable `memoryview` is shadowing a python builtin
+A001.py:24:5: A001 Variable `memoryview` is shadowing a Python builtin
|
24 | ...
25 |
@@ -146,7 +142,7 @@ A001.py:24:5: A001 Variable `memoryview` is shadowing a python builtin
27 | pass
|
-A001.py:24:18: A001 Variable `bytearray` is shadowing a python builtin
+A001.py:24:18: A001 Variable `bytearray` is shadowing a Python builtin
|
24 | ...
25 |
@@ -155,7 +151,7 @@ A001.py:24:18: A001 Variable `bytearray` is shadowing a python builtin
27 | pass
|
-A001.py:27:22: A001 Variable `str` is shadowing a python builtin
+A001.py:27:22: A001 Variable `str` is shadowing a Python builtin
|
27 | pass
28 |
@@ -164,7 +160,7 @@ A001.py:27:22: A001 Variable `str` is shadowing a python builtin
30 | pass
|
-A001.py:27:45: A001 Variable `all` is shadowing a python builtin
+A001.py:27:45: A001 Variable `all` is shadowing a Python builtin
|
27 | pass
28 |
@@ -173,7 +169,7 @@ A001.py:27:45: A001 Variable `all` is shadowing a python builtin
30 | pass
|
-A001.py:27:50: A001 Variable `any` is shadowing a python builtin
+A001.py:27:50: A001 Variable `any` is shadowing a Python builtin
|
27 | pass
28 |
@@ -182,7 +178,7 @@ A001.py:27:50: A001 Variable `any` is shadowing a python builtin
30 | pass
|
-A001.py:30:8: A001 Variable `sum` is shadowing a python builtin
+A001.py:30:8: A001 Variable `sum` is shadowing a Python builtin
|
30 | pass
31 |
diff --git a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A001_A001.py_builtins_ignorelist.snap b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A001_A001.py_builtins_ignorelist.snap
index ad582d411fb52..a709ce3c92a35 100644
--- a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A001_A001.py_builtins_ignorelist.snap
+++ b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A001_A001.py_builtins_ignorelist.snap
@@ -1,7 +1,7 @@
---
source: crates/ruff/src/rules/flake8_builtins/mod.rs
---
-A001.py:1:1: A001 Variable `sum` is shadowing a python builtin
+A001.py:1:1: A001 Variable `sum` is shadowing a Python builtin
|
1 | import some as sum
| ^^^^^^^^^^^^^^^^^^ A001
@@ -9,7 +9,7 @@ A001.py:1:1: A001 Variable `sum` is shadowing a python builtin
3 | from directory import new as dir
|
-A001.py:2:1: A001 Variable `int` is shadowing a python builtin
+A001.py:2:1: A001 Variable `int` is shadowing a Python builtin
|
2 | import some as sum
3 | from some import other as int
@@ -17,7 +17,7 @@ A001.py:2:1: A001 Variable `int` is shadowing a python builtin
4 | from directory import new as dir
|
-A001.py:5:1: A001 Variable `print` is shadowing a python builtin
+A001.py:5:1: A001 Variable `print` is shadowing a Python builtin
|
5 | from directory import new as dir
6 |
@@ -27,7 +27,7 @@ A001.py:5:1: A001 Variable `print` is shadowing a python builtin
9 | (complex := 3)
|
-A001.py:6:1: A001 Variable `copyright` is shadowing a python builtin
+A001.py:6:1: A001 Variable `copyright` is shadowing a Python builtin
|
6 | print = 1
7 | copyright: 'annotation' = 2
@@ -36,7 +36,7 @@ A001.py:6:1: A001 Variable `copyright` is shadowing a python builtin
9 | float = object = 4
|
-A001.py:7:2: A001 Variable `complex` is shadowing a python builtin
+A001.py:7:2: A001 Variable `complex` is shadowing a Python builtin
|
7 | print = 1
8 | copyright: 'annotation' = 2
@@ -46,7 +46,7 @@ A001.py:7:2: A001 Variable `complex` is shadowing a python builtin
11 | min, max = 5, 6
|
-A001.py:8:1: A001 Variable `float` is shadowing a python builtin
+A001.py:8:1: A001 Variable `float` is shadowing a Python builtin
|
8 | copyright: 'annotation' = 2
9 | (complex := 3)
@@ -55,7 +55,7 @@ A001.py:8:1: A001 Variable `float` is shadowing a python builtin
11 | min, max = 5, 6
|
-A001.py:8:9: A001 Variable `object` is shadowing a python builtin
+A001.py:8:9: A001 Variable `object` is shadowing a Python builtin
|
8 | copyright: 'annotation' = 2
9 | (complex := 3)
@@ -64,7 +64,7 @@ A001.py:8:9: A001 Variable `object` is shadowing a python builtin
11 | min, max = 5, 6
|
-A001.py:9:1: A001 Variable `min` is shadowing a python builtin
+A001.py:9:1: A001 Variable `min` is shadowing a Python builtin
|
9 | (complex := 3)
10 | float = object = 4
@@ -74,7 +74,7 @@ A001.py:9:1: A001 Variable `min` is shadowing a python builtin
13 | id = 4
|
-A001.py:9:6: A001 Variable `max` is shadowing a python builtin
+A001.py:9:6: A001 Variable `max` is shadowing a Python builtin
|
9 | (complex := 3)
10 | float = object = 4
@@ -84,29 +84,25 @@ A001.py:9:6: A001 Variable `max` is shadowing a python builtin
13 | id = 4
|
-A001.py:13:1: A001 Variable `bytes` is shadowing a python builtin
+A001.py:13:5: A001 Variable `bytes` is shadowing a Python builtin
|
-13 | id = 4
-14 |
-15 | / def bytes():
-16 | | pass
- | |________^ A001
-17 |
-18 | class slice:
+13 | id = 4
+14 |
+15 | def bytes():
+ | ^^^^^ A001
+16 | pass
|
-A001.py:16:1: A001 Variable `slice` is shadowing a python builtin
+A001.py:16:7: A001 Variable `slice` is shadowing a Python builtin
|
-16 | pass
-17 |
-18 | / class slice:
-19 | | pass
- | |________^ A001
-20 |
-21 | try:
+16 | pass
+17 |
+18 | class slice:
+ | ^^^^^ A001
+19 | pass
|
-A001.py:21:1: A001 Variable `ValueError` is shadowing a python builtin
+A001.py:21:1: A001 Variable `ValueError` is shadowing a Python builtin
|
21 | try:
22 | ...
@@ -117,7 +113,7 @@ A001.py:21:1: A001 Variable `ValueError` is shadowing a python builtin
26 | for memoryview, *bytearray in []:
|
-A001.py:24:5: A001 Variable `memoryview` is shadowing a python builtin
+A001.py:24:5: A001 Variable `memoryview` is shadowing a Python builtin
|
24 | ...
25 |
@@ -126,7 +122,7 @@ A001.py:24:5: A001 Variable `memoryview` is shadowing a python builtin
27 | pass
|
-A001.py:24:18: A001 Variable `bytearray` is shadowing a python builtin
+A001.py:24:18: A001 Variable `bytearray` is shadowing a Python builtin
|
24 | ...
25 |
@@ -135,7 +131,7 @@ A001.py:24:18: A001 Variable `bytearray` is shadowing a python builtin
27 | pass
|
-A001.py:27:22: A001 Variable `str` is shadowing a python builtin
+A001.py:27:22: A001 Variable `str` is shadowing a Python builtin
|
27 | pass
28 |
@@ -144,7 +140,7 @@ A001.py:27:22: A001 Variable `str` is shadowing a python builtin
30 | pass
|
-A001.py:27:45: A001 Variable `all` is shadowing a python builtin
+A001.py:27:45: A001 Variable `all` is shadowing a Python builtin
|
27 | pass
28 |
@@ -153,7 +149,7 @@ A001.py:27:45: A001 Variable `all` is shadowing a python builtin
30 | pass
|
-A001.py:27:50: A001 Variable `any` is shadowing a python builtin
+A001.py:27:50: A001 Variable `any` is shadowing a Python builtin
|
27 | pass
28 |
@@ -162,7 +158,7 @@ A001.py:27:50: A001 Variable `any` is shadowing a python builtin
30 | pass
|
-A001.py:30:8: A001 Variable `sum` is shadowing a python builtin
+A001.py:30:8: A001 Variable `sum` is shadowing a Python builtin
|
30 | pass
31 |
diff --git a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A002_A002.py.snap b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A002_A002.py.snap
index b89b75c589128..5d8e3f3883c2f 100644
--- a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A002_A002.py.snap
+++ b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A002_A002.py.snap
@@ -1,49 +1,49 @@
---
source: crates/ruff/src/rules/flake8_builtins/mod.rs
---
-A002.py:1:11: A002 Argument `str` is shadowing a python builtin
+A002.py:1:11: A002 Argument `str` is shadowing a Python builtin
|
1 | def func1(str, /, type, *complex, Exception, **getattr):
| ^^^ A002
2 | pass
|
-A002.py:1:19: A002 Argument `type` is shadowing a python builtin
+A002.py:1:19: A002 Argument `type` is shadowing a Python builtin
|
1 | def func1(str, /, type, *complex, Exception, **getattr):
| ^^^^ A002
2 | pass
|
-A002.py:1:26: A002 Argument `complex` is shadowing a python builtin
+A002.py:1:26: A002 Argument `complex` is shadowing a Python builtin
|
1 | def func1(str, /, type, *complex, Exception, **getattr):
| ^^^^^^^ A002
2 | pass
|
-A002.py:1:35: A002 Argument `Exception` is shadowing a python builtin
+A002.py:1:35: A002 Argument `Exception` is shadowing a Python builtin
|
1 | def func1(str, /, type, *complex, Exception, **getattr):
| ^^^^^^^^^ A002
2 | pass
|
-A002.py:1:48: A002 Argument `getattr` is shadowing a python builtin
+A002.py:1:48: A002 Argument `getattr` is shadowing a Python builtin
|
1 | def func1(str, /, type, *complex, Exception, **getattr):
| ^^^^^^^ A002
2 | pass
|
-A002.py:5:17: A002 Argument `bytes` is shadowing a python builtin
+A002.py:5:17: A002 Argument `bytes` is shadowing a Python builtin
|
5 | async def func2(bytes):
| ^^^^^ A002
6 | pass
|
-A002.py:8:17: A002 Argument `id` is shadowing a python builtin
+A002.py:8:17: A002 Argument `id` is shadowing a Python builtin
|
8 | pass
9 |
@@ -52,7 +52,7 @@ A002.py:8:17: A002 Argument `id` is shadowing a python builtin
11 | pass
|
-A002.py:8:21: A002 Argument `dir` is shadowing a python builtin
+A002.py:8:21: A002 Argument `dir` is shadowing a Python builtin
|
8 | pass
9 |
@@ -61,7 +61,7 @@ A002.py:8:21: A002 Argument `dir` is shadowing a python builtin
11 | pass
|
-A002.py:11:16: A002 Argument `float` is shadowing a python builtin
+A002.py:11:16: A002 Argument `float` is shadowing a Python builtin
|
11 | pass
12 |
diff --git a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A002_A002.py_builtins_ignorelist.snap b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A002_A002.py_builtins_ignorelist.snap
index 6b7bfcd61216f..7522c0f937e5d 100644
--- a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A002_A002.py_builtins_ignorelist.snap
+++ b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A002_A002.py_builtins_ignorelist.snap
@@ -1,49 +1,49 @@
---
source: crates/ruff/src/rules/flake8_builtins/mod.rs
---
-A002.py:1:11: A002 Argument `str` is shadowing a python builtin
+A002.py:1:11: A002 Argument `str` is shadowing a Python builtin
|
1 | def func1(str, /, type, *complex, Exception, **getattr):
| ^^^ A002
2 | pass
|
-A002.py:1:19: A002 Argument `type` is shadowing a python builtin
+A002.py:1:19: A002 Argument `type` is shadowing a Python builtin
|
1 | def func1(str, /, type, *complex, Exception, **getattr):
| ^^^^ A002
2 | pass
|
-A002.py:1:26: A002 Argument `complex` is shadowing a python builtin
+A002.py:1:26: A002 Argument `complex` is shadowing a Python builtin
|
1 | def func1(str, /, type, *complex, Exception, **getattr):
| ^^^^^^^ A002
2 | pass
|
-A002.py:1:35: A002 Argument `Exception` is shadowing a python builtin
+A002.py:1:35: A002 Argument `Exception` is shadowing a Python builtin
|
1 | def func1(str, /, type, *complex, Exception, **getattr):
| ^^^^^^^^^ A002
2 | pass
|
-A002.py:1:48: A002 Argument `getattr` is shadowing a python builtin
+A002.py:1:48: A002 Argument `getattr` is shadowing a Python builtin
|
1 | def func1(str, /, type, *complex, Exception, **getattr):
| ^^^^^^^ A002
2 | pass
|
-A002.py:5:17: A002 Argument `bytes` is shadowing a python builtin
+A002.py:5:17: A002 Argument `bytes` is shadowing a Python builtin
|
5 | async def func2(bytes):
| ^^^^^ A002
6 | pass
|
-A002.py:11:16: A002 Argument `float` is shadowing a python builtin
+A002.py:11:16: A002 Argument `float` is shadowing a Python builtin
|
11 | pass
12 |
diff --git a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A003_A003.py.snap b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A003_A003.py.snap
index e5a2304dc03bb..1513b901fd94f 100644
--- a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A003_A003.py.snap
+++ b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A003_A003.py.snap
@@ -1,7 +1,7 @@
---
source: crates/ruff/src/rules/flake8_builtins/mod.rs
---
-A003.py:2:5: A003 Class attribute `ImportError` is shadowing a python builtin
+A003.py:2:5: A003 Class attribute `ImportError` is shadowing a Python builtin
|
2 | class MyClass:
3 | ImportError = 4
@@ -10,7 +10,7 @@ A003.py:2:5: A003 Class attribute `ImportError` is shadowing a python builtin
5 | dir = "/"
|
-A003.py:3:5: A003 Class attribute `id` is shadowing a python builtin
+A003.py:3:5: A003 Class attribute `id` is shadowing a Python builtin
|
3 | class MyClass:
4 | ImportError = 4
@@ -19,7 +19,7 @@ A003.py:3:5: A003 Class attribute `id` is shadowing a python builtin
6 | dir = "/"
|
-A003.py:4:5: A003 Class attribute `dir` is shadowing a python builtin
+A003.py:4:5: A003 Class attribute `dir` is shadowing a Python builtin
|
4 | ImportError = 4
5 | id = 5
@@ -29,14 +29,13 @@ A003.py:4:5: A003 Class attribute `dir` is shadowing a python builtin
8 | def __init__(self):
|
-A003.py:11:5: A003 Class attribute `str` is shadowing a python builtin
+A003.py:11:9: A003 Class attribute `str` is shadowing a Python builtin
|
-11 | self.dir = "."
-12 |
-13 | def str(self):
- | _____^
-14 | | pass
- | |____________^ A003
+11 | self.dir = "."
+12 |
+13 | def str(self):
+ | ^^^ A003
+14 | pass
|
diff --git a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A003_A003.py_builtins_ignorelist.snap b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A003_A003.py_builtins_ignorelist.snap
index 3b91a75f929fb..82c08824fcbc8 100644
--- a/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A003_A003.py_builtins_ignorelist.snap
+++ b/crates/ruff/src/rules/flake8_builtins/snapshots/ruff__rules__flake8_builtins__tests__A003_A003.py_builtins_ignorelist.snap
@@ -1,7 +1,7 @@
---
source: crates/ruff/src/rules/flake8_builtins/mod.rs
---
-A003.py:2:5: A003 Class attribute `ImportError` is shadowing a python builtin
+A003.py:2:5: A003 Class attribute `ImportError` is shadowing a Python builtin
|
2 | class MyClass:
3 | ImportError = 4
@@ -10,14 +10,13 @@ A003.py:2:5: A003 Class attribute `ImportError` is shadowing a python builtin
5 | dir = "/"
|
-A003.py:11:5: A003 Class attribute `str` is shadowing a python builtin
+A003.py:11:9: A003 Class attribute `str` is shadowing a Python builtin
|
-11 | self.dir = "."
-12 |
-13 | def str(self):
- | _____^
-14 | | pass
- | |____________^ A003
+11 | self.dir = "."
+12 |
+13 | def str(self):
+ | ^^^ A003
+14 | pass
|
diff --git a/crates/ruff/src/rules/flake8_builtins/types.rs b/crates/ruff/src/rules/flake8_builtins/types.rs
deleted file mode 100644
index c32aa2599394c..0000000000000
--- a/crates/ruff/src/rules/flake8_builtins/types.rs
+++ /dev/null
@@ -1,6 +0,0 @@
-#[derive(Clone, Copy)]
-pub enum ShadowingType {
- Variable,
- Argument,
- Attribute,
-}
diff --git a/crates/ruff/src/rules/flake8_commas/mod.rs b/crates/ruff/src/rules/flake8_commas/mod.rs
index 9736b33f84814..d08f8fc8ee049 100644
--- a/crates/ruff/src/rules/flake8_commas/mod.rs
+++ b/crates/ruff/src/rules/flake8_commas/mod.rs
@@ -6,14 +6,13 @@ mod tests {
use std::path::Path;
use anyhow::Result;
-
use test_case::test_case;
use crate::registry::Rule;
use crate::test::test_path;
use crate::{assert_messages, settings};
- #[test_case(Path::new("COM81.py"); "COM81")]
+ #[test_case(Path::new("COM81.py"))]
fn rules(path: &Path) -> Result<()> {
let snapshot = path.to_string_lossy().into_owned();
let diagnostics = test_path(
diff --git a/crates/ruff/src/rules/flake8_commas/rules/mod.rs b/crates/ruff/src/rules/flake8_commas/rules/mod.rs
new file mode 100644
index 0000000000000..0286278d8c7c9
--- /dev/null
+++ b/crates/ruff/src/rules/flake8_commas/rules/mod.rs
@@ -0,0 +1,5 @@
+pub(crate) use trailing_commas::{
+ trailing_commas, MissingTrailingComma, ProhibitedTrailingComma, TrailingCommaOnBareTuple,
+};
+
+mod trailing_commas;
diff --git a/crates/ruff/src/rules/flake8_commas/rules.rs b/crates/ruff/src/rules/flake8_commas/rules/trailing_commas.rs
similarity index 93%
rename from crates/ruff/src/rules/flake8_commas/rules.rs
rename to crates/ruff/src/rules/flake8_commas/rules/trailing_commas.rs
index 64228455af7fc..eaee333c3ca84 100644
--- a/crates/ruff/src/rules/flake8_commas/rules.rs
+++ b/crates/ruff/src/rules/flake8_commas/rules/trailing_commas.rs
@@ -4,12 +4,12 @@ use rustpython_parser::lexer::{LexResult, Spanned};
use rustpython_parser::Tok;
use ruff_diagnostics::{AlwaysAutofixableViolation, Violation};
-use ruff_diagnostics::{Diagnostic, Edit};
+use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::source_code::Locator;
use crate::registry::Rule;
-use crate::settings::{flags, Settings};
+use crate::settings::Settings;
/// Simplified token type.
#[derive(Copy, Clone, PartialEq, Eq)]
@@ -27,6 +27,7 @@ enum TokenType {
Def,
Lambda,
Colon,
+ String,
}
/// Simplified token specialized for the task.
@@ -55,6 +56,7 @@ impl<'tok> Token<'tok> {
// Import treated like a function.
Tok::Import => TokenType::Named,
Tok::Name { .. } => TokenType::Named,
+ Tok::String { .. } => TokenType::String,
Tok::Comma => TokenType::Comma,
Tok::Lpar => TokenType::OpeningBracket,
Tok::Lsqb => TokenType::OpeningSquareBracket,
@@ -160,9 +162,7 @@ impl AlwaysAutofixableViolation for MissingTrailingComma {
/// import json
///
///
-/// foo = json.dumps({
-/// "bar": 1,
-/// }),
+/// foo = json.dumps({"bar": 1}),
/// ```
///
/// Use instead:
@@ -170,9 +170,7 @@ impl AlwaysAutofixableViolation for MissingTrailingComma {
/// import json
///
///
-/// foo = json.dumps({
-/// "bar": 1,
-/// })
+/// foo = json.dumps({"bar": 1})
/// ```
///
/// In the event that a tuple is intended, then use instead:
@@ -180,11 +178,7 @@ impl AlwaysAutofixableViolation for MissingTrailingComma {
/// import json
///
///
-/// foo = (
-/// json.dumps({
-/// "bar": 1,
-/// }),
-/// )
+/// foo = (json.dumps({"bar": 1}),)
/// ```
#[violation]
pub struct TrailingCommaOnBareTuple;
@@ -227,11 +221,10 @@ impl AlwaysAutofixableViolation for ProhibitedTrailingComma {
}
/// COM812, COM818, COM819
-pub fn trailing_commas(
+pub(crate) fn trailing_commas(
tokens: &[LexResult],
locator: &Locator,
settings: &Settings,
- autofix: flags::Autofix,
) -> Vec<Diagnostic> {
let mut diagnostics = vec![];
@@ -274,7 +267,7 @@ pub fn trailing_commas(
}
},
TokenType::OpeningSquareBracket => match prev.type_ {
- TokenType::ClosingBracket | TokenType::Named => {
+ TokenType::ClosingBracket | TokenType::Named | TokenType::String => {
stack.push(Context::new(ContextType::Subscript));
}
_ => {
@@ -332,8 +325,9 @@ pub fn trailing_commas(
if comma_prohibited {
let comma = prev.spanned.unwrap();
let mut diagnostic = Diagnostic::new(ProhibitedTrailingComma, comma.1);
- if autofix.into() && settings.rules.should_fix(Rule::ProhibitedTrailingComma) {
- diagnostic.set_fix(Edit::range_deletion(diagnostic.range()));
+ if settings.rules.should_fix(Rule::ProhibitedTrailingComma) {
+ #[allow(deprecated)]
+ diagnostic.set_fix(Fix::unspecified(Edit::range_deletion(diagnostic.range())));
}
diagnostics.push(diagnostic);
}
@@ -367,16 +361,17 @@ pub fn trailing_commas(
MissingTrailingComma,
TextRange::empty(missing_comma.1.end()),
);
- if autofix.into() && settings.rules.should_fix(Rule::MissingTrailingComma) {
+ if settings.rules.should_fix(Rule::MissingTrailingComma) {
// Create a replacement that includes the final bracket (or other token),
// rather than just inserting a comma at the end. This prevents the UP034 autofix
// removing any brackets in the same linter pass - doing both at the same time could
// lead to a syntax error.
let contents = locator.slice(missing_comma.1);
- diagnostic.set_fix(Edit::range_replacement(
+ #[allow(deprecated)]
+ diagnostic.set_fix(Fix::unspecified(Edit::range_replacement(
format!("{contents},"),
missing_comma.1,
- ));
+ )));
}
diagnostics.push(diagnostic);
}
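Beyond the autofix plumbing, the behavioral change in this file is the new `TokenType::String`: an opening `[` that immediately follows a string literal is now classified as a subscript, like one following a name or a closing bracket. A hypothetical Python sketch of the shape of code this arm appears aimed at:

```python
# Subscripting a string literal: the `[` follows a String token, so it now
# opens a Subscript context for the trailing-comma rules rather than being
# read as the start of a list/tuple literal.
initial = "abcdef"[0]
middle = "abcdef"[1:4]

# A subscript after a closing bracket or a name was already handled.
first = {"key": "value"}["key"]
```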
diff --git a/crates/ruff/src/rules/flake8_commas/snapshots/ruff__rules__flake8_commas__tests__COM81.py.snap b/crates/ruff/src/rules/flake8_commas/snapshots/ruff__rules__flake8_commas__tests__COM81.py.snap
index 0089753e67694..e7b889c36d964 100644
--- a/crates/ruff/src/rules/flake8_commas/snapshots/ruff__rules__flake8_commas__tests__COM81.py.snap
+++ b/crates/ruff/src/rules/flake8_commas/snapshots/ruff__rules__flake8_commas__tests__COM81.py.snap
@@ -936,5 +936,7 @@ COM81.py:632:42: COM812 [*] Trailing comma missing
632 |- (i for i in range(10) if i // 2 == 0) # COM812 fix should include the final bracket
632 |+ (i for i in range(10) if i // 2 == 0), # COM812 fix should include the final bracket
633 633 | )
+634 634 |
+635 635 | foo = namedtuple(
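The existing line-632 case in this snapshot documents that the COM812 autofix replaces the last element up to and including its final bracket rather than inserting a lone comma, so it cannot collide with UP034 removing brackets in the same pass. A hypothetical before/after sketch (the enclosing `sorted(...)` call is illustrative, not the fixture's actual context):

```python
# Before the fix: the generator expression is the last argument in a
# multi-line call and has no trailing comma, so COM812 is reported on it.
result = sorted(
    (i for i in range(10) if i // 2 == 0)  # COM812
)

# After the autofix: the edit replaces the generator *including* its closing
# bracket, appending the comma in a single replacement (per the comment in
# trailing_commas.rs about not clashing with the UP034 fix).
result = sorted(
    (i for i in range(10) if i // 2 == 0),
)
```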
diff --git a/crates/ruff/src/rules/flake8_comprehensions/fixes.rs b/crates/ruff/src/rules/flake8_comprehensions/fixes.rs
index 57a0a78ee9ed2..b10156a39ae52 100644
--- a/crates/ruff/src/rules/flake8_comprehensions/fixes.rs
+++ b/crates/ruff/src/rules/flake8_comprehensions/fixes.rs
@@ -1,53 +1,38 @@
use anyhow::{bail, Result};
use itertools::Itertools;
use libcst_native::{
- Arg, AssignEqual, AssignTargetExpression, Call, Codegen, CodegenState, CompFor, Dict, DictComp,
- DictElement, Element, Expr, Expression, GeneratorExp, LeftCurlyBrace, LeftParen,
- LeftSquareBracket, List, ListComp, Name, ParenthesizableWhitespace, RightCurlyBrace,
- RightParen, RightSquareBracket, Set, SetComp, SimpleString, SimpleWhitespace, Tuple,
+ Arg, AssignEqual, AssignTargetExpression, Call, Comment, CompFor, Dict, DictComp, DictElement,
+ Element, EmptyLine, Expression, GeneratorExp, LeftCurlyBrace, LeftParen, LeftSquareBracket,
+ List, ListComp, Name, ParenthesizableWhitespace, ParenthesizedWhitespace, RightCurlyBrace,
+ RightParen, RightSquareBracket, Set, SetComp, SimpleString, SimpleWhitespace,
+ TrailingWhitespace, Tuple,
};
+use rustpython_parser::ast::Ranged;
-use ruff_diagnostics::Edit;
+use crate::autofix::codemods::CodegenStylist;
+use ruff_diagnostics::{Edit, Fix};
use ruff_python_ast::source_code::{Locator, Stylist};
-use crate::cst::matchers::{match_expr, match_module};
-
-fn match_call<'a, 'b>(expr: &'a mut Expr<'b>) -> Result<&'a mut Call<'b>> {
- if let Expression::Call(call) = &mut expr.value {
- Ok(call)
- } else {
- bail!("Expected Expression::Call")
- }
-}
-
-fn match_arg<'a, 'b>(call: &'a Call<'b>) -> Result<&'a Arg<'b>> {
- if let Some(arg) = call.args.first() {
- Ok(arg)
- } else {
- bail!("Expected Arg")
- }
-}
+use crate::cst::matchers::{
+ match_arg, match_call, match_call_mut, match_expression, match_generator_exp, match_lambda,
+ match_list_comp, match_name, match_tuple,
+};
/// (C400) Convert `list(x for x in y)` to `[x for x in y]`.
-pub fn fix_unnecessary_generator_list(
+pub(crate) fn fix_unnecessary_generator_list(
locator: &Locator,
stylist: &Stylist,
expr: &rustpython_parser::ast::Expr,
) -> Result<Edit> {
// Expr(Call(GeneratorExp)))) -> Expr(ListComp)))
let module_text = locator.slice(expr.range());
- let mut tree = match_module(module_text)?;
- let mut body = match_expr(&mut tree)?;
- let call = match_call(body)?;
+ let mut tree = match_expression(module_text)?;
+ let call = match_call_mut(&mut tree)?;
let arg = match_arg(call)?;
- let Expression::GeneratorExp(generator_exp) = &arg.value else {
- bail!(
- "Expected Expression::GeneratorExp"
- );
- };
+ let generator_exp = match_generator_exp(&arg.value)?;
- body.value = Expression::ListComp(Box::new(ListComp {
+ tree = Expression::ListComp(Box::new(ListComp {
elt: generator_exp.elt.clone(),
for_in: generator_exp.for_in.clone(),
lbracket: LeftSquareBracket {
@@ -60,18 +45,14 @@ pub fn fix_unnecessary_generator_list(
rpar: generator_exp.rpar.clone(),
}));
- let mut state = CodegenState {
- default_newline: &stylist.line_ending(),
- default_indent: stylist.indentation(),
- ..CodegenState::default()
- };
- tree.codegen(&mut state);
-
- Ok(Edit::range_replacement(state.to_string(), expr.range()))
+ Ok(Edit::range_replacement(
+ tree.codegen_stylist(stylist),
+ expr.range(),
+ ))
}
/// (C401) Convert `set(x for x in y)` to `{x for x in y}`.
-pub fn fix_unnecessary_generator_set(
+pub(crate) fn fix_unnecessary_generator_set(
locator: &Locator,
stylist: &Stylist,
expr: &rustpython_parser::ast::Expr,
@@ -79,18 +60,13 @@ pub fn fix_unnecessary_generator_set(
) -> Result<Edit> {
// Expr(Call(GeneratorExp)))) -> Expr(SetComp)))
let module_text = locator.slice(expr.range());
- let mut tree = match_module(module_text)?;
- let mut body = match_expr(&mut tree)?;
- let call = match_call(body)?;
+ let mut tree = match_expression(module_text)?;
+ let call = match_call_mut(&mut tree)?;
let arg = match_arg(call)?;
- let Expression::GeneratorExp(generator_exp) = &arg.value else {
- bail!(
- "Expected Expression::GeneratorExp"
- );
- };
+ let generator_exp = match_generator_exp(&arg.value)?;
- body.value = Expression::SetComp(Box::new(SetComp {
+ tree = Expression::SetComp(Box::new(SetComp {
elt: generator_exp.elt.clone(),
for_in: generator_exp.for_in.clone(),
lbrace: LeftCurlyBrace {
@@ -103,21 +79,12 @@ pub fn fix_unnecessary_generator_set(
rpar: generator_exp.rpar.clone(),
}));
- let mut state = CodegenState {
- default_newline: &stylist.line_ending(),
- default_indent: stylist.indentation(),
- ..CodegenState::default()
- };
- tree.codegen(&mut state);
-
- let mut content = state.to_string();
+ let mut content = tree.codegen_stylist(stylist);
// If the expression is embedded in an f-string, surround it with spaces to avoid
// syntax errors.
- if let Some(parent_element) = parent {
- if let &rustpython_parser::ast::ExprKind::FormattedValue { .. } = &parent_element.node {
- content = format!(" {content} ");
- }
+ if let Some(rustpython_parser::ast::Expr::FormattedValue(_)) = parent {
+ content = format!(" {content} ");
}
Ok(Edit::range_replacement(content, expr.range()))
@@ -125,39 +92,25 @@ pub fn fix_unnecessary_generator_set(
/// (C402) Convert `dict((x, x) for x in range(3))` to `{x: x for x in
/// range(3)}`.
-pub fn fix_unnecessary_generator_dict(
+pub(crate) fn fix_unnecessary_generator_dict(
locator: &Locator,
stylist: &Stylist,
expr: &rustpython_parser::ast::Expr,
parent: Option<&rustpython_parser::ast::Expr>,
) -> Result<Edit> {
let module_text = locator.slice(expr.range());
- let mut tree = match_module(module_text)?;
- let mut body = match_expr(&mut tree)?;
- let call = match_call(body)?;
+ let mut tree = match_expression(module_text)?;
+ let call = match_call_mut(&mut tree)?;
let arg = match_arg(call)?;
// Extract the (k, v) from `(k, v) for ...`.
- let Expression::GeneratorExp(generator_exp) = &arg.value else {
- bail!(
- "Expected Expression::GeneratorExp"
- );
- };
- let Expression::Tuple(tuple) = &generator_exp.elt.as_ref() else {
- bail!("Expected Expression::Tuple");
- };
- let Some(Element::Simple { value: key, .. }) = &tuple.elements.get(0) else {
- bail!(
- "Expected tuple to contain a key as the first element"
- );
- };
- let Some(Element::Simple { value, .. }) = &tuple.elements.get(1) else {
- bail!(
- "Expected tuple to contain a key as the second element"
- );
+ let generator_exp = match_generator_exp(&arg.value)?;
+ let tuple = match_tuple(&generator_exp.elt)?;
+ let [Element::Simple { value: key, .. }, Element::Simple { value, .. }] = &tuple.elements[..] else {
+ bail!("Expected tuple to contain two elements");
};
- body.value = Expression::DictComp(Box::new(DictComp {
+ tree = Expression::DictComp(Box::new(DictComp {
key: Box::new(key.clone()),
value: Box::new(value.clone()),
for_in: generator_exp.for_in.clone(),
@@ -173,28 +126,19 @@ pub fn fix_unnecessary_generator_dict(
whitespace_after_colon: ParenthesizableWhitespace::SimpleWhitespace(SimpleWhitespace(" ")),
}));
- let mut state = CodegenState {
- default_newline: &stylist.line_ending(),
- default_indent: stylist.indentation(),
- ..CodegenState::default()
- };
- tree.codegen(&mut state);
-
- let mut content = state.to_string();
+ let mut content = tree.codegen_stylist(stylist);
// If the expression is embedded in an f-string, surround it with spaces to avoid
// syntax errors.
- if let Some(parent_element) = parent {
- if let &rustpython_parser::ast::ExprKind::FormattedValue { .. } = &parent_element.node {
- content = format!(" {content} ");
- }
+ if let Some(rustpython_parser::ast::Expr::FormattedValue(_)) = parent {
+ content = format!(" {content} ");
}
Ok(Edit::range_replacement(content, expr.range()))
}
/// (C403) Convert `set([x for x in y])` to `{x for x in y}`.
-pub fn fix_unnecessary_list_comprehension_set(
+pub(crate) fn fix_unnecessary_list_comprehension_set(
locator: &Locator,
stylist: &Stylist,
expr: &rustpython_parser::ast::Expr,
@@ -202,16 +146,13 @@ pub fn fix_unnecessary_list_comprehension_set(
// Expr(Call(ListComp)))) ->
// Expr(SetComp)))
let module_text = locator.slice(expr.range());
- let mut tree = match_module(module_text)?;
- let mut body = match_expr(&mut tree)?;
- let call = match_call(body)?;
+ let mut tree = match_expression(module_text)?;
+ let call = match_call_mut(&mut tree)?;
let arg = match_arg(call)?;
- let Expression::ListComp(list_comp) = &arg.value else {
- bail!("Expected Expression::ListComp");
- };
+ let list_comp = match_list_comp(&arg.value)?;
- body.value = Expression::SetComp(Box::new(SetComp {
+ tree = Expression::SetComp(Box::new(SetComp {
elt: list_comp.elt.clone(),
for_in: list_comp.for_in.clone(),
lbrace: LeftCurlyBrace {
@@ -224,43 +165,34 @@ pub fn fix_unnecessary_list_comprehension_set(
rpar: list_comp.rpar.clone(),
}));
- let mut state = CodegenState {
- default_newline: &stylist.line_ending(),
- default_indent: stylist.indentation(),
- ..CodegenState::default()
- };
- tree.codegen(&mut state);
-
- Ok(Edit::range_replacement(state.to_string(), expr.range()))
+ Ok(Edit::range_replacement(
+ tree.codegen_stylist(stylist),
+ expr.range(),
+ ))
}
/// (C404) Convert `dict([(i, i) for i in range(3)])` to `{i: i for i in
/// range(3)}`.
-pub fn fix_unnecessary_list_comprehension_dict(
+pub(crate) fn fix_unnecessary_list_comprehension_dict(
locator: &Locator,
stylist: &Stylist,
expr: &rustpython_parser::ast::Expr,
) -> Result<Edit> {
let module_text = locator.slice(expr.range());
- let mut tree = match_module(module_text)?;
- let mut body = match_expr(&mut tree)?;
- let call = match_call(body)?;
+ let mut tree = match_expression(module_text)?;
+ let call = match_call_mut(&mut tree)?;
let arg = match_arg(call)?;
- let Expression::ListComp(list_comp) = &arg.value else {
- bail!("Expected Expression::ListComp")
- };
+ let list_comp = match_list_comp(&arg.value)?;
- let Expression::Tuple(tuple) = &*list_comp.elt else {
- bail!("Expected Expression::Tuple")
- };
+ let tuple = match_tuple(&list_comp.elt)?;
let [Element::Simple {
value: key,
comma: Some(comma),
}, Element::Simple { value, .. }] = &tuple.elements[..] else { bail!("Expected tuple with two elements"); };
- body.value = Expression::DictComp(Box::new(DictComp {
+ tree = Expression::DictComp(Box::new(DictComp {
key: Box::new(key.clone()),
value: Box::new(value.clone()),
for_in: list_comp.for_in.clone(),
@@ -276,14 +208,10 @@ pub fn fix_unnecessary_list_comprehension_dict(
rpar: list_comp.rpar.clone(),
}));
- let mut state = CodegenState {
- default_newline: &stylist.line_ending(),
- default_indent: stylist.indentation(),
- ..CodegenState::default()
- };
- tree.codegen(&mut state);
-
- Ok(Edit::range_replacement(state.to_string(), expr.range()))
+ Ok(Edit::range_replacement(
+ tree.codegen_stylist(stylist),
+ expr.range(),
+ ))
}
/// Drop a trailing comma from a list of tuple elements.
@@ -330,16 +258,15 @@ fn drop_trailing_comma<'a>(
}
/// (C405) Convert `set((1, 2))` to `{1, 2}`.
-pub fn fix_unnecessary_literal_set(
+pub(crate) fn fix_unnecessary_literal_set(
locator: &Locator,
stylist: &Stylist,
expr: &rustpython_parser::ast::Expr,
) -> Result<Edit> {
// Expr(Call(List|Tuple)))) -> Expr(Set)))
let module_text = locator.slice(expr.range());
- let mut tree = match_module(module_text)?;
- let mut body = match_expr(&mut tree)?;
- let mut call = match_call(body)?;
+ let mut tree = match_expression(module_text)?;
+ let call = match_call_mut(&mut tree)?;
let arg = match_arg(call)?;
let (elements, whitespace_after, whitespace_before) = match &arg.value {
@@ -357,7 +284,7 @@ pub fn fix_unnecessary_literal_set(
if elements.is_empty() {
call.args = vec![];
} else {
- body.value = Expression::Set(Box::new(Set {
+ tree = Expression::Set(Box::new(Set {
elements,
lbrace: LeftCurlyBrace { whitespace_after },
rbrace: RightCurlyBrace { whitespace_before },
@@ -366,27 +293,22 @@ pub fn fix_unnecessary_literal_set(
}));
}
- let mut state = CodegenState {
- default_newline: &stylist.line_ending(),
- default_indent: stylist.indentation(),
- ..CodegenState::default()
- };
- tree.codegen(&mut state);
-
- Ok(Edit::range_replacement(state.to_string(), expr.range()))
+ Ok(Edit::range_replacement(
+ tree.codegen_stylist(stylist),
+ expr.range(),
+ ))
}
/// (C406) Convert `dict([(1, 2)])` to `{1: 2}`.
-pub fn fix_unnecessary_literal_dict(
+pub(crate) fn fix_unnecessary_literal_dict(
locator: &Locator,
stylist: &Stylist,
expr: &rustpython_parser::ast::Expr,
) -> Result<Edit>