Skip to content

Commit b9f972b

Browse files
committed
Merge branch 'main' into dcreager/function-separation
* main:
  Add `offset` method to `ruff_python_trivia::Cursor` (#18371)
  ty_ide: improve completions by using scopes
  ruff_python_parser: add `Tokens::before` method
  [ty] Split `Type::KnownInstance` into two type variants (#18350)
  Bump 0.11.12 (#18369)
2 parents df24254 + 7df79cf commit b9f972b

File tree

31 files changed

+1893
-853
lines changed

31 files changed

+1893
-853
lines changed

CHANGELOG.md

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,33 @@
11
# Changelog
22

3+
## 0.11.12
4+
5+
### Preview features
6+
7+
- \[`airflow`\] Revise fix titles (`AIR3`) ([#18215](https://github.com/astral-sh/ruff/pull/18215))
8+
- \[`pylint`\] Implement `missing-maxsplit-arg` (`PLC0207`) ([#17454](https://github.com/astral-sh/ruff/pull/17454))
9+
- \[`pyupgrade`\] New rule `UP050` (`useless-class-metaclass-type`) ([#18334](https://github.com/astral-sh/ruff/pull/18334))
10+
- \[`flake8-use-pathlib`\] Replace `os.symlink` with `Path.symlink_to` (`PTH211`) ([#18337](https://github.com/astral-sh/ruff/pull/18337))
11+
12+
### Bug fixes
13+
14+
- \[`flake8-bugbear`\] Ignore `__debug__` attribute in `B010` ([#18357](https://github.com/astral-sh/ruff/pull/18357))
15+
- \[`flake8-async`\] Fix `anyio.sleep` argument name (`ASYNC115`, `ASYNC116`) ([#18262](https://github.com/astral-sh/ruff/pull/18262))
16+
- \[`refurb`\] Fix `FURB129` autofix generating invalid syntax ([#18235](https://github.com/astral-sh/ruff/pull/18235))
17+
18+
### Rule changes
19+
20+
- \[`flake8-implicit-str-concat`\] Add autofix for `ISC003` ([#18256](https://github.com/astral-sh/ruff/pull/18256))
21+
- \[`pycodestyle`\] Improve the diagnostic message for `E712` ([#18328](https://github.com/astral-sh/ruff/pull/18328))
22+
- \[`flake8-2020`\] Fix diagnostic message for `!=` comparisons (`YTT201`) ([#18293](https://github.com/astral-sh/ruff/pull/18293))
23+
- \[`pyupgrade`\] Make fix unsafe if it deletes comments (`UP010`) ([#18291](https://github.com/astral-sh/ruff/pull/18291))
24+
25+
### Documentation
26+
27+
- Simplify rules table to improve readability ([#18297](https://github.com/astral-sh/ruff/pull/18297))
28+
- Update editor integrations link in README ([#17977](https://github.com/astral-sh/ruff/pull/17977))
29+
- \[`flake8-bugbear`\] Add fix safety section (`B006`) ([#17652](https://github.com/astral-sh/ruff/pull/17652))
30+
331
## 0.11.11
432

533
### Preview features

Cargo.lock

Lines changed: 3 additions & 3 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
148148
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
149149

150150
# For a specific version.
151-
curl -LsSf https://astral.sh/ruff/0.11.11/install.sh | sh
152-
powershell -c "irm https://astral.sh/ruff/0.11.11/install.ps1 | iex"
151+
curl -LsSf https://astral.sh/ruff/0.11.12/install.sh | sh
152+
powershell -c "irm https://astral.sh/ruff/0.11.12/install.ps1 | iex"
153153
```
154154

155155
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
182182
```yaml
183183
- repo: https://github.com/astral-sh/ruff-pre-commit
184184
# Ruff version.
185-
rev: v0.11.11
185+
rev: v0.11.12
186186
hooks:
187187
# Run the linter.
188188
- id: ruff

crates/ruff/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "ruff"
3-
version = "0.11.11"
3+
version = "0.11.12"
44
publish = true
55
authors = { workspace = true }
66
edition = { workspace = true }

crates/ruff_linter/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "ruff_linter"
3-
version = "0.11.11"
3+
version = "0.11.12"
44
publish = false
55
authors = { workspace = true }
66
edition = { workspace = true }

crates/ruff_python_ast/src/name.rs

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -111,6 +111,13 @@ impl From<Name> for compact_str::CompactString {
111111
}
112112
}
113113

114+
impl From<Name> for String {
115+
#[inline]
116+
fn from(name: Name) -> Self {
117+
name.as_str().into()
118+
}
119+
}
120+
114121
impl FromIterator<char> for Name {
115122
fn from_iter<I: IntoIterator<Item = char>>(iter: I) -> Self {
116123
Self(iter.into_iter().collect())

crates/ruff_python_parser/src/lib.rs

Lines changed: 99 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -637,6 +637,41 @@ impl Tokens {
637637
}
638638
}
639639

640+
/// Returns a slice of tokens before the given [`TextSize`] offset.
641+
///
642+
/// If the given offset is between two tokens, the returned slice will end just before the
643+
/// following token. In other words, if the offset is between the end of the previous token and
644+
/// the start of the next token, the returned slice will end just before the next token.
645+
///
646+
/// # Panics
647+
///
648+
/// If the given offset is inside a token range at any point
649+
/// other than the start of the range.
650+
pub fn before(&self, offset: TextSize) -> &[Token] {
651+
match self.binary_search_by(|token| token.start().cmp(&offset)) {
652+
Ok(idx) => &self[..idx],
653+
Err(idx) => {
654+
// We can't use `saturating_sub` here because a file could contain a BOM header, in
655+
// which case the token starts at offset 3 for UTF-8 encoded file content.
656+
if idx > 0 {
657+
if let Some(prev) = self.get(idx - 1) {
658+
// If it's equal to the end offset, then it's at a token boundary which is
659+
// valid. If it's greater than the end offset, then it's in the gap between
660+
// the tokens which is valid as well.
661+
assert!(
662+
offset >= prev.end(),
663+
"Offset {:?} is inside a token range {:?}",
664+
offset,
665+
prev.range()
666+
);
667+
}
668+
}
669+
670+
&self[..idx]
671+
}
672+
}
673+
}
674+
640675
/// Returns a slice of tokens after the given [`TextSize`] offset.
641676
///
642677
/// If the given offset is between two tokens, the returned slice will start from the following
@@ -645,7 +680,8 @@ impl Tokens {
645680
///
646681
/// # Panics
647682
///
648-
/// If the given offset is inside a token range.
683+
/// If the given offset is inside a token range at any point
684+
/// other than the start of the range.
649685
pub fn after(&self, offset: TextSize) -> &[Token] {
650686
match self.binary_search_by(|token| token.start().cmp(&offset)) {
651687
Ok(idx) => &self[idx..],
@@ -947,6 +983,68 @@ mod tests {
947983
tokens.after(TextSize::new(5));
948984
}
949985

986+
#[test]
987+
fn tokens_before_offset_at_first_token_start() {
988+
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
989+
let before = tokens.before(TextSize::new(0));
990+
assert_eq!(before.len(), 0);
991+
}
992+
993+
#[test]
994+
fn tokens_before_offset_after_first_token_gap() {
995+
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
996+
let before = tokens.before(TextSize::new(3));
997+
assert_eq!(before.len(), 1);
998+
assert_eq!(before.last().unwrap().kind(), TokenKind::Def);
999+
}
1000+
1001+
#[test]
1002+
fn tokens_before_offset_at_second_token_start() {
1003+
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
1004+
let before = tokens.before(TextSize::new(4));
1005+
assert_eq!(before.len(), 1);
1006+
assert_eq!(before.last().unwrap().kind(), TokenKind::Def);
1007+
}
1008+
1009+
#[test]
1010+
fn tokens_before_offset_at_token_start() {
1011+
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
1012+
let before = tokens.before(TextSize::new(8));
1013+
assert_eq!(before.len(), 3);
1014+
assert_eq!(before.last().unwrap().kind(), TokenKind::Lpar);
1015+
}
1016+
1017+
#[test]
1018+
fn tokens_before_offset_at_token_end() {
1019+
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
1020+
let before = tokens.before(TextSize::new(11));
1021+
assert_eq!(before.len(), 6);
1022+
assert_eq!(before.last().unwrap().kind(), TokenKind::Newline);
1023+
}
1024+
1025+
#[test]
1026+
fn tokens_before_offset_between_tokens() {
1027+
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
1028+
let before = tokens.before(TextSize::new(13));
1029+
assert_eq!(before.len(), 6);
1030+
assert_eq!(before.last().unwrap().kind(), TokenKind::Newline);
1031+
}
1032+
1033+
#[test]
1034+
fn tokens_before_offset_at_last_token_end() {
1035+
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
1036+
let before = tokens.before(TextSize::new(33));
1037+
assert_eq!(before.len(), 10);
1038+
assert_eq!(before.last().unwrap().kind(), TokenKind::Pass);
1039+
}
1040+
1041+
#[test]
1042+
#[should_panic(expected = "Offset 5 is inside a token range 4..7")]
1043+
fn tokens_before_offset_inside_token() {
1044+
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
1045+
tokens.before(TextSize::new(5));
1046+
}
1047+
9501048
#[test]
9511049
fn tokens_in_range_at_token_offset() {
9521050
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());

crates/ruff_python_trivia/src/cursor.rs

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,11 @@ impl<'a> Cursor<'a> {
2121
}
2222
}
2323

24+
/// Retrieves the current offset of the cursor within the source code.
25+
pub fn offset(&self) -> TextSize {
26+
self.source_length - self.text_len()
27+
}
28+
2429
/// Return the remaining input as a string slice.
2530
pub fn chars(&self) -> Chars<'a> {
2631
self.chars.clone()

crates/ruff_wasm/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "ruff_wasm"
3-
version = "0.11.11"
3+
version = "0.11.12"
44
publish = false
55
authors = { workspace = true }
66
edition = { workspace = true }

0 commit comments

Comments (0)