Move RustPython vendored and helper code into its own crate (#3171)
charliermarsh authored Feb 23, 2023
1 parent 0f04aa2 commit 095f005
Showing 19 changed files with 58 additions and 57 deletions.
12 changes: 12 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

6 changes: 4 additions & 2 deletions crates/ruff/Cargo.toml
@@ -16,6 +16,10 @@ crate-type = ["cdylib", "rlib"]
doctest = false

[dependencies]
ruff_macros = { path = "../ruff_macros" }
ruff_python = { path = "../ruff_python" }
ruff_rustpython = { path = "../ruff_rustpython" }

anyhow = { workspace = true }
bisection = { version = "0.1.0" }
bitflags = { version = "1.3.2" }
@@ -40,8 +44,6 @@ once_cell = { workspace = true }
path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix_paths_on_wasm"] }
regex = { workspace = true }
result-like = "0.4.6"
ruff_macros = { path = "../ruff_macros" }
ruff_python = { path = "../ruff_python" }
rustc-hash = { workspace = true }
rustpython-common = { workspace = true }
rustpython-parser = { workspace = true }
2 changes: 0 additions & 2 deletions crates/ruff/src/lib.rs
@@ -33,10 +33,8 @@ pub mod resolver;
mod rule_redirects;
mod rule_selector;
mod rules;
mod rustpython_helpers;
pub mod settings;
pub mod source_code;
mod vendor;
mod violation;
mod visibility;

3 changes: 1 addition & 2 deletions crates/ruff/src/lib_wasm.rs
@@ -14,7 +14,6 @@ use crate::rules::{
flake8_quotes, flake8_self, flake8_tidy_imports, flake8_type_checking, flake8_unused_arguments,
isort, mccabe, pep8_naming, pycodestyle, pydocstyle, pylint, pyupgrade,
};
use crate::rustpython_helpers::tokenize;
use crate::settings::configuration::Configuration;
use crate::settings::options::Options;
use crate::settings::{defaults, flags, Settings};
@@ -175,7 +174,7 @@ pub fn check(contents: &str, options: JsValue) -> Result<JsValue, JsValue> {
Settings::from_configuration(configuration, Path::new(".")).map_err(|e| e.to_string())?;

// Tokenize once.
let tokens: Vec<LexResult> = tokenize(contents);
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);

// Map row and column locations to byte slices (lazily).
let locator = Locator::new(contents);
10 changes: 5 additions & 5 deletions crates/ruff/src/linter.rs
@@ -24,7 +24,7 @@ use crate::registry::{Diagnostic, LintSource, Rule};
use crate::rules::pycodestyle;
use crate::settings::{flags, Settings};
use crate::source_code::{Indexer, Locator, Stylist};
use crate::{directives, fs, rustpython_helpers};
use crate::{directives, fs};

const CARGO_PKG_NAME: &str = env!("CARGO_PKG_NAME");
const CARGO_PKG_REPOSITORY: &str = env!("CARGO_PKG_REPOSITORY");
@@ -115,7 +115,7 @@ pub fn check_path(
.iter_enabled()
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Imports));
if use_ast || use_imports || use_doc_lines {
match rustpython_helpers::parse_program_tokens(tokens, &path.to_string_lossy()) {
match ruff_rustpython::parse_program_tokens(tokens, &path.to_string_lossy()) {
Ok(python_ast) => {
if use_ast {
diagnostics.extend(check_ast(
@@ -226,7 +226,7 @@ pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings
let contents = std::fs::read_to_string(path)?;

// Tokenize once.
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(&contents);

// Map row and column locations to byte slices (lazily).
let locator = Locator::new(&contents);
@@ -290,7 +290,7 @@ pub fn lint_only(
autofix: flags::Autofix,
) -> LinterResult<Vec<Message>> {
// Tokenize once.
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(contents);
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);

// Map row and column locations to byte slices (lazily).
let locator = Locator::new(contents);
@@ -359,7 +359,7 @@ pub fn lint_fix<'a>(
// Continuously autofix until the source code stabilizes.
loop {
// Tokenize once.
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&transformed);
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(&transformed);

// Map row and column locations to byte slices (lazily).
let locator = Locator::new(&transformed);
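Across linter.rs the edit is mechanical: every rustpython_helpers::… call becomes ruff_rustpython::…, with the same tokenize-once-then-parse flow. A minimal sketch of that call pattern from a crate that depends on ruff_rustpython (the "<example>" source path is a placeholder, not taken from this diff):

```rust
use anyhow::Result;
use rustpython_parser::ast::Suite;
use rustpython_parser::lexer::LexResult;

/// Lex once, then reuse the token stream to build the AST.
fn lex_and_parse(contents: &str) -> Result<Suite> {
    // Collect tokens up to and including the first error.
    let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);

    // Parse the full program from the already-lexed tokens.
    let python_ast = ruff_rustpython::parse_program_tokens(tokens, "<example>")?;
    Ok(python_ast)
}
```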
4 changes: 2 additions & 2 deletions crates/ruff/src/rules/pandas_vet/mod.rs
@@ -18,12 +18,12 @@ mod tests {
use crate::settings::flags;
use crate::source_code::{Indexer, Locator, Stylist};
use crate::test::test_path;
use crate::{directives, rustpython_helpers, settings};
use crate::{directives, settings};

fn rule_code(contents: &str, expected: &[Rule]) {
let contents = dedent(contents);
let settings = settings::Settings::for_rules(&Linter::PandasVet);
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(&contents);
let locator = Locator::new(&contents);
let stylist = Stylist::from_contents(&contents, &locator);
let indexer: Indexer = tokens.as_slice().into();
4 changes: 2 additions & 2 deletions crates/ruff/src/rules/pyflakes/mod.rs
@@ -20,7 +20,7 @@ mod tests {
use crate::settings::flags;
use crate::source_code::{Indexer, Locator, Stylist};
use crate::test::test_path;
use crate::{directives, rustpython_helpers, settings};
use crate::{directives, settings};

#[test_case(Rule::UnusedImport, Path::new("F401_0.py"); "F401_0")]
#[test_case(Rule::UnusedImport, Path::new("F401_1.py"); "F401_1")]
@@ -243,7 +243,7 @@ mod tests {
fn flakes(contents: &str, expected: &[Rule]) {
let contents = dedent(contents);
let settings = settings::Settings::for_rules(&Linter::Pyflakes);
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(&contents);
let locator = Locator::new(&contents);
let stylist = Stylist::from_contents(&contents, &locator);
let indexer: Indexer = tokens.as_slice().into();
3 changes: 2 additions & 1 deletion crates/ruff/src/source_code/generator.rs
@@ -8,8 +8,9 @@ use rustpython_parser::ast::{
Suite, Withitem,
};

use ruff_rustpython::vendor::{bytes, str};

use crate::source_code::stylist::{Indentation, LineEnding, Quote, Stylist};
use crate::vendor::{bytes, str};

mod precedence {
pub const ASSIGN: u8 = 3;
2 changes: 1 addition & 1 deletion crates/ruff/src/source_code/stylist.rs
@@ -4,13 +4,13 @@ use std::fmt;
use std::ops::Deref;

use once_cell::unsync::OnceCell;
use ruff_rustpython::vendor;
use rustpython_parser::ast::Location;
use rustpython_parser::{lexer, Mode, Tok};

use crate::ast::types::Range;
use crate::rules::pydocstyle::helpers::leading_quote;
use crate::source_code::Locator;
use crate::vendor;

pub struct Stylist<'a> {
contents: &'a str,
6 changes: 3 additions & 3 deletions crates/ruff/src/test.rs
@@ -7,12 +7,12 @@ use anyhow::Result;
use rustpython_parser::lexer::LexResult;

use crate::autofix::fix_file;
use crate::directives;
use crate::linter::{check_path, LinterResult};
use crate::packaging::detect_package_root;
use crate::registry::Diagnostic;
use crate::settings::{flags, Settings};
use crate::source_code::{Indexer, Locator, Stylist};
use crate::{directives, rustpython_helpers};

pub fn test_resource_path(path: impl AsRef<Path>) -> std::path::PathBuf {
Path::new("./resources/test/").join(path)
@@ -23,7 +23,7 @@ pub fn test_resource_path(path: impl AsRef<Path>) -> std::path::PathBuf {
pub fn test_path(path: &Path, settings: &Settings) -> Result<Vec<Diagnostic>> {
let path = test_resource_path("fixtures").join(path);
let contents = std::fs::read_to_string(&path)?;
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(&contents);
let locator = Locator::new(&contents);
let stylist = Stylist::from_contents(&contents, &locator);
let indexer: Indexer = tokens.as_slice().into();
@@ -58,7 +58,7 @@ pub fn test_path(path: &Path, settings: &Settings) -> Result<Vec<Diagnostic>> {
let mut iterations = 0;

loop {
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(&contents);
let locator = Locator::new(&contents);
let stylist = Stylist::from_contents(&contents, &locator);
let indexer: Indexer = tokens.as_slice().into();
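The test harness mirrors the linter's setup: tokenize via the shared crate, then derive the Locator, Stylist, and Indexer from the same contents and token stream. A rough sketch of that shared setup as it would appear inside the ruff crate; the comments describe the roles these types play here, and check_path itself is elided:

```rust
use rustpython_parser::lexer::LexResult;

use crate::source_code::{Indexer, Locator, Stylist};

fn setup(contents: &str) {
    // Tokenize once via the shared ruff_rustpython crate.
    let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
    // Map row and column locations to byte offsets (lazily).
    let locator = Locator::new(contents);
    // Detect the source's quoting and indentation style.
    let stylist = Stylist::from_contents(contents, &locator);
    // Index the token stream (e.g., continuation lines).
    let indexer: Indexer = tokens.as_slice().into();
    // ...hand these, plus directives and settings, to check_path (not shown)...
    let _ = (&tokens, &locator, &stylist, &indexer);
}
```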
11 changes: 7 additions & 4 deletions crates/ruff_python_formatter/Cargo.toml
@@ -6,17 +6,20 @@ edition = { workspace = true }
rust-version = { workspace = true }

[dependencies]
ruff_formatter = { path = "../ruff_formatter" }
ruff_rustpython = { path = "../ruff_rustpython" }
ruff_text_size = { path = "../ruff_text_size" }

anyhow = { workspace = true }
clap = { workspace = true }
once_cell = { workspace = true }
ruff_formatter = { path = "../ruff_formatter" }
ruff_text_size = { path = "../ruff_text_size" }
rustc-hash = { workspace = true }
rustpython-common = { workspace = true }
rustpython-parser = { workspace = true }

[dev-dependencies]
ruff_testing_macros = { path = "../ruff_testing_macros" }

insta = { version = "1.19.0", features = [] }
test-case = { version = "2.2.2" }
ruff_testing_macros = { path = "../ruff_testing_macros" }
similar = "2.2.1"
similar = { version = "2.2.1" }
1 change: 0 additions & 1 deletion crates/ruff_python_formatter/src/core/mod.rs
@@ -1,4 +1,3 @@
pub mod locator;
pub mod rustpython_helpers;
pub mod types;
pub mod visitor;
28 changes: 0 additions & 28 deletions crates/ruff_python_formatter/src/core/rustpython_helpers.rs

This file was deleted.

5 changes: 2 additions & 3 deletions crates/ruff_python_formatter/src/lib.rs
@@ -5,7 +5,6 @@ use rustpython_parser::lexer::LexResult;
use crate::attachment::attach;
use crate::context::ASTFormatContext;
use crate::core::locator::Locator;
use crate::core::rustpython_helpers;
use crate::cst::Stmt;
use crate::newlines::normalize_newlines;
use crate::parentheses::normalize_parentheses;
@@ -24,13 +23,13 @@ pub mod trivia;

pub fn fmt(contents: &str) -> Result<Formatted<ASTFormatContext>> {
// Tokenize once.
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(contents);
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);

// Extract trivia.
let trivia = trivia::extract_trivia_tokens(&tokens);

// Parse the AST.
let python_ast = rustpython_helpers::parse_program_tokens(tokens, "<filename>")?;
let python_ast = ruff_rustpython::parse_program_tokens(tokens, "<filename>")?;

// Convert to a CST.
let mut python_cst: Vec<Stmt> = python_ast.into_iter().map(Into::into).collect();
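The formatter's entry point keeps its signature; only the lexing and parsing helpers now come from ruff_rustpython. A hypothetical smoke test for the unchanged public API (the sample source string is made up, and the test is assumed to live in a crate that depends on ruff_python_formatter):

```rust
// Integration-style test (e.g., under tests/) against the public fmt() entry point.
#[test]
fn formats_trivial_module() {
    // fmt() tokenizes, extracts trivia, parses, and formats, as shown above.
    let result = ruff_python_formatter::fmt("x = 1\n");
    assert!(result.is_ok());
}
```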
14 changes: 14 additions & 0 deletions crates/ruff_rustpython/Cargo.toml
@@ -0,0 +1,14 @@
[package]
name = "ruff_rustpython"
version = "0.0.0"
publish = false
edition = { workspace = true }
rust-version = { workspace = true }

[lib]

[dependencies]
anyhow = { workspace = true }
once_cell = { workspace = true }
rustpython-common = { workspace = true }
rustpython-parser = { workspace = true }
4 changes: 3 additions & 1 deletion crates/ruff/src/rustpython_helpers.rs → crates/ruff_rustpython/src/lib.rs
@@ -3,6 +3,8 @@ use rustpython_parser::ast::{Mod, Suite};
use rustpython_parser::lexer::LexResult;
use rustpython_parser::{lexer, Mode, ParseError};

pub mod vendor;

/// Collect tokens up to and including the first error.
pub fn tokenize(contents: &str) -> Vec<LexResult> {
let mut tokens: Vec<LexResult> = vec![];
@@ -17,7 +19,7 @@ pub fn tokenize(contents: &str) -> Vec<LexResult> {
}

/// Parse a full Python program from its tokens.
pub(crate) fn parse_program_tokens(
pub fn parse_program_tokens(
lxr: Vec<LexResult>,
source_path: &str,
) -> anyhow::Result<Suite, ParseError> {
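The only semantic change in this hunk is visibility: parse_program_tokens was pub(crate) while the helpers lived inside the ruff crate, but from its own crate it must be plain pub for ruff and ruff_python_formatter to call it. A generic illustration of the difference, using toy names that are not from this repository:

```rust
// Toy stand-ins: `helpers` plays the role of ruff_rustpython; the comments
// describe what a downstream crate (ruff, ruff_python_formatter) would see.
mod helpers {
    // pub(crate): callable from sibling modules, invisible to other crates.
    pub(crate) fn crate_only() -> &'static str {
        "visible inside the defining crate only"
    }

    // pub: part of the crate's public API, callable by dependents.
    pub fn exported() -> &'static str {
        "visible to any crate that depends on this one"
    }
}

fn main() {
    // Inside the defining crate, both resolve.
    println!("{}", helpers::crate_only());
    println!("{}", helpers::exported());
    // From a dependent crate, only `exported` would resolve;
    // `crate_only` would fail with a privacy error.
}
```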
File renamed without changes.
File renamed without changes.
File renamed without changes.
