Replace flake8, isort, pyupgrade with ruff
https://github.com/charliermarsh/ruff/ is a faster replacement for most of the linting tools we use, and it is starting to be picked up by several other projects. Even projects like Pandas have adopted Ruff (pandas-dev/pandas#50160).

This PR replaces flake8, isort, and pyupgrade.
kaxil committed Jan 12, 2023
1 parent 1b1e36f commit a2b3316
Showing 13 changed files with 56 additions and 56 deletions.
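To make the switch concrete, here is a small, hypothetical module (not taken from this repository) showing the kind of rewrite ruff takes over from pyupgrade; the class and rule references below are illustrative only:

# Hypothetical example, for illustration only: pre-3.7 idioms that ruff's
# pyupgrade ("UP") rules rewrite automatically with target-version = "py37".
from typing import Optional


class Connection(object):  # UP004: inheriting from object is redundant in Python 3
    def describe(self, timeout: Optional[int] = None) -> str:
        return "timeout is %s" % timeout  # UP031: %-formatting flagged in favour of modern formatting


# After running ruff with autofix, roughly:
class Connection:
    def describe(self, timeout: Optional[int] = None) -> str:
        return f"timeout is {timeout}"
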
10 changes: 0 additions & 10 deletions .flake8

This file was deleted.

8 changes: 0 additions & 8 deletions .isort.cfg

This file was deleted.

31 changes: 5 additions & 26 deletions .pre-commit-config.yaml
@@ -70,34 +70,13 @@ repos:
         alias: black
         additional_dependencies: [black>=22.10.0]

-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    rev: 'v0.0.219'
     hooks:
-      - id: flake8
-        additional_dependencies:
-          - flake8-builtins
-          - flake8-comprehensions
-          - flake8-colors
-          - flake8-assertive
-          - flake8-typing-imports
-          - flake8-logging-format
-
-  - repo: https://github.com/PyCQA/isort
-    rev: 5.11.4
-    hooks:
-      - id: isort
-        name: Run isort
-        # Exclude auto-generated example files from being changed
-        exclude: ^sql-cli/include/base/.airflow/dags
+      - id: ruff
         args:
-          # These options are duplicated to known_first_party in .isort.cfg,
-          # Please keep these in sync for now. (See comments there for details.)
-          - --profile=black
-          - -l=110
-          - --combine-as
-          - -p=astro
-          - -p=tests
-          - -p=sql_cli
+          - --config=./ruff.toml

   - repo: https://github.com/codespell-project/codespell
     rev: v2.2.2
     hooks:

2 changes: 1 addition & 1 deletion
@@ -28,7 +28,7 @@
 log = logging.getLogger(__file__)


-def load_file_to_delta(
+def load_file_to_delta(  # noqa: C901
     input_file: File,
     delta_table: BaseTable,
     databricks_job_name: str,

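For context on the new # noqa: C901 above: the ruff.toml added in this commit sets [mccabe] max-complexity = 6, so a heavily branched function must either be refactored or explicitly silenced. A minimal, hypothetical sketch of the pattern (names are made up):

# Hypothetical sketch: a function whose branching exceeds max-complexity = 6,
# silenced with a targeted noqa comment rather than a refactor.
def pick_load_strategy(file_type: str, native: bool, retries: int) -> str:  # noqa: C901
    if file_type == "csv":
        strategy = "copy_into"
    elif file_type == "parquet":
        strategy = "autoloader" if native else "pandas"
    elif file_type == "ndjson":
        strategy = "json_copy"
    else:
        strategy = "pandas"
    if retries > 0 and strategy != "pandas":
        strategy += "_with_retry"
    return strategy
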
4 changes: 2 additions & 2 deletions
@@ -138,9 +138,9 @@ def process_checks(self):
             passed_tests.extend(_get_success_checks(checks, column))

         if len(failed_tests) > 0:
-            raise AirflowException(f"The following tests have failed:" f"\n{''.join(failed_tests)}")
+            raise AirflowException(f"The following tests have failed: \n{''.join(failed_tests)}")
         if len(passed_tests) > 0:
-            print(f"The following tests have passed:" f"\n{''.join(passed_tests)}")
+            print(f"The following tests have passed: \n{''.join(passed_tests)}")


 def _get_failed_checks(checks, col=None):

2 changes: 1 addition & 1 deletion python-sdk/src/astro/sql/operators/export_to_file.py
@@ -105,7 +105,7 @@ def get_openlineage_facets_on_complete(self, task_instance):  # skipcq: PYL-W061
             )
         ]
         output_uri = (
-            f"{self.output_file.openlineage_dataset_namespace}" f"{self.output_file.openlineage_dataset_name}"
+            f"{self.output_file.openlineage_dataset_namespace}{self.output_file.openlineage_dataset_name}"
         )
         output_dataset = [
             OpenlineageDataset(

2 changes: 1 addition & 1 deletion python-sdk/src/astro/sql/operators/raw_sql.py
@@ -7,8 +7,8 @@
 try:
     from airflow.decorators.base import TaskDecorator, task_decorator_factory
 except ImportError:
-    from airflow.decorators.base import task_decorator_factory
     from airflow.decorators import _TaskDecorator as TaskDecorator
+    from airflow.decorators.base import task_decorator_factory

 import airflow

2 changes: 1 addition & 1 deletion python-sdk/src/astro/sql/operators/transform.py
@@ -6,8 +6,8 @@
 try:
     from airflow.decorators.base import TaskDecorator, task_decorator_factory
 except ImportError:
-    from airflow.decorators.base import task_decorator_factory
     from airflow.decorators import _TaskDecorator as TaskDecorator
+    from airflow.decorators.base import task_decorator_factory

 from airflow.decorators.base import get_unique_task_id
 from airflow.models.xcom_arg import XComArg

2 changes: 1 addition & 1 deletion python-sdk/src/astro/sql/operators/upstream_task_mixin.py
@@ -23,5 +23,5 @@ def __init__(self, **kwargs):
                 self.set_upstream(task)
             else:
                 raise AirflowException(
-                    "Cannot upstream a non-task, please only use XcomArg or operators for this" " parameter"
+                    "Cannot upstream a non-task, please only use XcomArg or operators for this parameter"
                 )

4 changes: 2 additions & 2 deletions python-sdk/tests/benchmark/dags/benchmark_gcs_to_big_query.py
@@ -83,9 +83,9 @@
     task_id="load_five_gb",
     bucket="astro-sdk",
     source_objects=[
-        ("benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000" + str(i) + ".ndjson")
+        f"benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000{str(i)}.ndjson"
         if i >= 10
-        else ("benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000" + "0" + str(i) + ".ndjson")
+        else f"benchmark/trimmed/pypi/pypi-downloads-2021-03-28-00000000000{str(i)}.ndjson"
         for i in range(20)
     ],
     destination_project_dataset_table=f"{DATASET_NAME}.{TABLE_NAME}",

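An editorial aside, not part of this commit: inside an f-string, str(i) is redundant, and the if i >= 10 split only exists to zero-pad the suffix, so the whole comprehension could arguably be collapsed with a format spec, e.g.:

# Sketch only (not in this commit): a 12-digit zero-padded format spec produces
# the same file names without branching, and without the redundant str() call.
source_objects = [
    f"benchmark/trimmed/pypi/pypi-downloads-2021-03-28-{i:012d}.ndjson"
    for i in range(20)
]
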
4 changes: 2 additions & 2 deletions python-sdk/tests/files/locations/test_location_base.py
@@ -40,7 +40,7 @@ def test_get_class_name_method_valid_name():
     """Test valid case of implicit naming dependency among the module name and class name for dynamic imports"""

     class Test:  # skipcq: PY-D0002
-        __name__ = "test.some"
+        __name__ = "test.some"  # noqa: A003

     class TestLocation:  # skipcq: PY-D0002
         pass
@@ -96,7 +96,7 @@ def test_get_class_name_method_invalid_name():
     """Test invalid case of implicit naming dependency among the module name and class name for dynamic imports"""

     class Test:  # skipcq: PY-D0002
-        __name__ = "test.some"
+        __name__ = "test.some"  # noqa: A003

     class SomethingElseLocation:  # skipcq: PY-D0002
         pass

3 changes: 2 additions & 1 deletion
@@ -141,7 +141,8 @@ def test_is_valid_snow_identifier(self):  # skipcq PYL-R0201
         ]
         invalid_strings = [
             "$invalid",
-            "Infvalid\x00" "Invalid Name",
+            "Infvalid\x00",
+            "Invalid Name",
             '"Invalid " Name"',
             '"Also Invalid Name""',
         ]

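Worth calling out: this last hunk is where ruff's flake8-implicit-str-concat ("ISC") rules appear to have surfaced a real bug, since a missing comma had silently merged two invalid test strings into one list element. A minimal, self-contained sketch of the pitfall (hypothetical data):

# Hypothetical sketch of the pitfall ISC001 guards against: without the comma,
# Python concatenates the two adjacent literals into a single list element.
invalid_names = [
    "$invalid",
    "has nul\x00" "Has Space",  # one element: "has nul\x00Has Space"
]
print(len(invalid_names))  # 2, although three invalid names were intended
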
38 changes: 38 additions & 0 deletions ruff.toml
@@ -0,0 +1,38 @@
line-length = 120

# Enable Pyflakes `E` and `F` codes by default.
extend-select = [
"W", # pycodestyle warnings
"I", # isort
"C90", # Complexity
# "B", # flake8-bugbear
"C", # flake8-comprehensions
# "ANN", # flake8-annotations
"ISC", # flake8-implicit-str-concat
"T10", # flake8-debugger
"A", # flake8-builtins
"UP", # pyupgrade
]
extend-ignore = ["A002"]

# Exclude a variety of commonly ignored directories.
extend-exclude = [
"__pycache__",
"docs/source/conf.py",
]

target-version = "py37"
fix = true

[per-file-ignores]
"python-sdk/src/astro/sql/__init__.py" = ["F401"]
"python-sdk/src/astro/lineage/__init__.py" = ["F401"]
"python-sdk/src/astro/sql/table.py" = ["F401"]


[mccabe]
max-complexity = 6

[isort]
combine-as-imports = true
known-first-party = ["astro", "tests", "sql_cli"]
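
A hedged note on the [per-file-ignores] entries above: F401 ("imported but unused") is ignored for those __init__/table modules because their imports exist to re-export public names. A hypothetical sketch of the pattern, using stdlib modules so it runs (the real files re-export astro operators instead):

# Hypothetical package __init__.py: imports kept purely for re-export look
# "unused" to Ruff's F401, hence the per-file ignore in ruff.toml.
from json import dumps  # re-exported: users write `from mypkg import dumps`
from pathlib import Path  # re-exported: users write `from mypkg import Path`

# Declaring __all__ is the other common way to mark re-exports as intentional.
__all__ = ["dumps", "Path"]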
