diff --git a/.flake8 b/.flake8 deleted file mode 100644 index baf7b8fd2..000000000 --- a/.flake8 +++ /dev/null @@ -1,10 +0,0 @@ -[flake8] -min_python_version = 3.7 -exclude = .git,__pycache__,docs/source/conf.py,old,build,dist -max-complexity = 6 -max-line-length = 120 -ignore = A002,W503 -extend-ignore = E203 -per-file-ignores = - python-sdk/src/astro/sql/__init__.py: F401 - python-sdk/src/astro/lineage/__init__.py: F401 diff --git a/.isort.cfg b/.isort.cfg deleted file mode 100644 index 3efc54fc5..000000000 --- a/.isort.cfg +++ /dev/null @@ -1,8 +0,0 @@ -[settings] -# This is duplicated with arguments in .pre-commit-config.yaml because isort is -# having some issues picking up these config files. Please keep these in sync -# for now and track the isort issue: https://github.com/PyCQA/isort/issues/1889 -profile=black -line_length=110 -combine_as_imports=true -known_first_party=astro,tests,sql_cli diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f274968cc..00b783eec 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -70,34 +70,13 @@ repos: alias: black additional_dependencies: [black>=22.10.0] - - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: 'v0.0.219' hooks: - - id: flake8 - additional_dependencies: - - flake8-builtins - - flake8-comprehensions - - flake8-colors - - flake8-assertive - - flake8-typing-imports - - flake8-logging-format - - - repo: https://github.com/PyCQA/isort - rev: 5.11.4 - hooks: - - id: isort - name: Run isort - # Exclude auto-generated example files from being changed - exclude: ^sql-cli/include/base/.airflow/dags + - id: ruff args: - # These options are duplicated to known_first_party in .isort.cfg, - # Please keep these in sync for now. (See comments there for details.) 
- - --profile=black - - -l=110 - - --combine-as - - -p=astro - - -p=tests - - -p=sql_cli + - --config=./ruff.toml + - repo: https://github.com/codespell-project/codespell rev: v2.2.2 hooks: diff --git a/python-sdk/src/astro/databases/databricks/load_file/load_file_job.py b/python-sdk/src/astro/databases/databricks/load_file/load_file_job.py index 3ab6d02a3..8f8cbdb77 100644 --- a/python-sdk/src/astro/databases/databricks/load_file/load_file_job.py +++ b/python-sdk/src/astro/databases/databricks/load_file/load_file_job.py @@ -28,7 +28,7 @@ log = logging.getLogger(__file__) -def load_file_to_delta( +def load_file_to_delta( # noqa: C901 input_file: File, delta_table: BaseTable, databricks_job_name: str, diff --git a/python-sdk/src/astro/sql/operators/data_validations/ColumnCheckOperator.py b/python-sdk/src/astro/sql/operators/data_validations/ColumnCheckOperator.py index 2fc2de3d3..4e59831ca 100644 --- a/python-sdk/src/astro/sql/operators/data_validations/ColumnCheckOperator.py +++ b/python-sdk/src/astro/sql/operators/data_validations/ColumnCheckOperator.py @@ -138,9 +138,9 @@ def process_checks(self): passed_tests.extend(_get_success_checks(checks, column)) if len(failed_tests) > 0: - raise AirflowException(f"The following tests have failed:" f"\n{''.join(failed_tests)}") + raise AirflowException(f"The following tests have failed: \n{''.join(failed_tests)}") if len(passed_tests) > 0: - print(f"The following tests have passed:" f"\n{''.join(passed_tests)}") + print(f"The following tests have passed: \n{''.join(passed_tests)}") def _get_failed_checks(checks, col=None): diff --git a/python-sdk/src/astro/sql/operators/export_to_file.py b/python-sdk/src/astro/sql/operators/export_to_file.py index 4a12be950..6e6f354b1 100644 --- a/python-sdk/src/astro/sql/operators/export_to_file.py +++ b/python-sdk/src/astro/sql/operators/export_to_file.py @@ -105,7 +105,7 @@ def get_openlineage_facets_on_complete(self, task_instance): # skipcq: PYL-W061 ) ] output_uri = ( - 
f"{self.output_file.openlineage_dataset_namespace}" f"{self.output_file.openlineage_dataset_name}" + f"{self.output_file.openlineage_dataset_namespace}{self.output_file.openlineage_dataset_name}" ) output_dataset = [ OpenlineageDataset( diff --git a/python-sdk/src/astro/sql/operators/raw_sql.py b/python-sdk/src/astro/sql/operators/raw_sql.py index 77d756e46..aaa402c98 100644 --- a/python-sdk/src/astro/sql/operators/raw_sql.py +++ b/python-sdk/src/astro/sql/operators/raw_sql.py @@ -7,8 +7,8 @@ try: from airflow.decorators.base import TaskDecorator, task_decorator_factory except ImportError: - from airflow.decorators.base import task_decorator_factory from airflow.decorators import _TaskDecorator as TaskDecorator + from airflow.decorators.base import task_decorator_factory import airflow diff --git a/python-sdk/src/astro/sql/operators/transform.py b/python-sdk/src/astro/sql/operators/transform.py index 5bd7898c9..b477dac28 100644 --- a/python-sdk/src/astro/sql/operators/transform.py +++ b/python-sdk/src/astro/sql/operators/transform.py @@ -6,8 +6,8 @@ try: from airflow.decorators.base import TaskDecorator, task_decorator_factory except ImportError: - from airflow.decorators.base import task_decorator_factory from airflow.decorators import _TaskDecorator as TaskDecorator + from airflow.decorators.base import task_decorator_factory from airflow.decorators.base import get_unique_task_id from airflow.models.xcom_arg import XComArg diff --git a/python-sdk/src/astro/sql/operators/upstream_task_mixin.py b/python-sdk/src/astro/sql/operators/upstream_task_mixin.py index 7048794b4..e217fd116 100644 --- a/python-sdk/src/astro/sql/operators/upstream_task_mixin.py +++ b/python-sdk/src/astro/sql/operators/upstream_task_mixin.py @@ -23,5 +23,5 @@ def __init__(self, **kwargs): self.set_upstream(task) else: raise AirflowException( - "Cannot upstream a non-task, please only use XcomArg or operators for this" " parameter" + "Cannot upstream a non-task, please only use XcomArg or 
operators for this parameter" ) diff --git a/python-sdk/tests/benchmark/dags/benchmark_gcs_to_big_query.py b/python-sdk/tests/benchmark/dags/benchmark_gcs_to_big_query.py index 4fb49c083..c1459d0e3 100644 --- a/python-sdk/tests/benchmark/dags/benchmark_gcs_to_big_query.py +++ b/python-sdk/tests/benchmark/dags/benchmark_gcs_to_big_query.py @@ -83,9 +83,9 @@ task_id="load_five_gb", bucket="astro-sdk", source_objects=[ - ("benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000" + str(i) + ".ndjson") + f"benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000{str(i)}.ndjson" if i >= 10 - else ("benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000" + "0" + str(i) + ".ndjson") + else f"benchmark/trimmed/pypi/pypi-downloads-2021-03-28-00000000000{str(i)}.ndjson" for i in range(20) ], destination_project_dataset_table=f"{DATASET_NAME}.{TABLE_NAME}", diff --git a/python-sdk/tests/files/locations/test_location_base.py b/python-sdk/tests/files/locations/test_location_base.py index d1853cff4..33a2fe191 100644 --- a/python-sdk/tests/files/locations/test_location_base.py +++ b/python-sdk/tests/files/locations/test_location_base.py @@ -40,7 +40,7 @@ def test_get_class_name_method_valid_name(): """Test valid case of implicit naming dependency among the module name and class name for dynamic imports""" class Test: # skipcq: PY-D0002 - __name__ = "test.some" + __name__ = "test.some" # noqa: A003 class TestLocation: # skipcq: PY-D0002 pass @@ -96,7 +96,7 @@ def test_get_class_name_method_invalid_name(): """Test invalid case of implicit naming dependency among the module name and class name for dynamic imports""" class Test: # skipcq: PY-D0002 - __name__ = "test.some" + __name__ = "test.some" # noqa: A003 class SomethingElseLocation: # skipcq: PY-D0002 pass diff --git a/python-sdk/tests_integration/sql/operators/test_snowflake_merge_func.py b/python-sdk/tests_integration/sql/operators/test_snowflake_merge_func.py index 764a17f47..91c2fa3d1 100644 --- 
a/python-sdk/tests_integration/sql/operators/test_snowflake_merge_func.py +++ b/python-sdk/tests_integration/sql/operators/test_snowflake_merge_func.py @@ -141,7 +141,8 @@ def test_is_valid_snow_identifier(self): # skipcq PYL-R0201 ] invalid_strings = [ "$invalid", - "Infvalid\x00" "Invalid Name", + "Infvalid\x00", + "Invalid Name", '"Invalid " Name"', '"Also Invalid Name""', ] diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 000000000..e1b91d284 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,38 @@ +line-length = 120 + +# Enable Pyflakes `E` and `F` codes by default. +extend-select = [ + "W", # pycodestyle warnings + "I", # isort + "C90", # Complexity +# "B", # flake8-bugbear + "C", # flake8-comprehensions +# "ANN", # flake8-annotations + "ISC", # flake8-implicit-str-concat + "T10", # flake8-debugger + "A", # flake8-builtins + "UP", # pyupgrade +] +extend-ignore = ["A002"] + +# Exclude a variety of commonly ignored directories. +extend-exclude = [ + "__pycache__", + "docs/source/conf.py", +] + +target-version = "py37" +fix = true + +[per-file-ignores] +"python-sdk/src/astro/sql/__init__.py" = ["F401"] +"python-sdk/src/astro/lineage/__init__.py" = ["F401"] +"python-sdk/src/astro/sql/table.py" = ["F401"] + + +[mccabe] +max-complexity = 6 + +[isort] +combine-as-imports = true +known-first-party = ["astro", "tests", "sql_cli"]