Skip to content

Commit

Permalink
style: fix ruff issues (#2172)
Browse files Browse the repository at this point in the history
  • Loading branch information
Lee-W authored Jun 21, 2024
1 parent e5f5e82 commit 33ca675
Show file tree
Hide file tree
Showing 8 changed files with 64 additions and 32 deletions.
13 changes: 9 additions & 4 deletions python-sdk/src/astro/databases/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ def get_merge_initialization_query(parameters: tuple) -> str:
it agnostic to database.
"""
constraints = ",".join(parameters)
sql = "ALTER TABLE {{table}} ADD CONSTRAINT airflow UNIQUE (%s)" % constraints
sql = f"ALTER TABLE {{{{table}}}} ADD CONSTRAINT airflow UNIQUE ({constraints})"
return sql

@staticmethod
Expand Down Expand Up @@ -319,7 +319,8 @@ def create_table_using_schema_autodetection(
)

def is_native_autodetect_schema_available( # skipcq: PYL-R0201
self, file: File # skipcq: PYL-W0613
self,
file: File, # skipcq: PYL-W0613
) -> bool:
"""
Check if native auto detection of schema is available.
Expand Down Expand Up @@ -801,7 +802,9 @@ def schema_exists(self, schema: str) -> bool:
# ---------------------------------------------------------

def get_sqlalchemy_template_table_identifier_and_parameter(
self, table: BaseTable, jinja_table_identifier: str # skipcq PYL-W0613
self,
table: BaseTable,
jinja_table_identifier: str, # skipcq PYL-W0613
) -> tuple[str, str]:
"""
During the conversion from a Jinja-templated SQL query to a SQLAlchemy query, there is the need to
Expand Down Expand Up @@ -853,7 +856,9 @@ def parameterize_variable(self, variable: str):
return ":" + variable

def is_native_load_file_available( # skipcq: PYL-R0201
self, source_file: File, target_table: BaseTable # skipcq: PYL-W0613
self,
source_file: File,
target_table: BaseTable, # skipcq: PYL-W0613
) -> bool:
"""
Check if there is an optimised path for source to destination.
Expand Down
3 changes: 2 additions & 1 deletion python-sdk/src/astro/databases/duckdb.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,8 @@ def get_merge_initialization_query(parameters: tuple) -> str:
"""
Handles database-specific logic to handle index for DuckDB.
"""
return "CREATE UNIQUE INDEX merge_index ON {{table}}(%s)" % ",".join(parameters) # skipcq PYL-C0209
joined_parameters = ",".join(parameters)
return f"CREATE UNIQUE INDEX merge_index ON {{{{table}}}}({joined_parameters})"

def merge_table(
self,
Expand Down
13 changes: 9 additions & 4 deletions python-sdk/src/astro/databases/snowflake.py
Original file line number Diff line number Diff line change
Expand Up @@ -455,7 +455,8 @@ def drop_stage(self, stage: SnowflakeStage) -> None:
# ---------------------------------------------------------

def is_native_autodetect_schema_available( # skipcq: PYL-R0201
self, file: File # skipcq: PYL-W0613
self,
file: File, # skipcq: PYL-W0613
) -> bool:
"""
Check if native auto detection of schema is available.
Expand Down Expand Up @@ -585,7 +586,9 @@ def create_table_using_schema_autodetection(
self.truncate_table(table)

def is_native_load_file_available(
self, source_file: File, target_table: BaseTable # skipcq PYL-W0613, PYL-R0201
self,
source_file: File,
target_table: BaseTable, # skipcq PYL-W0613, PYL-R0201
) -> bool:
"""
Check if there is an optimised path for source to destination.
Expand Down Expand Up @@ -654,7 +657,9 @@ def _get_table_columns_count(self, table_name: str) -> int:
try:
table_columns_count = int(
self.hook.run(
sql_statement, parameters={"table_name": table_name}, handler=lambda cur: cur.fetchone()
sql_statement,
parameters={"table_name": table_name},
handler=lambda cur: cur.fetchone(),
)[0]
)
except AttributeError: # pragma: no cover
Expand Down Expand Up @@ -1059,7 +1064,7 @@ def get_merge_initialization_query(cls, parameters: tuple) -> str:
identifier_enclosure = '"'

constraints = ",".join([f"{identifier_enclosure}{p}{identifier_enclosure}" for p in parameters])
sql = "ALTER TABLE {{table}} ADD CONSTRAINT airflow UNIQUE (%s)" % constraints # skipcq PYL-C0209
sql = f"ALTER TABLE {{{{table}}}} ADD CONSTRAINT airflow UNIQUE ({constraints})"
return sql

def openlineage_dataset_name(self, table: BaseTable) -> str:
Expand Down
3 changes: 2 additions & 1 deletion python-sdk/src/astro/databases/sqlite.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,8 @@ def get_merge_initialization_query(parameters: tuple) -> str:
"""
Handles database-specific logic to handle index for Sqlite.
"""
return "CREATE UNIQUE INDEX merge_index ON {{table}}(%s)" % ",".join(parameters) # skipcq PYL-C0209
joined_parameters = ",".join(parameters)
return f"CREATE UNIQUE INDEX merge_index ON {{{{table}}}}({joined_parameters})"

def merge_table(
self,
Expand Down
8 changes: 8 additions & 0 deletions python-sdk/tests/databases/test_duckdb.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
from astro.databases.duckdb import DuckdbDatabase


def test_get_merge_initialization_query():
    """DuckDB merge initialization emits a UNIQUE index over the given columns."""
    columns = ("col_1", "col_2")

    statement = DuckdbDatabase.get_merge_initialization_query(columns)
    expected = "CREATE UNIQUE INDEX merge_index ON {{table}}(col_1,col_2)"
    assert statement == expected
6 changes: 6 additions & 0 deletions python-sdk/tests/databases/test_snowflake.py
Original file line number Diff line number Diff line change
Expand Up @@ -287,3 +287,9 @@ def test_get_copy_into_with_metadata_sql_statement_no_metadata_columns():
)
with pytest.raises(ValueError, match="Error: Requires metadata columns to be set in load options"):
database._get_copy_into_with_metadata_sql_statement(file_path, table, stage)


def test_get_merge_initialization_query():
    """Snowflake merge initialization emits an ALTER TABLE ... UNIQUE constraint."""
    columns = ("col_1", "col_2")
    statement = SnowflakeDatabase.get_merge_initialization_query(columns)
    expected = "ALTER TABLE {{table}} ADD CONSTRAINT airflow UNIQUE (col_1,col_2)"
    assert statement == expected
8 changes: 8 additions & 0 deletions python-sdk/tests/databases/test_sqlite.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import pytest

from astro.constants import Database
from astro.databases.sqlite import SqliteDatabase
from astro.files import File

DEFAULT_CONN_ID = "sqlite_default"
Expand All @@ -31,3 +32,10 @@ def test_export_table_to_file_file_already_exists_raises_exception(
database.export_table_to_file(source_table, File(str(filepath)))
err_msg = exception_info.value.args[0]
assert err_msg.endswith(f"The file {filepath} already exists.")


def test_get_merge_initialization_query():
    """SQLite merge initialization emits a UNIQUE index, preserving column type text."""
    columns = ("col_1 text(4)", "col_2 text(15)")

    statement = SqliteDatabase.get_merge_initialization_query(columns)
    expected = "CREATE UNIQUE INDEX merge_index ON {{table}}(col_1 text(4),col_2 text(15))"
    assert statement == expected
42 changes: 20 additions & 22 deletions ruff.toml
Original file line number Diff line number Diff line change
@@ -1,38 +1,36 @@
line-length = 120

target-version = "py37"
fix = true
# Exclude a variety of commonly ignored directories.
extend-exclude = ["__pycache__", "docs/source/conf.py"]

[lint]
extend-ignore = ["A002"]
# Enable Pyflakes `E` and `F` codes by default.
extend-select = [
"W", # pycodestyle warnings
"I", # isort
"C90", # Complexity
# "B", # flake8-bugbear
"C", # flake8-comprehensions
# "ANN", # flake8-comprehensions
"ISC", # flake8-implicit-str-concat
"T10", # flake8-debugger
"A", # flake8-builtins
"UP", # pyupgrade
"W", # pycodestyle warnings
"I", # isort
"C90", # Complexity
# "B", # flake8-bugbear
"C", # flake8-comprehensions
# "ANN", # flake8-comprehensions
"ISC", # flake8-implicit-str-concat
"T10", # flake8-debugger
"A", # flake8-builtins
"UP", # pyupgrade
]
extend-ignore = ["A002"]

# Exclude a variety of commonly ignored directories.
extend-exclude = [
"__pycache__",
"docs/source/conf.py",
]

target-version = "py37"
fix = true

[per-file-ignores]
[lint.per-file-ignores]
"python-sdk/src/astro/sql/__init__.py" = ["F401"]
"python-sdk/src/astro/lineage/__init__.py" = ["F401"]
"python-sdk/src/astro/sql/table.py" = ["F401"]


[mccabe]
[lint.mccabe]
max-complexity = 6

[isort]
[lint.isort]
combine-as-imports = true
known-first-party = ["astro", "tests"]

0 comments on commit 33ca675

Please sign in to comment.