From 33ca6758f8d4052faba21e4579f358cda232dc98 Mon Sep 17 00:00:00 2001
From: Wei Lee
Date: Fri, 21 Jun 2024 17:19:33 +0800
Subject: [PATCH] style: fix ruff issues (#2172)

---
 python-sdk/src/astro/databases/base.py       | 13 ++++--
 python-sdk/src/astro/databases/duckdb.py     |  3 +-
 python-sdk/src/astro/databases/snowflake.py  | 13 ++++--
 python-sdk/src/astro/databases/sqlite.py     |  3 +-
 python-sdk/tests/databases/test_duckdb.py    |  8 ++++
 python-sdk/tests/databases/test_snowflake.py |  6 +++
 python-sdk/tests/databases/test_sqlite.py    |  8 ++++
 ruff.toml                                    | 42 ++++++++++----------
 8 files changed, 64 insertions(+), 32 deletions(-)
 create mode 100644 python-sdk/tests/databases/test_duckdb.py

diff --git a/python-sdk/src/astro/databases/base.py b/python-sdk/src/astro/databases/base.py
index dcf247ca6..9eb7cf8cf 100644
--- a/python-sdk/src/astro/databases/base.py
+++ b/python-sdk/src/astro/databases/base.py
@@ -205,7 +205,7 @@ def get_merge_initialization_query(parameters: tuple) -> str:
         it agnostic to database.
         """
         constraints = ",".join(parameters)
-        sql = "ALTER TABLE {{table}} ADD CONSTRAINT airflow UNIQUE (%s)" % constraints
+        sql = f"ALTER TABLE {{{{table}}}} ADD CONSTRAINT airflow UNIQUE ({constraints})"
         return sql
 
     @staticmethod
@@ -319,7 +319,8 @@ def create_table_using_schema_autodetection(
         )
 
     def is_native_autodetect_schema_available(  # skipcq: PYL-R0201
-        self, file: File  # skipcq: PYL-W0613
+        self,
+        file: File,  # skipcq: PYL-W0613
     ) -> bool:
         """
         Check if native auto detection of schema is available.
@@ -801,7 +802,9 @@ def schema_exists(self, schema: str) -> bool:
     # ---------------------------------------------------------
 
     def get_sqlalchemy_template_table_identifier_and_parameter(
-        self, table: BaseTable, jinja_table_identifier: str  # skipcq PYL-W0613
+        self,
+        table: BaseTable,
+        jinja_table_identifier: str,  # skipcq PYL-W0613
     ) -> tuple[str, str]:
         """
         During the conversion from a Jinja-templated SQL query to a SQLAlchemy query, there is the need to
@@ -853,7 +856,9 @@ def parameterize_variable(self, variable: str):
         return ":" + variable
 
     def is_native_load_file_available(  # skipcq: PYL-R0201
-        self, source_file: File, target_table: BaseTable  # skipcq: PYL-W0613
+        self,
+        source_file: File,
+        target_table: BaseTable,  # skipcq: PYL-W0613
     ) -> bool:
         """
         Check if there is an optimised path for source to destination.
diff --git a/python-sdk/src/astro/databases/duckdb.py b/python-sdk/src/astro/databases/duckdb.py
index b78f6de4b..f8a794afc 100644
--- a/python-sdk/src/astro/databases/duckdb.py
+++ b/python-sdk/src/astro/databases/duckdb.py
@@ -81,7 +81,8 @@ def get_merge_initialization_query(parameters: tuple) -> str:
         """
         Handles database-specific logic to handle index for DuckDB.
         """
-        return "CREATE UNIQUE INDEX merge_index ON {{table}}(%s)" % ",".join(parameters)  # skipcq PYL-C0209
+        joined_parameters = ",".join(parameters)
+        return f"CREATE UNIQUE INDEX merge_index ON {{{{table}}}}({joined_parameters})"
 
     def merge_table(
         self,
diff --git a/python-sdk/src/astro/databases/snowflake.py b/python-sdk/src/astro/databases/snowflake.py
index 44c7bbce4..40a5d81d4 100644
--- a/python-sdk/src/astro/databases/snowflake.py
+++ b/python-sdk/src/astro/databases/snowflake.py
@@ -455,7 +455,8 @@ def drop_stage(self, stage: SnowflakeStage) -> None:
     # ---------------------------------------------------------
 
     def is_native_autodetect_schema_available(  # skipcq: PYL-R0201
-        self, file: File  # skipcq: PYL-W0613
+        self,
+        file: File,  # skipcq: PYL-W0613
     ) -> bool:
         """
         Check if native auto detection of schema is available.
@@ -585,7 +586,9 @@ def create_table_using_schema_autodetection(
             self.truncate_table(table)
 
     def is_native_load_file_available(
-        self, source_file: File, target_table: BaseTable  # skipcq PYL-W0613, PYL-R0201
+        self,
+        source_file: File,
+        target_table: BaseTable,  # skipcq PYL-W0613, PYL-R0201
     ) -> bool:
         """
         Check if there is an optimised path for source to destination.
@@ -654,7 +657,9 @@ def _get_table_columns_count(self, table_name: str) -> int:
         try:
             table_columns_count = int(
                 self.hook.run(
-                    sql_statement, parameters={"table_name": table_name}, handler=lambda cur: cur.fetchone()
+                    sql_statement,
+                    parameters={"table_name": table_name},
+                    handler=lambda cur: cur.fetchone(),
                 )[0]
             )
         except AttributeError:  # pragma: no cover
@@ -1059,7 +1064,7 @@ def get_merge_initialization_query(cls, parameters: tuple) -> str:
             identifier_enclosure = '"'
 
         constraints = ",".join([f"{identifier_enclosure}{p}{identifier_enclosure}" for p in parameters])
-        sql = "ALTER TABLE {{table}} ADD CONSTRAINT airflow UNIQUE (%s)" % constraints  # skipcq PYL-C0209
+        sql = f"ALTER TABLE {{{{table}}}} ADD CONSTRAINT airflow UNIQUE ({constraints})"
         return sql
 
     def openlineage_dataset_name(self, table: BaseTable) -> str:
diff --git a/python-sdk/src/astro/databases/sqlite.py b/python-sdk/src/astro/databases/sqlite.py
index bc80ef165..c346e3c02 100644
--- a/python-sdk/src/astro/databases/sqlite.py
+++ b/python-sdk/src/astro/databases/sqlite.py
@@ -90,7 +90,8 @@ def get_merge_initialization_query(parameters: tuple) -> str:
         """
         Handles database-specific logic to handle index for Sqlite.
        """
-        return "CREATE UNIQUE INDEX merge_index ON {{table}}(%s)" % ",".join(parameters)  # skipcq PYL-C0209
+        joined_parameters = ",".join(parameters)
+        return f"CREATE UNIQUE INDEX merge_index ON {{{{table}}}}({joined_parameters})"
 
     def merge_table(
         self,
diff --git a/python-sdk/tests/databases/test_duckdb.py b/python-sdk/tests/databases/test_duckdb.py
new file mode 100644
index 000000000..5bfce2765
--- /dev/null
+++ b/python-sdk/tests/databases/test_duckdb.py
@@ -0,0 +1,8 @@
+from astro.databases.duckdb import DuckdbDatabase
+
+
+def test_get_merge_initialization_query():
+    parameters = ("col_1", "col_2")
+
+    sql = DuckdbDatabase.get_merge_initialization_query(parameters)
+    assert sql == "CREATE UNIQUE INDEX merge_index ON {{table}}(col_1,col_2)"
diff --git a/python-sdk/tests/databases/test_snowflake.py b/python-sdk/tests/databases/test_snowflake.py
index 636e96682..82a8971e4 100644
--- a/python-sdk/tests/databases/test_snowflake.py
+++ b/python-sdk/tests/databases/test_snowflake.py
@@ -287,3 +287,9 @@ def test_get_copy_into_with_metadata_sql_statement_no_metadata_columns():
     )
     with pytest.raises(ValueError, match="Error: Requires metadata columns to be set in load options"):
         database._get_copy_into_with_metadata_sql_statement(file_path, table, stage)
+
+
+def test_get_merge_initialization_query():
+    parameters = ("col_1", "col_2")
+    sql = SnowflakeDatabase.get_merge_initialization_query(parameters)
+    assert sql == "ALTER TABLE {{table}} ADD CONSTRAINT airflow UNIQUE (col_1,col_2)"
diff --git a/python-sdk/tests/databases/test_sqlite.py b/python-sdk/tests/databases/test_sqlite.py
index d3fb3cb15..2b98cec46 100644
--- a/python-sdk/tests/databases/test_sqlite.py
+++ b/python-sdk/tests/databases/test_sqlite.py
@@ -5,6 +5,7 @@
 import pytest
 
 from astro.constants import Database
+from astro.databases.sqlite import SqliteDatabase
 from astro.files import File
 
 DEFAULT_CONN_ID = "sqlite_default"
@@ -31,3 +32,10 @@ def test_export_table_to_file_file_already_exists_raises_exception(
     database.export_table_to_file(source_table, File(str(filepath)))
     err_msg = exception_info.value.args[0]
     assert err_msg.endswith(f"The file {filepath} already exists.")
+
+
+def test_get_merge_initialization_query():
+    parameters = ("col_1 text(4)", "col_2 text(15)")
+
+    sql = SqliteDatabase.get_merge_initialization_query(parameters)
+    assert sql == "CREATE UNIQUE INDEX merge_index ON {{table}}(col_1 text(4),col_2 text(15))"
diff --git a/ruff.toml b/ruff.toml
index a46eb3b10..801cf0f93 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,38 +1,36 @@
 line-length = 120
+target-version = "py37"
+fix = true
 
+# Exclude a variety of commonly ignored directories.
+extend-exclude = ["__pycache__", "docs/source/conf.py"]
+
+[lint]
+extend-ignore = ["A002"]
 # Enable Pyflakes `E` and `F` codes by default.
 extend-select = [
-    "W",   # pycodestyle warnings
-    "I",   # isort
-    "C90", # Complexity
-#    "B",   # flake8-bugbear
-    "C",   # flake8-comprehensions
-#    "ANN", # flake8-comprehensions
-    "ISC", # flake8-implicit-str-concat
-    "T10", # flake8-debugger
-    "A",   # flake8-builtins
-    "UP",  # pyupgrade
+    "W",    # pycodestyle warnings
+    "I",    # isort
+    "C90",  # Complexity
+    # "B",  # flake8-bugbear
+    "C",    # flake8-comprehensions
+    # "ANN",  # flake8-comprehensions
+    "ISC",  # flake8-implicit-str-concat
+    "T10",  # flake8-debugger
+    "A",    # flake8-builtins
+    "UP",   # pyupgrade
 ]
-extend-ignore = ["A002"]
-# Exclude a variety of commonly ignored directories.
-extend-exclude = [
-    "__pycache__",
-    "docs/source/conf.py",
-]
-
-target-version = "py37"
-fix = true
 
-[per-file-ignores]
+[lint.per-file-ignores]
 "python-sdk/src/astro/sql/__init__.py" = ["F401"]
 "python-sdk/src/astro/lineage/__init__.py" = ["F401"]
 "python-sdk/src/astro/sql/table.py" = ["F401"]
 
-[mccabe]
+[lint.mccabe]
 max-complexity = 6
 
-[isort]
+[lint.isort]
 combine-as-imports = true
 known-first-party = ["astro", "tests"]
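Note (illustration only, not part of the patch): the four-brace form in the rewritten queries is plain f-string escaping. Inside an f-string, "{{" and "}}" are literal braces, so "{{{{table}}}}" renders as the literal text "{{table}}", leaving the Jinja placeholder for later templating, while "{constraints}" is interpolated immediately, which gives the same output the old "%s"-style formatting produced. A minimal sketch of the behaviour the new tests assert, using the same column names as the tests:

    # Illustration: f-string brace escaping keeps the {{table}} placeholder intact.
    constraints = ",".join(("col_1", "col_2"))
    sql = f"ALTER TABLE {{{{table}}}} ADD CONSTRAINT airflow UNIQUE ({constraints})"
    assert sql == "ALTER TABLE {{table}} ADD CONSTRAINT airflow UNIQUE (col_1,col_2)"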