Skip to content

Commit cdd0e04

Browse files
authored
fix: Resolve all pre-commit ruff and mypy errors (92 total) (#2569)
* fix: Resolve all pre-commit ruff and mypy errors (92 total)

Fixed 90 ruff errors and 2 mypy errors across 242 files through proper code improvements without using noqa suppressions.

Ruff errors resolved:
- ARG001/ARG002: Removed unused function/method arguments
- RUF059: Removed unused unpacked variables from tuples
- PLC0415: Moved imports to module top-level
- B904: Added proper exception chaining with 'from' clause
- B007: Prefixed unused loop variables with underscore
- B905: Added strict=True parameter to zip() calls
- B011: Replaced 'assert False' with 'raise AssertionError'
- B026: Moved star-arg unpacking before keyword arguments
- B028: Added stacklevel parameter to warnings.warn()
- UP031: Converted percent formatting to f-strings

Mypy errors resolved:
- Fixed type annotation issues in exception handling
- Corrected function signature type hints

Test fixes:
- Fixed 4 failing tests affected by argument removal
- Updated test assertions and mock calls

Impact:
- All pre-commit checks now pass
- Code quality improved through proper refactoring
- Test suite fully passing
- Zero technical debt from error suppression

* fix: organize imports in resource files

Fix import formatting in authentication and gateway resource classes:
- Add blank lines between import groups per PEP 8
- Alphabetize imports in data_science_pipelines_application.py

Files updated:
- authentication_config_openshift_io.py
- authentication_operator_openshift_io.py
- data_science_pipelines_application.py
- gateway_config.py
1 parent ac60b7f commit cdd0e04

File tree

292 files changed

+574
-419
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

292 files changed

+574
-419
lines changed

class_generator/cli.py

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
11
"""Command-line interface for the class generator."""
22

33
import fnmatch
4+
import logging
5+
import os
46
import shutil
57
import sys
68
from datetime import datetime
79
from pathlib import Path
810
from typing import Any
911

10-
import logging
11-
1212
import cloup
1313
from cloup.constraints import If, IsSet, accept_none, require_one
1414
from simple_logger.logger import get_logger
@@ -17,7 +17,7 @@
1717
from class_generator.core.coverage import analyze_coverage, generate_report
1818
from class_generator.core.discovery import discover_generated_resources
1919
from class_generator.core.generator import class_generator
20-
from class_generator.core.schema import update_kind_schema, ClusterVersionError
20+
from class_generator.core.schema import ClusterVersionError, update_kind_schema
2121
from class_generator.tests.test_generation import generate_class_generator_tests
2222
from class_generator.utils import execute_parallel_tasks
2323
from ocp_resources.utils.utils import convert_camel_case_to_snake_case
@@ -57,7 +57,7 @@ def handle_schema_update(update_schema: bool, generate_missing: bool) -> bool:
5757
LOGGER.info("Updating resource schema...")
5858
try:
5959
update_kind_schema()
60-
except (RuntimeError, IOError, ClusterVersionError) as e:
60+
except (OSError, RuntimeError, ClusterVersionError) as e:
6161
LOGGER.exception(f"Failed to update schema: {e}")
6262
sys.exit(1)
6363

@@ -251,9 +251,9 @@ def regenerate_single_resource(resource: dict[str, Any]) -> tuple[str, bool, str
251251
return resource_kind, False, str(e)
252252

253253
# Process results from parallel execution
254-
def process_regeneration_result(resource: dict[str, Any], result: tuple[str, bool, str | None]) -> None:
254+
def process_regeneration_result(_resource: dict[str, Any], result: tuple[str, bool, str | None]) -> None:
255255
nonlocal success_count, error_count
256-
resource_kind, success, error = result
256+
_resource_kind, success, _error = result
257257
if success:
258258
success_count += 1
259259
else:
@@ -391,7 +391,7 @@ def generate_with_backup(kind_to_generate: str) -> tuple[str, bool, str | None]:
391391
return kind_to_generate, False, str(e)
392392

393393
# Process results from parallel execution
394-
def process_generation_result(kind_to_generate: str, result: tuple[str, bool, str | None]) -> None:
394+
def process_generation_result(_kind_to_generate: str, result: tuple[str, bool, str | None]) -> None:
395395
nonlocal success_count, error_count, failed_kinds
396396
kind_name, success, error = result
397397
if success:
@@ -447,8 +447,6 @@ def handle_test_generation(add_tests: bool) -> None:
447447

448448
# Run the generated test file
449449
LOGGER.info("Running generated tests...")
450-
import os
451-
452450
test_file = "class_generator/tests/test_class_generator.py"
453451
exit_code = os.system(f"uv run pytest {test_file}")
454452

class_generator/core/coverage.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ def analyze_coverage(
6868
continue
6969

7070
try:
71-
with open(filepath, "r") as f:
71+
with open(filepath) as f:
7272
content = f.read()
7373

7474
# Check if file is auto-generated

class_generator/core/discovery.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
"""Discovery functions for finding cluster resources and generated files."""
22

3+
from concurrent.futures import Future, ThreadPoolExecutor, as_completed
34
from pathlib import Path
45
from typing import Any
5-
from concurrent.futures import ThreadPoolExecutor, as_completed, Future
66

77
from kubernetes.dynamic import DynamicClient
88
from simple_logger.logger import get_logger
@@ -221,7 +221,7 @@ def discover_generated_resources() -> list[dict[str, Any]]:
221221
for info in resource_infos:
222222
# Read file to check for user code
223223
try:
224-
with open(info.file_path, "r", encoding="utf-8") as f:
224+
with open(info.file_path, encoding="utf-8") as f:
225225
content = f.read()
226226
except FileNotFoundError:
227227
LOGGER.warning(f"File not found: {info.file_path}, skipping...")

class_generator/core/generator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ def class_generator(
155155
LOGGER.info("Updating schema")
156156
try:
157157
update_kind_schema()
158-
except (RuntimeError, IOError) as e:
158+
except (OSError, RuntimeError) as e:
159159
error_msg = f"Failed to update schema: {e}"
160160
LOGGER.error(error_msg)
161161
raise RuntimeError(error_msg) from e

class_generator/core/schema.py

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
from simple_logger.logger import get_logger
1313

1414
from class_generator.constants import DEFINITIONS_FILE, RESOURCES_MAPPING_FILE, SCHEMA_DIR
15-
from class_generator.utils import execute_parallel_with_mapping, execute_parallel_tasks
15+
from class_generator.utils import execute_parallel_tasks, execute_parallel_with_mapping
1616
from ocp_resources.utils.archive_utils import save_json_archive
1717
from ocp_resources.utils.schema_validator import SchemaValidator
1818

@@ -136,9 +136,9 @@ def check_and_update_cluster_version(client: str) -> bool:
136136
last_cluster_version_generated: str = ""
137137

138138
try:
139-
with open(cluster_version_file, "r") as fd:
139+
with open(cluster_version_file) as fd:
140140
last_cluster_version_generated = fd.read().strip()
141-
except (FileNotFoundError, IOError):
141+
except (OSError, FileNotFoundError):
142142
# Treat missing file as first run - use baseline version that allows updates
143143
last_cluster_version_generated = "v0.0.0"
144144
LOGGER.info("Cluster version file not found - treating as first run with baseline version v0.0.0")
@@ -530,7 +530,7 @@ def process_schema_definitions(
530530
definitions = {}
531531
if not allow_updates:
532532
try:
533-
with open(DEFINITIONS_FILE, "r") as f:
533+
with open(DEFINITIONS_FILE) as f:
534534
existing_definitions_data = json.load(f)
535535
definitions = existing_definitions_data.get("definitions", {})
536536
LOGGER.info(f"Loaded {len(definitions)} existing definitions to preserve")
@@ -1064,12 +1064,12 @@ def process_explain_result(spec_tuple: tuple[str, str], result: Any) -> None:
10641064
LOGGER.debug(f"Failed to obtain explain data for {ref_name} from {explain_path}")
10651065

10661066
def handle_explain_error(spec_tuple: tuple[str, str], exc: Exception) -> None:
1067-
ref_name, explain_path = spec_tuple
1068-
LOGGER.debug(f"Exception occurred while explaining {ref_name} from {explain_path}: {exc}")
1067+
ref_name, _explain_path = spec_tuple
1068+
LOGGER.debug(f"Exception occurred while explaining {ref_name}: {exc}")
10691069
explain_results[ref_name] = None
10701070

10711071
def create_explain_task(spec_tuple: tuple[str, str]) -> Any:
1072-
ref_name, explain_path = spec_tuple
1072+
_ref_name, explain_path = spec_tuple
10731073
return _run_explain_and_parse(client, explain_path)
10741074

10751075
execute_parallel_tasks(
@@ -1244,7 +1244,7 @@ def process_required_field_result(task_tuple: tuple[str, str], result: Any) -> N
12441244
definitions[schema_key] = updated_schema
12451245

12461246
def handle_required_field_error(task_tuple: tuple[str, str], exc: Exception) -> None:
1247-
schema_key, explain_path = task_tuple
1247+
schema_key, _explain_path = task_tuple
12481248
LOGGER.debug(f"Failed to process required fields for {schema_key}: {exc}")
12491249
# Set empty list if explain fails
12501250
current_schema = definitions[schema_key]
@@ -1307,14 +1307,14 @@ def _get_missing_core_definitions(
13071307
if refs_to_fetch:
13081308

13091309
def process_missing_definition_result(task_tuple: tuple[str, str], result: Any) -> None:
1310-
ref_name, oc_path = task_tuple
1310+
_ref_name, _oc_path = task_tuple
13111311
if result:
13121312
fetched_ref_name, schema = result
13131313
missing_definitions[fetched_ref_name] = schema
13141314
LOGGER.debug(f"Successfully fetched definition for {fetched_ref_name}")
13151315

13161316
def handle_missing_definition_error(task_tuple: tuple[str, str], exc: Exception) -> None:
1317-
ref_name, oc_path = task_tuple
1317+
ref_name, _oc_path = task_tuple
13181318
LOGGER.debug(f"Failed to fetch definition for {ref_name}: {exc}")
13191319

13201320
def create_missing_definition_task(task_tuple: tuple[str, str]) -> Any:
@@ -1360,10 +1360,10 @@ def write_schema_files(
13601360
# Ensure schema directory exists
13611361
try:
13621362
Path(SCHEMA_DIR).mkdir(parents=True, exist_ok=True)
1363-
except (OSError, IOError) as e:
1363+
except OSError as e:
13641364
error_msg = f"Failed to create schema directory {SCHEMA_DIR}: {e}"
13651365
LOGGER.error(error_msg)
1366-
raise IOError(error_msg) from e
1366+
raise OSError(error_msg) from e
13671367

13681368
# Fetch missing core definitions if schemas are available
13691369
if schemas:
@@ -1400,18 +1400,18 @@ def write_schema_files(
14001400
with open(definitions_file, "w") as fd:
14011401
json.dump(definitions_data, fd, indent=2, sort_keys=True)
14021402
LOGGER.info(f"Written {len(definitions)} definitions to {definitions_file}")
1403-
except (OSError, IOError, TypeError) as e:
1403+
except (OSError, TypeError) as e:
14041404
error_msg = f"Failed to write definitions file {definitions_file}: {e}"
14051405
LOGGER.error(error_msg)
1406-
raise IOError(error_msg) from e
1406+
raise OSError(error_msg) from e
14071407

14081408
# Write and archive resources mapping
14091409
try:
14101410
save_json_archive(resources_mapping, RESOURCES_MAPPING_FILE)
1411-
except (OSError, IOError, TypeError) as e:
1411+
except (OSError, TypeError) as e:
14121412
error_msg = f"Failed to save and archive resources mapping file {RESOURCES_MAPPING_FILE}: {e}"
14131413
LOGGER.error(error_msg)
1414-
raise IOError(error_msg) from e
1414+
raise OSError(error_msg) from e
14151415

14161416

14171417
@dataclasses.dataclass
@@ -1523,11 +1523,11 @@ def _handle_no_schemas_case() -> None:
15231523
"""
15241524
LOGGER.info("No schemas fetched. Preserving existing data to avoid overwriting with empty definitions.")
15251525
try:
1526-
with open(DEFINITIONS_FILE, "r") as fd:
1526+
with open(DEFINITIONS_FILE) as fd:
15271527
existing_definitions_data = json.load(fd)
15281528
definitions = existing_definitions_data.get("definitions", {})
15291529
LOGGER.info(f"Found {len(definitions)} existing definitions that will be preserved")
1530-
except (FileNotFoundError, IOError, json.JSONDecodeError):
1530+
except (OSError, FileNotFoundError, json.JSONDecodeError):
15311531
LOGGER.debug("Could not load existing definitions file. No existing definitions to preserve.")
15321532

15331533

class_generator/formatters/template_renderer.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
"""Jinja template rendering for resource generation."""
22

3-
from typing import Any
43
from pathlib import Path
4+
from typing import Any
55

66
from jinja2 import DebugUndefined, Environment, FileSystemLoader, meta
77
from simple_logger.logger import get_logger
@@ -38,7 +38,7 @@ def render_jinja_template(template_dict: dict[Any, Any], template_dir: str, temp
3838
except AttributeError:
3939
# Fallback: read the template file directly
4040
template_path = Path(template_dir) / template_name
41-
with open(template_path, "r", encoding="utf-8") as f:
41+
with open(template_path, encoding="utf-8") as f:
4242
template_source = f.read()
4343

4444
ast = env.parse(source=template_source)

class_generator/parsers/explain_parser.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ def parse_explain(kind: str) -> list[dict[str, Any]]:
5555

5656
# For each API group, select the latest version
5757
filtered_schemas = []
58-
for group, group_schemas in schemas_by_group.items():
58+
for _group, group_schemas in schemas_by_group.items():
5959
if len(group_schemas) > 1:
6060
# Multiple versions in same group - pick latest
6161
versions = []

class_generator/parsers/type_parser.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66

77
from simple_logger.logger import get_logger
88

9-
from class_generator.constants import MISSING_DESCRIPTION_STR, SPEC_STR, DEFINITIONS_FILE
9+
from class_generator.constants import DEFINITIONS_FILE, MISSING_DESCRIPTION_STR, SPEC_STR
1010
from class_generator.utils import sanitize_python_name
1111
from ocp_resources.utils.utils import convert_camel_case_to_snake_case
1212

class_generator/parsers/user_code_parser.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -26,20 +26,20 @@ def parse_user_code_from_file(file_path: str) -> tuple[str, str]:
2626
try:
2727
with open(file_path, encoding="utf-8") as fd:
2828
data = fd.read()
29-
except FileNotFoundError:
30-
raise FileNotFoundError(f"File not found: {file_path}. Please ensure the file exists.")
31-
except PermissionError:
32-
raise PermissionError(f"Permission denied when accessing file: {file_path}. Check file permissions.")
29+
except FileNotFoundError as err:
30+
raise FileNotFoundError(f"File not found: {file_path}. Please ensure the file exists.") from err
31+
except PermissionError as err:
32+
raise PermissionError(f"Permission denied when accessing file: {file_path}. Check file permissions.") from err
3333
except UnicodeDecodeError as e:
3434
raise UnicodeDecodeError(
3535
e.encoding,
3636
e.object,
3737
e.start,
3838
e.end,
3939
f"Failed to decode file {file_path} with UTF-8 encoding. The file may contain invalid characters.",
40-
)
40+
) from e
4141
except Exception as e:
42-
raise Exception(f"Unexpected error reading file {file_path}: {type(e).__name__}: {str(e)}")
42+
raise Exception(f"Unexpected error reading file {file_path}: {type(e).__name__}: {str(e)}") from e
4343

4444
end_of_generated_code_line = " # End of generated code"
4545
user_code: str = ""

class_generator/tests/manifests/APIServer/api_server.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33

44
from typing import Any
5+
56
from ocp_resources.resource import Resource
67

78

0 commit comments

Comments (0)