From d47a8d2e4ff886b0a878d5f60e4c5352d495fec6 Mon Sep 17 00:00:00 2001
From: Deepyaman Datta
Date: Tue, 6 Aug 2024 08:26:48 -0600
Subject: [PATCH] Apply Flake8 type checking and Ruff-specific rules (#4038)

* Apply Flake8 type checking and Ruff-specific rules

Signed-off-by: Deepyaman Datta

* Change outdated import path for `BaseSessionStore`

Signed-off-by: Deepyaman Datta

* Remove parenthetical refs to `DataCatalog` in docs

Signed-off-by: Deepyaman Datta

* Make `DataCatalog` top level for ReadTheDocs build

Signed-off-by: Deepyaman Datta

* Update the patch path to `kedro.framework.context`

Signed-off-by: Deepyaman Datta

* Use `exclude_also`, and add `TYPE_CHECKING` blocks

Signed-off-by: Deepyaman Datta

---------

Signed-off-by: Deepyaman Datta
---
 docs/source/conf.py                           |  6 ++--
 features/environment.py                       |  3 +-
 features/steps/cli_steps.py                   |  6 ++--
 features/steps/sh_run.py                      |  2 +-
 features/steps/util.py                        |  6 ++--
 kedro/config/omegaconf_config.py              |  3 +-
 kedro/framework/cli/catalog.py                | 11 +++++---
 kedro/framework/cli/hooks/specs.py            |  5 +++-
 kedro/framework/cli/jupyter.py                | 21 ++++++++------
 kedro/framework/cli/micropkg.py               | 15 +++++-----
 kedro/framework/cli/pipeline.py               | 12 ++++----
 kedro/framework/cli/project.py                | 10 ++++---
 kedro/framework/cli/starters.py               | 16 ++++++-----
 kedro/framework/cli/utils.py                  |  2 +-
 kedro/framework/context/context.py            | 18 ++++++------
 kedro/framework/hooks/specs.py                | 13 +++++----
 kedro/framework/project/__init__.py           | 10 ++++---
 kedro/framework/session/session.py            | 10 ++++---
 kedro/framework/startup.py                    |  4 +--
 kedro/io/core.py                              | 28 ++++++++-----------
 kedro/io/shared_memory_dataset.py             |  8 ++++--
 kedro/pipeline/modular_pipeline.py            |  6 ++--
 kedro/pipeline/node.py                        | 18 ++++++------
 kedro/pipeline/pipeline.py                    |  4 +--
 kedro/runner/parallel_runner.py               | 12 ++++----
 kedro/runner/runner.py                        | 11 +++++---
 kedro/runner/sequential_runner.py             | 12 ++++----
 kedro/runner/thread_runner.py                 | 14 ++++++----
 kedro/utils.py                                |  4 +--
 pyproject.toml                                | 18 ++++++------
 tests/config/test_omegaconf_config.py         |  2 +-
 tests/framework/cli/conftest.py               |  2 +-
 .../cli/micropkg/test_micropkg_package.py     |  2 +-
 tests/framework/cli/test_cli_hooks.py         |  5 +++-
 tests/framework/cli/test_starters.py          |  1 -
 tests/framework/session/conftest.py           | 13 +++++----
 tests/framework/session/test_session.py       |  7 ++---
 .../session/test_session_extension_hooks.py   |  4 +--
 tests/io/test_core.py                         | 10 +++----
 tests/ipython/conftest.py                     |  2 +-
 tests/pipeline/test_modular_pipeline.py       |  2 +-
 tests/pipeline/test_pipeline.py               |  4 +--
 tests/runner/conftest.py                      | 14 +++++-----
 tests/runner/test_parallel_runner.py          |  5 ++--
 tests/runner/test_thread_runner.py            |  2 +-
 45 files changed, 209 insertions(+), 174 deletions(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index a18d11bfe7..aeb7df2279 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -476,7 +476,7 @@ def autolink_classes_and_methods(lines):
         lines[i] = re.sub(existing, rf"{replacement}", lines[i])
 
 
-def autodoc_process_docstring(app, what, name, obj, options, lines):  # noqa: PLR0913
+def autodoc_process_docstring(app, what, name, obj, options, lines):
     try:
         # guarded method to make sure build never fails
         log_suggestions(lines, name)
@@ -486,7 +486,7 @@ def autodoc_process_docstring(app, what, name, obj, options, lines):  # noqa: PL
             style(
                 "Failed to check for class name mentions that can be "
                 f"converted to reStructuredText links in docstring of {name}. "
-                f"Error is: \n{str(e)}",
+                f"Error is: \n{e!s}",
                 fg="red",
             )
         )
@@ -515,7 +515,7 @@ def setup(app):
                 style(
                     "Failed to create list of (regex, reStructuredText link "
                     "replacement) for class names and method names in docstrings. "
-                    f"Error is: \n{str(e)}",
+                    f"Error is: \n{e!s}",
                     fg="red",
                 )
             )
diff --git a/features/environment.py b/features/environment.py
index 14be1445ef..0bc8835164 100644
--- a/features/environment.py
+++ b/features/environment.py
@@ -1,5 +1,4 @@
 """Behave environment setup commands."""
-# noqa: unused-argument
 from __future__ import annotations
 
 import os
@@ -65,7 +64,7 @@ def _setup_context_with_venv(context, venv_dir):
     path = context.env["PATH"].split(os.pathsep)
     path = [p for p in path if not (Path(p).parent / "pyvenv.cfg").is_file()]
     path = [p for p in path if not (Path(p).parent / "conda-meta").is_dir()]
-    path = [str(bin_dir)] + path
+    path = [str(bin_dir), *path]
     context.env["PATH"] = os.pathsep.join(path)
 
     # Create an empty pip.conf file and point pip to it
diff --git a/features/steps/cli_steps.py b/features/steps/cli_steps.py
index 871a1f0c78..7ee2c153d8 100644
--- a/features/steps/cli_steps.py
+++ b/features/steps/cli_steps.py
@@ -342,7 +342,7 @@ def commit_changes_to_git(context):
 def exec_kedro_target(context, command):
     """Execute Kedro target."""
     split_command = command.split()
-    cmd = [context.kedro] + split_command
+    cmd = [context.kedro, *split_command]
     context.result = run(cmd, env=context.env, cwd=str(context.root_project_dir))
 
 
@@ -378,7 +378,7 @@ def get_kedro_version_python(context):
 def exec_notebook(context, command):
     """Execute Kedro Jupyter target."""
     split_command = command.split()
-    cmd = [context.kedro, "jupyter"] + split_command
+    cmd = [context.kedro, "jupyter", *split_command]
 
     # Jupyter notebook forks a child process from a parent process, and
     # only kills the parent process when it is terminated
@@ -711,7 +711,7 @@ def check_docs_generated(context: behave.runner.Context):
         context.root_project_dir / "docs" / "build" / "html" / "index.html"
     ).read_text("utf-8")
     project_repo = context.project_name.replace("-", "_")
-    assert f"Welcome to project {project_repo}’s API docs!" in index_html, index_html
+    assert f"Welcome to project {project_repo}'s API docs!" in index_html, index_html
 
 
 @then("requirements should be generated")
diff --git a/features/steps/sh_run.py b/features/steps/sh_run.py
index c201d5b9ef..c6581a8978 100644
--- a/features/steps/sh_run.py
+++ b/features/steps/sh_run.py
@@ -90,7 +90,7 @@ def terminate(self) -> None:
         """Terminate process and children."""
         try:
             proc = psutil.Process(self.pid)
-            procs = [proc] + proc.children(recursive=True)
+            procs = [proc, *proc.children(recursive=True)]
         except psutil.NoSuchProcess:
             pass
         else:
diff --git a/features/steps/util.py b/features/steps/util.py
index f9c7b2c4e2..ab659b2cb0 100644
--- a/features/steps/util.py
+++ b/features/steps/util.py
@@ -5,9 +5,11 @@
 import os
 import re
 from contextlib import contextmanager
-from pathlib import Path
 from time import sleep, time
-from typing import Any, Callable, Iterator
+from typing import TYPE_CHECKING, Any, Callable, Iterator
+
+if TYPE_CHECKING:
+    from pathlib import Path
 
 
 @contextmanager
diff --git a/kedro/config/omegaconf_config.py b/kedro/config/omegaconf_config.py
index a74c0a1351..51fae73906 100644
--- a/kedro/config/omegaconf_config.py
+++ b/kedro/config/omegaconf_config.py
@@ -296,7 +296,6 @@ def load_and_merge_dir_config(  # noqa: PLR0913
             Resulting configuration dictionary.
 
         """
-        # noqa: too-many-locals
 
         if not self._fs.isdir(Path(conf_path).as_posix()):
             raise MissingConfigException(
@@ -306,7 +305,7 @@ def load_and_merge_dir_config(  # noqa: PLR0913
 
         paths = []
         for pattern in patterns:
-            for each in self._fs.glob(Path(f"{str(conf_path)}/{pattern}").as_posix()):
+            for each in self._fs.glob(Path(f"{conf_path!s}/{pattern}").as_posix()):
                 if not self._is_hidden(each):
                     paths.append(Path(each))
 
diff --git a/kedro/framework/cli/catalog.py b/kedro/framework/cli/catalog.py
index a0f6b413bd..020e62270c 100644
--- a/kedro/framework/cli/catalog.py
+++ b/kedro/framework/cli/catalog.py
@@ -4,8 +4,7 @@
 import copy
 from collections import defaultdict
 from itertools import chain
-from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 import click
 import yaml
@@ -14,10 +13,14 @@
 from kedro.framework.cli.utils import KedroCliError, env_option, split_string
 from kedro.framework.project import pipelines, settings
 from kedro.framework.session import KedroSession
-from kedro.framework.startup import ProjectMetadata
-from kedro.io import AbstractDataset
 from kedro.io.data_catalog import DataCatalog
 
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from kedro.framework.startup import ProjectMetadata
+    from kedro.io import AbstractDataset
+
 
 def _create_session(package_name: str, **kwargs: Any) -> KedroSession:
     kwargs.setdefault("save_on_close", False)
diff --git a/kedro/framework/cli/hooks/specs.py b/kedro/framework/cli/hooks/specs.py
index f772c6de57..73e750557b 100644
--- a/kedro/framework/cli/hooks/specs.py
+++ b/kedro/framework/cli/hooks/specs.py
@@ -4,10 +4,13 @@
 """
 from __future__ import annotations
 
-from kedro.framework.startup import ProjectMetadata
+from typing import TYPE_CHECKING
 
 from .markers import cli_hook_spec
 
+if TYPE_CHECKING:
+    from kedro.framework.startup import ProjectMetadata
+
 
 class CLICommandSpecs:
     """Namespace that defines all specifications for Kedro CLI's lifecycle hooks."""
diff --git a/kedro/framework/cli/jupyter.py b/kedro/framework/cli/jupyter.py
index e064a16420..91c62c4af3 100644
--- a/kedro/framework/cli/jupyter.py
+++ b/kedro/framework/cli/jupyter.py
@@ -7,7 +7,7 @@
 import os
 import shutil
 from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 import click
 
@@ -19,7 +19,9 @@
     python_call,
 )
 from kedro.framework.project import validate_settings
-from kedro.framework.startup import ProjectMetadata
+
+if TYPE_CHECKING:
+    from kedro.framework.startup import ProjectMetadata
 
 
 @click.group(name="Kedro")
@@ -34,7 +36,7 @@ def jupyter() -> None:
 
 @forward_command(jupyter, "setup", forward_help=True)
 @click.pass_obj  # this will pass the metadata as first argument
-def setup(metadata: ProjectMetadata, /, args: Any, **kwargs: Any) -> None:  # noqa: unused-argument
+def setup(metadata: ProjectMetadata, /, args: Any, **kwargs: Any) -> None:
     """Initialise the Jupyter Kernel for a kedro project."""
     _check_module_importable("ipykernel")
     validate_settings()
@@ -53,7 +55,7 @@ def jupyter_notebook(
     env: str,
     args: Any,
     **kwargs: Any,
-) -> None:  # noqa: unused-argument
+) -> None:
     """Open Jupyter Notebook with project specific variables loaded."""
     _check_module_importable("notebook")
     validate_settings()
@@ -66,8 +68,11 @@ def jupyter_notebook(
 
     python_call(
         "jupyter",
-        ["notebook", f"--MultiKernelManager.default_kernel_name={kernel_name}"]
-        + list(args),
+        [
+            "notebook",
+            f"--MultiKernelManager.default_kernel_name={kernel_name}",
+            *list(args),
+        ],
     )
 
 
@@ -80,7 +85,7 @@ def jupyter_lab(
     env: str,
     args: Any,
     **kwargs: Any,
-) -> None:  # noqa: unused-argument
+) -> None:
     """Open Jupyter Lab with project specific variables loaded."""
     _check_module_importable("jupyterlab")
     validate_settings()
@@ -93,7 +98,7 @@ def jupyter_lab(
 
     python_call(
         "jupyter",
-        ["lab", f"--MultiKernelManager.default_kernel_name={kernel_name}"] + list(args),
+        ["lab", f"--MultiKernelManager.default_kernel_name={kernel_name}", *list(args)],
     )
 
 
diff --git a/kedro/framework/cli/micropkg.py b/kedro/framework/cli/micropkg.py
index ba23245268..b826753cc6 100644
--- a/kedro/framework/cli/micropkg.py
+++ b/kedro/framework/cli/micropkg.py
@@ -11,10 +11,9 @@
 import toml
 from importlib import import_module
 from pathlib import Path
-from typing import Any, Iterable, Iterator
+from typing import Any, Iterable, Iterator, TYPE_CHECKING
 
 import click
-from importlib_metadata import PackageMetadata
 from omegaconf import OmegaConf
 from packaging.requirements import InvalidRequirement, Requirement
 from packaging.utils import canonicalize_name
@@ -38,7 +37,10 @@
     env_option,
     python_call,
 )
-from kedro.framework.startup import ProjectMetadata
+
+if TYPE_CHECKING:
+    from kedro.framework.startup import ProjectMetadata
+    from importlib_metadata import PackageMetadata
 
 _PYPROJECT_TOML_TEMPLATE = """
 [build-system]
@@ -107,7 +109,7 @@ def __eq__(self, other: Any) -> bool:
     )
 
 
-def _check_module_path(ctx: click.core.Context, param: Any, value: str) -> str:  # noqa: unused-argument
+def _check_module_path(ctx: click.core.Context, param: Any, value: str) -> str:
     if value and not re.match(r"^[\w.]+$", value):
         message = (
             "The micro-package location you provided is not a valid Python module path"
@@ -116,7 +118,6 @@ def _check_module_path(ctx: click.core.Context, param: Any, value: str) -> str:
     return value
 
 
-# noqa: missing-function-docstring
 @click.group(name="Kedro")
 def micropkg_cli() -> None:  # pragma: no cover
     pass
@@ -379,7 +380,6 @@ def package_micropkg(  # noqa: PLR0913
 
 
 def _get_fsspec_filesystem(location: str, fs_args: str | None) -> Any:
-    # noqa: import-outside-toplevel
    import fsspec
 
    from kedro.io.core import get_protocol_and_path
@@ -389,7 +389,7 @@ def _get_fsspec_filesystem(location: str, fs_args: str | None) -> Any:
 
    try:
        return fsspec.filesystem(protocol, **fs_args_config)
-    except Exception as exc:  # noqa: broad-except
+    except Exception as exc:
        # Specified protocol is not supported by `fsspec`
        # or requires extra dependencies
        click.secho(str(exc), fg="red")
@@ -408,7 +408,6 @@ def safe_extract(tar: tarfile.TarFile, path: Path) -> None:
        for member in tar.getmembers():
            member_path = path / member.name
            if not _is_within_directory(path, member_path):
-                # noqa: broad-exception-raised
                raise Exception("Failed to safely extract tar file.")
            safe_members.append(member)
        tar.extractall(path, members=safe_members)  # nosec B202
diff --git a/kedro/framework/cli/pipeline.py b/kedro/framework/cli/pipeline.py
index 4d73e2e343..b2b9e64956 100644
--- a/kedro/framework/cli/pipeline.py
+++ b/kedro/framework/cli/pipeline.py
@@ -5,7 +5,7 @@
 import shutil
 from pathlib import Path
 from textwrap import indent
-from typing import Any, NamedTuple
+from typing import TYPE_CHECKING, Any, NamedTuple
 
 import click
 
@@ -17,7 +17,9 @@
     env_option,
 )
 from kedro.framework.project import settings
-from kedro.framework.startup import ProjectMetadata
+
+if TYPE_CHECKING:
+    from kedro.framework.startup import ProjectMetadata
 
 _SETUP_PY_TEMPLATE = """# -*- coding: utf-8 -*-
 from setuptools import setup, find_packages
@@ -65,7 +67,7 @@ def _assert_pkg_name_ok(pkg_name: str) -> None:
         raise KedroCliError(message)
 
 
-def _check_pipeline_name(ctx: click.Context, param: Any, value: str) -> str:  # noqa: unused-argument
+def _check_pipeline_name(ctx: click.Context, param: Any, value: str) -> str:
     if value:
         _assert_pkg_name_ok(value)
     return value
@@ -105,7 +107,7 @@ def create_pipeline(
     skip_config: bool,
     env: str,
     **kwargs: Any,
-) -> None:  # noqa: unused-argument
+) -> None:
     """Create a new modular pipeline by providing a name."""
     package_dir = metadata.source_dir / metadata.package_name
     project_root = metadata.project_path / metadata.project_name
@@ -148,7 +150,7 @@ def create_pipeline(
 @click.pass_obj  # this will pass the metadata as first argument
 def delete_pipeline(
     metadata: ProjectMetadata, /, name: str, env: str, yes: bool, **kwargs: Any
-) -> None:  # noqa: unused-argument
+) -> None:
     """Delete a modular pipeline by providing a name."""
     package_dir = metadata.source_dir / metadata.package_name
     conf_source = settings.CONF_SOURCE
diff --git a/kedro/framework/cli/project.py b/kedro/framework/cli/project.py
index cd9a072184..482dc9e270 100644
--- a/kedro/framework/cli/project.py
+++ b/kedro/framework/cli/project.py
@@ -4,7 +4,7 @@
 import os
 import sys
 from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 import click
 
@@ -21,9 +21,11 @@
 )
 from kedro.framework.project import settings
 from kedro.framework.session import KedroSession
-from kedro.framework.startup import ProjectMetadata
 from kedro.utils import load_obj
 
+if TYPE_CHECKING:
+    from kedro.framework.startup import ProjectMetadata
+
 NO_DEPENDENCY_MESSAGE = """{module} is not installed. Please make sure {module} is in
 requirements.txt and run 'pip install -r requirements.txt'."""
 LINT_CHECK_ONLY_HELP = """Check the files for style guide violations, unsorted /
@@ -69,13 +71,13 @@ def project_group() -> None:  # pragma: no cover
 @forward_command(project_group, forward_help=True)
 @env_option
 @click.pass_obj  # this will pass the metadata as first argument
-def ipython(metadata: ProjectMetadata, /, env: str, args: Any, **kwargs: Any) -> None:  # noqa: unused-argument
+def ipython(metadata: ProjectMetadata, /, env: str, args: Any, **kwargs: Any) -> None:
     """Open IPython with project specific variables loaded."""
     _check_module_importable("IPython")
 
     if env:
         os.environ["KEDRO_ENV"] = env
-    call(["ipython", "--ext", "kedro.ipython"] + list(args))
+    call(["ipython", "--ext", "kedro.ipython", *list(args)])
 
 
 @project_group.command()
diff --git a/kedro/framework/cli/starters.py b/kedro/framework/cli/starters.py
index bb07cfab2e..51e27884f6 100644
--- a/kedro/framework/cli/starters.py
+++ b/kedro/framework/cli/starters.py
@@ -12,16 +12,14 @@
 import stat
 import sys
 import tempfile
-from collections import OrderedDict
 from itertools import groupby
 from pathlib import Path
-from typing import Any, Callable
+from typing import TYPE_CHECKING, Any, Callable
 
 import click
 import requests
 import yaml
 from attrs import define, field
-from importlib_metadata import EntryPoints
 from packaging.version import parse
 
 import kedro
@@ -35,6 +33,11 @@
     command_with_verbosity,
 )
 
+if TYPE_CHECKING:
+    from collections import OrderedDict
+
+    from importlib_metadata import EntryPoints
+
 TOOLS_ARG_HELP = """
 Select which tools you'd like to include. By default, none are included.\n
 
@@ -226,7 +229,7 @@ def _parse_yes_no_to_bool(value: str) -> Any:
 
 
 def _validate_selected_tools(selected_tools: str | None) -> None:
-    valid_tools = list(TOOLS_SHORTNAME_TO_NUMBER) + ["all", "none"]
+    valid_tools = [*list(TOOLS_SHORTNAME_TO_NUMBER), "all", "none"]
 
     if selected_tools is not None:
         tools = re.sub(r"\s", "", selected_tools).split(",")
@@ -973,7 +976,7 @@ def _create_project(
 class _Prompt:
     """Represent a single CLI prompt for `kedro new`"""
 
-    def __init__(self, *args: Any, **kwargs: Any) -> None:  # noqa: unused-argument
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
        try:
            self.title = kwargs["title"]
        except KeyError as exc:
@@ -988,7 +991,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
     def __str__(self) -> str:
         title = self.title.strip().title()
         title = click.style(title + "\n" + "=" * len(title), bold=True)
-        prompt_lines = [title] + [self.text]
+        prompt_lines = [title, self.text]
         prompt_text = "\n".join(str(line).strip() for line in prompt_lines)
         return f"\n{prompt_text}\n"
 
@@ -1002,7 +1005,6 @@ def validate(self, user_input: str) -> None:
             sys.exit(1)
 
 
-# noqa: unused-argument
 def _remove_readonly(
     func: Callable, path: Path, excinfo: tuple
 ) -> None:  # pragma: no cover
diff --git a/kedro/framework/cli/utils.py b/kedro/framework/cli/utils.py
index 6611cabac5..8637c63a85 100644
--- a/kedro/framework/cli/utils.py
+++ b/kedro/framework/cli/utils.py
@@ -64,7 +64,7 @@ def python_call(
     module: str, arguments: Iterable[str], **kwargs: Any
 ) -> None:  # pragma: no cover
     """Run a subprocess command that invokes a Python module."""
-    call([sys.executable, "-m", module] + list(arguments), **kwargs)
+    call([sys.executable, "-m", module, *list(arguments)], **kwargs)
 
 
 def find_stylesheets() -> Iterable[str]:  # pragma: no cover
diff --git a/kedro/framework/context/context.py b/kedro/framework/context/context.py
index ddc197b65b..7cb0720e07 100644
--- a/kedro/framework/context/context.py
+++ b/kedro/framework/context/context.py
@@ -4,19 +4,21 @@
 import logging
 from copy import deepcopy
 from pathlib import Path, PurePosixPath, PureWindowsPath
-from typing import Any
+from typing import TYPE_CHECKING, Any
 from urllib.parse import urlparse
 from warnings import warn
 
 from attrs import define, field
 from omegaconf import OmegaConf
-from pluggy import PluginManager
 
 from kedro.config import AbstractConfigLoader, MissingConfigException
 from kedro.framework.project import settings
-from kedro.io import DataCatalog
+from kedro.io import DataCatalog  # noqa: TCH001
 from kedro.pipeline.transcoding import _transcode_split
 
+if TYPE_CHECKING:
+    from pluggy import PluginManager
+
 
 def _is_relative_path(path_string: str) -> bool:
     """Checks whether a path string is a relative path.
@@ -124,13 +126,13 @@ def _validate_transcoded_datasets(catalog: DataCatalog) -> None:
     """Validates transcoded datasets are correctly named
 
     Args:
-        catalog (DataCatalog): The catalog object containing the
-            datasets to be validated.
+        catalog: The catalog object containing the datasets to be
+            validated.
 
     Raises:
         ValueError: If a dataset name does not conform to the expected
-           transcoding naming conventions,a ValueError is raised by the
-           `_transcode_split` function.
+            transcoding naming conventions,a ValueError is raised by the
+            `_transcode_split` function.
 
     """
     for dataset_name in catalog._datasets.keys():
@@ -197,7 +199,7 @@ def params(self) -> dict[str, Any]:
         try:
             params = self.config_loader["parameters"]
         except MissingConfigException as exc:
-            warn(f"Parameters not found in your Kedro project config.\n{str(exc)}")
+            warn(f"Parameters not found in your Kedro project config.\n{exc!s}")
             params = {}
 
         if self._extra_params:
diff --git a/kedro/framework/hooks/specs.py b/kedro/framework/hooks/specs.py
index 14431f0362..0d8946cfdb 100644
--- a/kedro/framework/hooks/specs.py
+++ b/kedro/framework/hooks/specs.py
@@ -4,15 +4,16 @@
 """
 from __future__ import annotations
 
-from typing import Any
-
-from kedro.framework.context import KedroContext
-from kedro.io import DataCatalog
-from kedro.pipeline import Pipeline
-from kedro.pipeline.node import Node
+from typing import TYPE_CHECKING, Any
 
 from .markers import hook_spec
 
+if TYPE_CHECKING:
+    from kedro.framework.context import KedroContext
+    from kedro.io import DataCatalog
+    from kedro.pipeline import Pipeline
+    from kedro.pipeline.node import Node
+
 
 class DataCatalogSpecs:
     """Namespace that defines all specifications for a data catalog's lifecycle hooks."""
diff --git a/kedro/framework/project/__init__.py b/kedro/framework/project/__init__.py
index f24b48d17c..9509ec5f88 100644
--- a/kedro/framework/project/__init__.py
+++ b/kedro/framework/project/__init__.py
@@ -7,12 +7,11 @@
 import operator
 import os
 import traceback
-import types
 import warnings
 from collections import UserDict
 from collections.abc import MutableMapping
 from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 import dynaconf
 import importlib_resources
@@ -22,6 +21,9 @@
 
 from kedro.pipeline import Pipeline, pipeline
 
+if TYPE_CHECKING:
+    import types
+
 IMPORT_ERROR_MESSAGE = (
     "An error occurred while importing the '{module}' module. Nothing "
     "defined therein will be returned by 'find_pipelines'.\n\n{tb_exc}"
 )
@@ -101,7 +103,7 @@ class _ProjectSettings(LazySettings):
     )
     _SESSION_STORE_CLASS = _IsSubclassValidator(
         "SESSION_STORE_CLASS",
-        default=_get_default_class("kedro.framework.session.session.BaseSessionStore"),
+        default=_get_default_class("kedro.framework.session.store.BaseSessionStore"),
     )
     _SESSION_STORE_ARGS = Validator("SESSION_STORE_ARGS", default={})
     _DISABLE_HOOKS_FOR_PLUGINS = Validator("DISABLE_HOOKS_FOR_PLUGINS", default=tuple())
@@ -238,7 +240,7 @@ def __init__(self) -> None:
             # Fallback to the framework default loggings
             path = default_logging_path
 
-        msg = f"Using '{str(path)}' as logging configuration. " + msg
+        msg = f"Using '{path!s}' as logging configuration. " + msg
 
         # Load and apply the logging configuration
         logging_config = Path(path).read_text(encoding="utf-8")
diff --git a/kedro/framework/session/session.py b/kedro/framework/session/session.py
index cf559e4d3d..4880ce1948 100644
--- a/kedro/framework/session/session.py
+++ b/kedro/framework/session/session.py
@@ -10,13 +10,11 @@
 import traceback
 from copy import deepcopy
 from pathlib import Path
-from typing import Any, Iterable
+from typing import TYPE_CHECKING, Any, Iterable
 
 import click
 
 from kedro import __version__ as kedro_version
-from kedro.config import AbstractConfigLoader
-from kedro.framework.context import KedroContext
 from kedro.framework.hooks import _create_hook_manager
 from kedro.framework.hooks.manager import _register_hooks, _register_hooks_entry_points
 from kedro.framework.project import (
@@ -24,11 +22,15 @@
     settings,
     validate_settings,
 )
-from kedro.framework.session.store import BaseSessionStore
 from kedro.io.core import generate_timestamp
 from kedro.runner import AbstractRunner, SequentialRunner
 from kedro.utils import _find_kedro_project
 
+if TYPE_CHECKING:
+    from kedro.config import AbstractConfigLoader
+    from kedro.framework.context import KedroContext
+    from kedro.framework.session.store import BaseSessionStore
+
 
 def _describe_git(project_path: Path) -> dict[str, dict[str, Any]]:
     path = str(project_path)
diff --git a/kedro/framework/startup.py b/kedro/framework/startup.py
index 8649bbc8a7..32d4be0b21 100644
--- a/kedro/framework/startup.py
+++ b/kedro/framework/startup.py
@@ -104,7 +104,7 @@ def _get_project_metadata(project_path: Path) -> ProjectMetadata:
     try:
         return ProjectMetadata(**metadata_dict)
     except TypeError as exc:
-        expected_keys = mandatory_keys + ["source_dir", "tools", "example_pipeline"]
+        expected_keys = [*mandatory_keys, "source_dir", "tools", "example_pipeline"]
         raise RuntimeError(
             f"Found unexpected keys in '{_PYPROJECT}'. Make sure "
             f"it only contains the following keys: {expected_keys}."
@@ -142,7 +142,7 @@ def _add_src_to_path(source_dir: Path, project_path: Path) -> None:
     python_path = os.getenv("PYTHONPATH", "")
     if str(source_dir) not in python_path:
         sep = os.pathsep if python_path else ""
-        os.environ["PYTHONPATH"] = f"{str(source_dir)}{sep}{python_path}"
+        os.environ["PYTHONPATH"] = f"{source_dir!s}{sep}{python_path}"
 
 
 def bootstrap_project(project_path: str | Path) -> ProjectMetadata:
diff --git a/kedro/io/core.py b/kedro/io/core.py
index c9c79812f0..ef87cd4df8 100644
--- a/kedro/io/core.py
+++ b/kedro/io/core.py
@@ -6,7 +6,6 @@
 import abc
 import copy
 import logging
-import os
 import pprint
 import re
 import sys
@@ -17,7 +16,7 @@
 from glob import iglob
 from operator import attrgetter
 from pathlib import Path, PurePath, PurePosixPath
-from typing import Any, Callable, Generic, TypeVar
+from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar
 from urllib.parse import urlsplit
 
 from cachetools import Cache, cachedmethod
@@ -26,6 +25,9 @@
 
 from kedro.utils import load_obj
 
+if TYPE_CHECKING:
+    import os
+
 VERSION_FORMAT = "%Y-%m-%dT%H.%M.%S.%fZ"
 VERSIONED_FLAG_KEY = "versioned"
 VERSION_KEY = "version"
@@ -159,7 +161,7 @@ def from_config(
         except Exception as exc:
             raise DatasetError(
                 f"An exception occurred when parsing config "
-                f"for dataset '{name}':\n{str(exc)}"
+                f"for dataset '{name}':\n{exc!s}"
             ) from exc
 
         try:
@@ -223,9 +225,7 @@ def load(self: Self) -> _DO:
            except Exception as exc:
                # This exception handling is by design as the composed data sets
                # can throw any type of exception.
-                message = (
-                    f"Failed while loading data from data set {str(self)}.\n{str(exc)}"
-                )
+                message = f"Failed while loading data from data set {self!s}.\n{exc!s}"
                raise DatasetError(message) from exc
 
        load.__annotations__["return"] = load_func.__annotations__.get("return")
@@ -249,9 +249,7 @@ def save(self: Self, data: _DI) -> None:
            except (DatasetError, FileNotFoundError, NotADirectoryError):
                raise
            except Exception as exc:
-                message = (
-                    f"Failed while saving data to data set {str(self)}.\n{str(exc)}"
-                )
+                message = f"Failed while saving data to data set {self!s}.\n{exc!s}"
                raise DatasetError(message) from exc
 
        save.__annotations__["data"] = save_func.__annotations__.get("data", Any)
@@ -371,9 +369,7 @@ def exists(self) -> bool:
            self._logger.debug("Checking whether target of %s exists", str(self))
            return self._exists()
        except Exception as exc:
-            message = (
-                f"Failed during exists check for data set {str(self)}.\n{str(exc)}"
-            )
+            message = f"Failed during exists check for data set {self!s}.\n{exc!s}"
            raise DatasetError(message) from exc
 
    def _exists(self) -> bool:
@@ -394,7 +390,7 @@ def release(self) -> None:
            self._logger.debug("Releasing %s", str(self))
            self._release()
        except Exception as exc:
-            message = f"Failed during release for data set {str(self)}.\n{str(exc)}"
+            message = f"Failed during release for data set {self!s}.\n{exc!s}"
            raise DatasetError(message) from exc
 
    def _release(self) -> None:
@@ -705,7 +701,7 @@ def _get_save_path(self) -> PurePosixPath:
 
        if self._exists_function(str(versioned_path)):
            raise DatasetError(
-                f"Save path '{versioned_path}' for {str(self)} must not exist if "
+                f"Save path '{versioned_path}' for {self!s} must not exist if "
                f"versioning is enabled."
            )
 
@@ -769,9 +765,7 @@ def exists(self) -> bool:
        except VersionNotFoundError:
            return False
        except Exception as exc:  # SKIP_IF_NO_SPARK
-            message = (
-                f"Failed during exists check for data set {str(self)}.\n{str(exc)}"
-            )
+            message = f"Failed during exists check for data set {self!s}.\n{exc!s}"
            raise DatasetError(message) from exc
 
    def _release(self) -> None:
diff --git a/kedro/io/shared_memory_dataset.py b/kedro/io/shared_memory_dataset.py
index db10722826..e2bd63bf7e 100644
--- a/kedro/io/shared_memory_dataset.py
+++ b/kedro/io/shared_memory_dataset.py
@@ -1,11 +1,13 @@
 from __future__ import annotations
 
 import pickle
-from multiprocessing.managers import SyncManager
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 from kedro.io.core import AbstractDataset, DatasetError
 
+if TYPE_CHECKING:
+    from multiprocessing.managers import SyncManager
+
 
 class SharedMemoryDataset(AbstractDataset):
     """``SharedMemoryDataset`` is a wrapper class for a shared MemoryDataset in SyncManager."""
@@ -47,7 +49,7 @@ def _save(self, data: Any) -> None:
                 pickle.dumps(data)
             except Exception as serialisation_exc:  # SKIP_IF_NO_SPARK
                 raise DatasetError(
-                    f"{str(data.__class__)} cannot be serialised. ParallelRunner "
+                    f"{data.__class__!s} cannot be serialised. ParallelRunner "
                     "implicit memory datasets can only be used with serialisable data"
                 ) from serialisation_exc
             raise exc  # pragma: no cover
diff --git a/kedro/pipeline/modular_pipeline.py b/kedro/pipeline/modular_pipeline.py
index 9eb4caba16..779800aa35 100644
--- a/kedro/pipeline/modular_pipeline.py
+++ b/kedro/pipeline/modular_pipeline.py
@@ -3,13 +3,15 @@
 
 import copy
 import difflib
-from typing import AbstractSet, Iterable
+from typing import TYPE_CHECKING, AbstractSet, Iterable
 
-from kedro.pipeline.node import Node
 from kedro.pipeline.pipeline import Pipeline
 
 from .transcoding import TRANSCODING_SEPARATOR, _strip_transcoding, _transcode_split
 
+if TYPE_CHECKING:
+    from kedro.pipeline.node import Node
+
 
 class ModularPipelineError(Exception):
     """Raised when a modular pipeline is not adapted and integrated
diff --git a/kedro/pipeline/node.py b/kedro/pipeline/node.py
index 1b718689c5..5643aeb7fc 100644
--- a/kedro/pipeline/node.py
+++ b/kedro/pipeline/node.py
@@ -205,8 +205,8 @@ def _set_to_str(xset: set | list[str]) -> str:
 
     def __repr__(self) -> str:  # pragma: no cover
         return (
-            f"Node({self._func_name}, {repr(self._inputs)}, {repr(self._outputs)}, "
-            f"{repr(self._name)})"
+            f"Node({self._func_name}, {self._inputs!r}, {self._outputs!r}, "
+            f"{self._name!r})"
         )
 
     def __call__(self, **kwargs: Any) -> dict[str, Any]:
@@ -396,7 +396,7 @@ def run(self, inputs: dict[str, Any] | None = None) -> dict[str, Any]:
 
     def _run_with_no_inputs(self, inputs: dict[str, Any]) -> Any:
         if inputs:
             raise ValueError(
-                f"Node {str(self)} expected no inputs, "
+                f"Node {self!s} expected no inputs, "
                 f"but got the following {len(inputs)} input(s) instead: "
                 f"{sorted(inputs.keys())}."
@@ -406,7 +406,7 @@ def _run_with_no_inputs(self, inputs: dict[str, Any]) -> Any:
 
     def _run_with_one_input(self, inputs: dict[str, Any], node_input: str) -> Any:
         if len(inputs) != 1 or node_input not in inputs:
             raise ValueError(
-                f"Node {str(self)} expected one input named '{node_input}', "
+                f"Node {self!s} expected one input named '{node_input}', "
                 f"but got the following {len(inputs)} input(s) instead: "
                 f"{sorted(inputs.keys())}."
@@ -417,7 +417,7 @@ def _run_with_one_input(self, inputs: dict[str, Any], node_input: str) -> Any:
     def _run_with_list(self, inputs: dict[str, Any], node_inputs: list[str]) -> Any:
         # Node inputs and provided run inputs should completely overlap
         if set(node_inputs) != set(inputs.keys()):
             raise ValueError(
-                f"Node {str(self)} expected {len(node_inputs)} input(s) {node_inputs}, "
+                f"Node {self!s} expected {len(node_inputs)} input(s) {node_inputs}, "
                 f"but got the following {len(inputs)} input(s) instead: "
                 f"{sorted(inputs.keys())}."
@@ -430,7 +430,7 @@ def _run_with_dict(
         # Node inputs and provided run inputs should completely overlap
         if set(node_inputs.values()) != set(inputs.keys()):
             raise ValueError(
-                f"Node {str(self)} expected {len(set(node_inputs.values()))} input(s) "
+                f"Node {self!s} expected {len(set(node_inputs.values()))} input(s) "
                 f"{sorted(set(node_inputs.values()))}, "
                 f"but got the following {len(inputs)} input(s) instead: "
                 f"{sorted(inputs.keys())}."
@@ -457,7 +457,7 @@ def _from_dict() -> dict[str, Any]:
             )
             if set(keys) != set(result.keys()):
                 raise ValueError(
-                    f"Failed to save outputs of node {str(self)}.\n"
+                    f"Failed to save outputs of node {self!s}.\n"
                     f"The node's output keys {set(result.keys())} "
                     f"do not match with the returned output's keys {set(keys)}."
                 )
@@ -477,14 +477,14 @@ def _from_list() -> dict:
 
         if not isinstance(result, (list, tuple)):
             raise ValueError(
-                f"Failed to save outputs of node {str(self)}.\n"
+                f"Failed to save outputs of node {self!s}.\n"
                 f"The node definition contains a list of "
                 f"outputs {self._outputs}, whereas the node function "
                 f"returned a '{type(result).__name__}'."
             )
         if len(result) != len(self._outputs):
             raise ValueError(
-                f"Failed to save outputs of node {str(self)}.\n"
+                f"Failed to save outputs of node {self!s}.\n"
                 f"The node function returned {len(result)} output(s), "
                 f"whereas the node definition contains {len(self._outputs)} "
                 f"output(s)."
diff --git a/kedro/pipeline/pipeline.py b/kedro/pipeline/pipeline.py
index 36be18a7aa..cab109deb5 100644
--- a/kedro/pipeline/pipeline.py
+++ b/kedro/pipeline/pipeline.py
@@ -25,13 +25,13 @@ def __getattr__(name: str) -> Any:
         from kedro.pipeline.transcoding import TRANSCODING_SEPARATOR
 
         warnings.warn(
-            f"{repr(name)} has been moved to 'kedro.pipeline.transcoding', "
+            f"{name!r} has been moved to 'kedro.pipeline.transcoding', "
             f"and the alias will be removed in Kedro 0.20.0",
             kedro.KedroDeprecationWarning,
             stacklevel=2,
         )
         return TRANSCODING_SEPARATOR
-    raise AttributeError(f"module {repr(__name__)} has no attribute {repr(name)}")
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
 
 
 class OutputNotUniqueError(Exception):
diff --git a/kedro/runner/parallel_runner.py b/kedro/runner/parallel_runner.py
index d79e4685be..6d8d7c7142 100644
--- a/kedro/runner/parallel_runner.py
+++ b/kedro/runner/parallel_runner.py
@@ -12,9 +12,7 @@
 from multiprocessing.managers import BaseProxy, SyncManager
 from multiprocessing.reduction import ForkingPickler
 from pickle import PicklingError
-from typing import Any, Iterable
-
-from pluggy import PluginManager
+from typing import TYPE_CHECKING, Any, Iterable
 
 from kedro.framework.hooks.manager import (
     _create_hook_manager,
@@ -28,10 +26,14 @@
     MemoryDataset,
     SharedMemoryDataset,
 )
-from kedro.pipeline import Pipeline
-from kedro.pipeline.node import Node
 from kedro.runner.runner import AbstractRunner, run_node
 
+if TYPE_CHECKING:
+    from pluggy import PluginManager
+
+    from kedro.pipeline import Pipeline
+    from kedro.pipeline.node import Node
+
 # see https://github.com/python/cpython/blob/master/Lib/concurrent/futures/process.py#L114
 _MAX_WINDOWS_WORKERS = 61
 
diff --git a/kedro/runner/runner.py b/kedro/runner/runner.py
index f9cdd08798..870d2e6d35 100644
--- a/kedro/runner/runner.py
+++ b/kedro/runner/runner.py
@@ -15,15 +15,18 @@
     as_completed,
     wait,
 )
-from typing import Any, Collection, Iterable, Iterator
+from typing import TYPE_CHECKING, Any, Collection, Iterable, Iterator
 
 from more_itertools import interleave
-from pluggy import PluginManager
 
 from kedro.framework.hooks.manager import _NullPluginManager
 from kedro.io import DataCatalog, MemoryDataset
 from kedro.pipeline import Pipeline
-from kedro.pipeline.node import Node
+
+if TYPE_CHECKING:
+    from pluggy import PluginManager
+
+    from kedro.pipeline.node import Node
 
 
 class AbstractRunner(ABC):
@@ -404,7 +407,7 @@ def run_node(
             f"Async data loading and saving does not work with "
             f"nodes wrapping generator functions. Please make "
             f"sure you don't use `yield` anywhere "
-            f"in node {str(node)}."
+            f"in node {node!s}."
         )
 
     if is_async:
diff --git a/kedro/runner/sequential_runner.py b/kedro/runner/sequential_runner.py
index 5e14592dd8..894b2d8552 100644
--- a/kedro/runner/sequential_runner.py
+++ b/kedro/runner/sequential_runner.py
@@ -6,14 +6,16 @@
 
 from collections import Counter
 from itertools import chain
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
-from pluggy import PluginManager
-
-from kedro.io import DataCatalog
-from kedro.pipeline import Pipeline
 from kedro.runner.runner import AbstractRunner, run_node
 
+if TYPE_CHECKING:
+    from pluggy import PluginManager
+
+    from kedro.io import DataCatalog
+    from kedro.pipeline import Pipeline
+
 
 class SequentialRunner(AbstractRunner):
     """``SequentialRunner`` is an ``AbstractRunner`` implementation. It can
diff --git a/kedro/runner/thread_runner.py b/kedro/runner/thread_runner.py
index 3d8ef12111..86c56b553f 100644
--- a/kedro/runner/thread_runner.py
+++ b/kedro/runner/thread_runner.py
@@ -8,15 +8,17 @@
 from collections import Counter
 from concurrent.futures import FIRST_COMPLETED, ThreadPoolExecutor, wait
 from itertools import chain
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
-from pluggy import PluginManager
-
-from kedro.io import DataCatalog
-from kedro.pipeline import Pipeline
-from kedro.pipeline.node import Node
 from kedro.runner.runner import AbstractRunner, run_node
 
+if TYPE_CHECKING:
+    from pluggy import PluginManager
+
+    from kedro.io import DataCatalog
+    from kedro.pipeline import Pipeline
+    from kedro.pipeline.node import Node
+
 
 class ThreadRunner(AbstractRunner):
     """``ThreadRunner`` is an ``AbstractRunner`` implementation. It can
diff --git a/kedro/utils.py b/kedro/utils.py
index 4613ea3620..0d4285e05c 100644
--- a/kedro/utils.py
+++ b/kedro/utils.py
@@ -57,7 +57,7 @@ def _is_project(project_path: Union[str, Path]) -> bool:
 
     try:
         return "[tool.kedro]" in metadata_file.read_text(encoding="utf-8")
-    except Exception:  # noqa: broad-except
+    except Exception:
         return False
 
 
@@ -73,7 +73,7 @@ def _find_kedro_project(current_dir: Path) -> Any:  # pragma: no cover
         Kedro project associated with a given path,
         or None if no relevant Kedro project is found.
     """
-    paths_to_check = [current_dir] + list(current_dir.parents)
+    paths_to_check = [current_dir, *list(current_dir.parents)]
     for parent_dir in paths_to_check:
         if _is_project(parent_dir):
             return parent_dir
diff --git a/pyproject.toml b/pyproject.toml
index 2dabb4c422..9f8120fa22 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -134,7 +134,7 @@ omit = [
   "kedro/runner/parallel_runner.py",
   "*/site-packages/*",
 ]
-exclude_lines = ["pragma: no cover", "raise NotImplementedError"]
+exclude_also = ["raise NotImplementedError", "if TYPE_CHECKING:"]
 
 [tool.pytest.ini_options]
 addopts="""
@@ -216,14 +216,16 @@ ignore_imports = [
 line-length = 88
 show-fixes = true
 select = [
-    "F",  # Pyflakes
-    "W",  # pycodestyle
-    "E",  # pycodestyle
-    "I",  # isort
-    "UP", # pyupgrade
-    "PL", # Pylint
+    "F",    # Pyflakes
+    "W",    # pycodestyle
+    "E",    # pycodestyle
+    "I",    # isort
+    "UP",   # pyupgrade
+    "PL",   # Pylint
     "T201", # Print Statement
-    "S",  # flake8-bandit
+    "S",    # flake8-bandit
+    "TCH",  # flake8-type-checking
+    "RUF",  # Ruff-specific rules
 ]
 ignore = ["E501"]
 
diff --git a/tests/config/test_omegaconf_config.py b/tests/config/test_omegaconf_config.py
index cf64c4c7c7..1c0b26a6d7 100644
--- a/tests/config/test_omegaconf_config.py
+++ b/tests/config/test_omegaconf_config.py
@@ -655,7 +655,7 @@ def test_load_config_from_tar_file(self, tmp_path):
                 "--exclude=local/*.yml",
                 "-czf",
                 f"{tmp_path}/tar_conf.tar.gz",
-                f"--directory={str(tmp_path.parent)}",
+                f"--directory={tmp_path.parent!s}",
                 f"{tmp_path.name}",
             ]
         )
diff --git a/tests/framework/cli/conftest.py b/tests/framework/cli/conftest.py
index d4639e42f3..2db22389f9 100644
--- a/tests/framework/cli/conftest.py
+++ b/tests/framework/cli/conftest.py
@@ -132,7 +132,7 @@ def fake_project_cli(
     # It's safe to remove the new entries from path due to the python
     # module caching mechanism. Any `reload` on it will not work though.
     old_path = sys.path.copy()
-    sys.path = [str(fake_repo_path / "src")] + sys.path
+    sys.path = [str(fake_repo_path / "src"), *sys.path]
 
     import_module(PACKAGE_NAME)
     configure_project(PACKAGE_NAME)
diff --git a/tests/framework/cli/micropkg/test_micropkg_package.py b/tests/framework/cli/micropkg/test_micropkg_package.py
index c27e6e8105..d8da033158 100644
--- a/tests/framework/cli/micropkg/test_micropkg_package.py
+++ b/tests/framework/cli/micropkg/test_micropkg_package.py
@@ -64,7 +64,7 @@ def test_package_micropkg(
         assert result.exit_code == 0
         result = CliRunner().invoke(
             fake_project_cli,
-            ["micropkg", "package", f"pipelines.{PIPELINE_NAME}"] + options,
+            ["micropkg", "package", f"pipelines.{PIPELINE_NAME}", *options],
             obj=fake_metadata,
         )
 
diff --git a/tests/framework/cli/test_cli_hooks.py b/tests/framework/cli/test_cli_hooks.py
index 33f13e50fb..1194d667f9 100644
--- a/tests/framework/cli/test_cli_hooks.py
+++ b/tests/framework/cli/test_cli_hooks.py
@@ -2,13 +2,16 @@
 
 import logging
 from collections import namedtuple
+from typing import TYPE_CHECKING
 
 import pytest
 from click.testing import CliRunner
 
 from kedro.framework.cli.cli import KedroCLI
 from kedro.framework.cli.hooks import cli_hook_impl, get_cli_hook_manager, manager
-from kedro.framework.startup import ProjectMetadata
+
+if TYPE_CHECKING:
+    from kedro.framework.startup import ProjectMetadata
 
 logger = logging.getLogger(__name__)
 
diff --git a/tests/framework/cli/test_starters.py b/tests/framework/cli/test_starters.py
index ec8c9ff9a3..f17e133370 100644
--- a/tests/framework/cli/test_starters.py
+++ b/tests/framework/cli/test_starters.py
@@ -221,7 +221,6 @@ def _assert_requirements_ok(
     )
 
 
-# noqa: PLR0913
 def _assert_template_ok(
     result,
     tools="none",
diff --git a/tests/framework/session/conftest.py b/tests/framework/session/conftest.py
index c590131702..e34082829f 100644
--- a/tests/framework/session/conftest.py
+++ b/tests/framework/session/conftest.py
@@ -3,8 +3,7 @@
 import logging
 from logging.handlers import QueueHandler, QueueListener
 from multiprocessing import Queue
-from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 import pandas as pd
 import pytest
@@ -13,7 +12,6 @@
 from dynaconf.validator import Validator
 
 from kedro import __version__ as kedro_version
-from kedro.framework.context.context import KedroContext
 from kedro.framework.hooks import hook_impl
 from kedro.framework.project import (
     _ProjectPipelines,
@@ -21,11 +19,16 @@
     configure_project,
 )
 from kedro.framework.session import KedroSession
-from kedro.io import DataCatalog
-from kedro.pipeline import Pipeline
 from kedro.pipeline.modular_pipeline import pipeline as modular_pipeline
 from kedro.pipeline.node import Node, node
 
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from kedro.framework.context.context import KedroContext
+    from kedro.io import DataCatalog
+    from kedro.pipeline import Pipeline
+
 logger = logging.getLogger(__name__)
 
 MOCK_PACKAGE_NAME = "fake_package"
diff --git a/tests/framework/session/test_session.py b/tests/framework/session/test_session.py
index 02d1bcdaa8..eef798f83f 100644
--- a/tests/framework/session/test_session.py
+++ b/tests/framework/session/test_session.py
@@ -16,6 +16,7 @@
 from kedro import __version__ as kedro_version
 from kedro.config import AbstractConfigLoader, OmegaConfigLoader
 from kedro.framework.cli.utils import _split_params
+from kedro.framework.context import KedroContext
 from kedro.framework.project import (
     LOGGING,
     ValidationError,
@@ -25,7 +26,7 @@
     _ProjectSettings,
 )
 from kedro.framework.session import KedroSession
-from kedro.framework.session.session import KedroContext, KedroSessionError
+from kedro.framework.session.session import KedroSessionError
 from kedro.framework.session.shelvestore import ShelveStore
 from kedro.framework.session.store import BaseSessionStore
 
@@ -89,9 +90,7 @@ def mock_runner(mocker):
 def mock_context_class(mocker):
     mock_cls = create_attrs_autospec(KedroContext)
     return mocker.patch(
-        "kedro.framework.session.session.KedroContext",
-        autospec=True,
-        return_value=mock_cls,
+        "kedro.framework.context.KedroContext", autospec=True, return_value=mock_cls
     )
 
 
diff --git a/tests/framework/session/test_session_extension_hooks.py b/tests/framework/session/test_session_extension_hooks.py
index 879cfe3e73..81eda65b46 100644
--- a/tests/framework/session/test_session_extension_hooks.py
+++ b/tests/framework/session/test_session_extension_hooks.py
@@ -2,7 +2,7 @@
 import multiprocessing
 import re
 import time
-from typing import Any
+from typing import Any, Optional
 
 import pandas as pd
 import pytest
@@ -548,7 +548,7 @@ def sample_node_multiple_outputs():
 
 
 class LogCatalog(DataCatalog):
-    def load(self, name: str, version: str = None) -> Any:
+    def load(self, name: str, version: Optional[str] = None) -> Any:
         dataset = super().load(name=name, version=version)
         logger.info("Catalog load")
         return dataset
diff --git a/tests/io/test_core.py b/tests/io/test_core.py
index 13d8481cbe..4128ad6da2 100644
--- a/tests/io/test_core.py
+++ b/tests/io/test_core.py
@@ -66,7 +66,7 @@ def _save(self, data: str) -> None:
 
 
 class MyVersionedDataset(AbstractVersionedDataset[str, str]):
-    def __init__(  # noqa: PLR0913
+    def __init__(
         self,
         filepath: str,
         version: Version = None,
@@ -110,7 +110,7 @@ def _exists(self) -> bool:
 
 
 class MyLocalVersionedDataset(AbstractVersionedDataset[str, str]):
-    def __init__(  # noqa: PLR0913
+    def __init__(
         self,
         filepath: str,
         version: Version = None,
@@ -206,7 +206,7 @@ def dummy_data():
 
 
 class TestCoreFunctions:
-    @pytest.mark.parametrize("var", [1, True] + FALSE_BUILTINS)
+    @pytest.mark.parametrize("var", [1, True, *FALSE_BUILTINS])
     def test_str_representation(self, var):
         var_str = pprint.pformat(var)
         filepath_str = pprint.pformat(PurePosixPath("."))
@@ -292,7 +292,7 @@ def test_get_protocol_and_path_http_with_version(self, filepath):
         "input", [{"key1": "invalid value"}, {"key2": "invalid;value"}]
     )
     def test_validate_forbidden_chars(self, input):
-        key = list(input.keys())[0]
+        key = next(iter(input.keys()))
         expected_error_message = (
             f"Neither white-space nor semicolon are allowed in '{key}'."
         )
@@ -484,7 +484,7 @@ def _save(self, data: str) -> None:
 
 
 class MyLegacyVersionedDataset(AbstractVersionedDataset[str, str]):
-    def __init__(  # noqa: PLR0913
+    def __init__(
         self,
         filepath: str,
         version: Version = None,
diff --git a/tests/ipython/conftest.py b/tests/ipython/conftest.py
index f263602741..fa13e74422 100644
--- a/tests/ipython/conftest.py
+++ b/tests/ipython/conftest.py
@@ -10,7 +10,7 @@
 from kedro.pipeline import node
 from kedro.pipeline.modular_pipeline import pipeline as modular_pipeline
 
-from . import dummy_function_fixtures  # noqa It is needed for the inspect module
+from . import dummy_function_fixtures  # It is needed for the inspect module
 from .dummy_function_fixtures import (
     dummy_function,
     dummy_function_with_loop,
diff --git a/tests/pipeline/test_modular_pipeline.py b/tests/pipeline/test_modular_pipeline.py
index c1e76867b5..32a4f06f86 100644
--- a/tests/pipeline/test_modular_pipeline.py
+++ b/tests/pipeline/test_modular_pipeline.py
@@ -219,7 +219,7 @@ def test_empty_output(self):
     )
     def test_missing_dataset_name_no_suggestion(
         self, func, inputs, outputs, inputs_map, outputs_map, expected_missing
-    ):  # noqa: PLR0913
+    ):
         raw_pipeline = modular_pipeline([node(func, inputs, outputs)])
 
         with pytest.raises(
diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py
index 11913e0d37..746ea4794a 100644
--- a/tests/pipeline/test_pipeline.py
+++ b/tests/pipeline/test_pipeline.py
@@ -147,7 +147,7 @@ def pipeline_with_dicts():
             node(triconcat, ["H", "I", "M"], "N", name="node1"),
             node(identity, "H", "I", name="node2"),
             node(identity, "F", {"M": "M", "N": "G"}, name="node3"),
-            node(identity, "E", {"O": "F", "P": "H"}, name="node4"),  # NOQA
+            node(identity, "E", {"O": "F", "P": "H"}, name="node4"),
             node(identity, {"input1": "D"}, None, name="node5"),
             node(identity, "C", "D", name="node6", tags=["foo"]),
             node(identity, "B", {"P": "C", "Q": "E"}, name="node7", tags=["foo"]),
@@ -160,7 +160,7 @@ def pipeline_with_dicts():
         {node(identity, "B", {"P": "C", "Q": "E"}, name="node7", tags=["foo"])},
         {
             node(identity, "C", "D", name="node6", tags=["foo"]),
-            node(identity, "E", {"O": "F", "P": "H"}, name="node4"),  # NOQA
+            node(identity, "E", {"O": "F", "P": "H"}, name="node4"),
         },
         {
             node(identity, {"input1": "D"}, None, name="node5"),
diff --git a/tests/runner/conftest.py b/tests/runner/conftest.py
index 629000686f..19b6c9148c 100644
--- a/tests/runner/conftest.py
+++ b/tests/runner/conftest.py
@@ -278,17 +278,17 @@ def two_branches_crossed_pipeline_variable_inputs(request):
 
     return pipeline(
         [
-            node(first_arg, ["ds0_A"] + extra_inputs, "_ds1_A", name="node1_A"),
-            node(first_arg, ["ds0_B"] + extra_inputs, "_ds1_B", name="node1_B"),
+            node(first_arg, ["ds0_A", *extra_inputs], "_ds1_A", name="node1_A"),
+            node(first_arg, ["ds0_B", *extra_inputs], "_ds1_B", name="node1_B"),
             node(
                 multi_input_list_output,
-                ["_ds1_A", "_ds1_B"] + extra_inputs,
+                ["_ds1_A", "_ds1_B", *extra_inputs],
                 ["ds2_A", "ds2_B"],
                 name="node2",
             ),
-            node(first_arg, ["ds2_A"] + extra_inputs, "_ds3_A", name="node3_A"),
-            node(first_arg, ["ds2_B"] + extra_inputs, "_ds3_B", name="node3_B"),
-            node(first_arg, ["_ds3_A"] + extra_inputs, "_ds4_A", name="node4_A"),
-            node(first_arg, ["_ds3_B"] + extra_inputs, "_ds4_B", name="node4_B"),
+            node(first_arg, ["ds2_A", *extra_inputs], "_ds3_A", name="node3_A"),
+            node(first_arg, ["ds2_B", *extra_inputs], "_ds3_B", name="node3_B"),
+            node(first_arg, ["_ds3_A", *extra_inputs], "_ds4_A", name="node4_A"),
+            node(first_arg, ["_ds3_B", *extra_inputs], "_ds4_B", name="node4_B"),
         ]
     )
diff --git a/tests/runner/test_parallel_runner.py b/tests/runner/test_parallel_runner.py
index c53a836f17..11165799a0 100644
--- a/tests/runner/test_parallel_runner.py
+++ b/tests/runner/test_parallel_runner.py
@@ -102,7 +102,7 @@ def test_specified_max_workers_bellow_cpu_cores_count(
         cpu_cores,
         user_specified_number,
         expected_number,
-    ):  # noqa: PLR0913
+    ):
         """
         The system has 2 cores, but we initialize the runner with max_workers=4.
         `fan_out_fan_in` pipeline needs 3 processes.
@@ -198,7 +198,7 @@ def test_memory_dataset_not_serialisable(self, is_async, catalog):
         pipeline = modular_pipeline([node(return_not_serialisable, "A", "B")])
         catalog.add_feed_dict(feed_dict={"A": 42})
         pattern = (
-            rf"{str(data.__class__)} cannot be serialised. ParallelRunner implicit "
+            rf"{data.__class__!s} cannot be serialised. ParallelRunner implicit "
            rf"memory datasets can only be used with serialisable data"
        )
 
@@ -313,7 +313,6 @@ def test_count_multiple_loads(self, is_async):
                 node(sink, "dataset", None, name="fred"),
             ]
         )
-        # noqa: no-member
         catalog = DataCatalog(
             {"dataset": runner._manager.LoggingDataset(log, "dataset")}
         )
diff --git a/tests/runner/test_thread_runner.py b/tests/runner/test_thread_runner.py
index b570f35cbf..7f43fd0f71 100644
--- a/tests/runner/test_thread_runner.py
+++ b/tests/runner/test_thread_runner.py
@@ -56,7 +56,7 @@ def test_specified_max_workers(
         catalog,
         user_specified_number,
         expected_number,
-    ):  # noqa: PLR0913
+    ):
         """
         We initialize the runner with max_workers=4.
         `fan_out_fan_in` pipeline needs 3 threads.
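--
Notes on the lint rules applied above. The snippets below are illustrative
sketches with made-up names; they are not code from this diff.

The recurring `if TYPE_CHECKING:` pattern comes from the flake8-type-checking
(TCH) rules: imports needed only for annotations are deferred into a block
that type checkers read but the interpreter never executes. Combined with
`from __future__ import annotations`, which the touched modules already use,
annotations are stored as strings, so the imported names need not exist at
runtime. A minimal sketch:

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Visible to mypy/pyright only; never imported at runtime, so it
        # cannot contribute to import cycles or module start-up cost.
        from pathlib import Path


    def describe(path: Path) -> str:
        # `Path` appears only in the annotation, which the `annotations`
        # future import keeps as an unevaluated string.
        return f"file at {path!s}"


    if __name__ == "__main__":
        # The annotation survives as a plain string; nothing was imported.
        print(describe.__annotations__)  # {'path': 'Path', 'return': 'str'}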
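A related detail in the pyproject.toml hunk: `exclude_lines` replaces
coverage.py's built-in exclusion list, while `exclude_also` (added in
coverage.py 7.2) appends to it. That is why the explicit "pragma: no cover"
entry can be dropped, and why "if TYPE_CHECKING:" is added: the deferred
import blocks never execute, so they would otherwise show up as uncovered
lines.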
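Moving an import under `if TYPE_CHECKING:` also removes the name from the
importing module's runtime namespace, which is why the session test above now
patches "kedro.framework.context.KedroContext" rather than
"kedro.framework.session.session.KedroContext". The general rule is that a
patch target must name a module attribute that actually exists at runtime, as
in this self-contained sketch using only stdlib names:

    import json
    from unittest import mock


    def render() -> str:
        # Looks `dumps` up on the json module at call time.
        return json.dumps({"status": "ok"})


    # Patch the module where the attribute lives. A name bound only under
    # `if TYPE_CHECKING:` is never set on the module at runtime, so it
    # cannot be patched in that module at all.
    with mock.patch("json.dumps", return_value="{}"):
        assert render() == "{}"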
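Many hunks replace list concatenation with iterable unpacking, per Ruff's
RUF005. The result is the same list, but unpacking splices any iterable into
the literal without building an intermediate list or calling `list(...)` on
the right-hand operand. For example, with hypothetical values:

    base_cmd = ["kedro", "run"]
    extra_args = ("--env", "staging")

    # Before: `+` requires both operands to be lists.
    cmd_old = base_cmd + list(extra_args)

    # After: unpacking accepts any iterable directly.
    cmd_new = [*base_cmd, *extra_args]

    assert cmd_old == cmd_new == ["kedro", "run", "--env", "staging"]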
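Finally, the `{str(e)}` to `{e!s}` and `{repr(name)}` to `{name!r}` rewrites
apply RUF010: inside an f-string replacement field, the conversion flags `!s`,
`!r` and `!a` request `str()`, `repr()` and `ascii()` of the value. Behaviour
is identical; the flag form simply drops a redundant function call. A quick
check:

    error = ValueError("bad config")

    assert f"Error is: {error!s}" == f"Error is: {str(error)}"
    assert f"Error is: {error!r}" == f"Error is: {repr(error)}"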