Skip to content

Commit

Permalink
Use pyupgrade to replace format strings and more (#1242)
Browse files Browse the repository at this point in the history
  • Loading branch information
deepyaman committed Sep 22, 2021
1 parent e38febe commit 1dafdc0
Show file tree
Hide file tree
Showing 51 changed files with 211 additions and 218 deletions.
12 changes: 9 additions & 3 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,15 @@ repos:
- repo: https://github.com/asottile/blacken-docs
rev: v1.9.2
hooks:
- id: blacken-docs
additional_dependencies: [black==21.5b1]
entry: blacken-docs --skip-errors
- id: blacken-docs
additional_dependencies: [black==21.5b1]
entry: blacken-docs --skip-errors

- repo: https://github.com/asottile/pyupgrade
rev: v2.26.0
hooks:
- id: pyupgrade
args: [--py36-plus]

- repo: local
hooks:
Expand Down
2 changes: 1 addition & 1 deletion .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=ungrouped-imports,bad-continuation,duplicate-code,consider-using-f-string
disable=ungrouped-imports,bad-continuation,duplicate-code

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
Expand Down
2 changes: 1 addition & 1 deletion RELEASE.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
## Upcoming deprecations for Kedro 0.18.0

## Thanks for supporting contributions
[Deepyaman Datta](https://github.com/deepyaman)

# Release 0.17.5

Expand All @@ -28,7 +29,6 @@
| `tracking.MetricsDataSet` | Dataset to track numeric metrics for experiment tracking | `kedro.extras.datasets.tracking` |
| `tracking.JSONDataSet` | Dataset to track data for experiment tracking | `kedro.extras.datasets.tracking` |


## Bug fixes and other changes
* Bumped minimum required `fsspec` version to 2021.04.
* Fixed the `kedro install` and `kedro build-reqs` flows when uninstalled dependencies are present in a project's `settings.py`, `context.py` or `hooks.py` ([Issue #829](https://github.com/quantumblacklabs/kedro/issues/829)).
Expand Down
13 changes: 6 additions & 7 deletions docs/conf.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Kedro documentation build configuration file, created by
# sphinx-quickstart on Mon Dec 18 11:31:24 2017.
Expand Down Expand Up @@ -376,7 +375,7 @@ def autolink_replacements(what: str) -> List[Tuple[str, str, str]]:
# first do plural only for classes
replacements += [
(
r"``{}``s".format(obj),
fr"``{obj}``s",
f":{what}:`~{module}.{obj}`\\\\s",
obj,
)
Expand All @@ -385,7 +384,7 @@ def autolink_replacements(what: str) -> List[Tuple[str, str, str]]:

# singular
replacements += [
(r"``{}``".format(obj), f":{what}:`~{module}.{obj}`", obj)
(fr"``{obj}``", f":{what}:`~{module}.{obj}`", obj)
for obj in objects
]

Expand All @@ -395,13 +394,13 @@ def autolink_replacements(what: str) -> List[Tuple[str, str, str]]:
if what == "class":
# first do plural only for classes
suggestions += [
(r"(?<!\w|`){}s(?!\w|`{{2}})".format(obj), f"``{obj}``s", obj)
(fr"(?<!\w|`){obj}s(?!\w|`{{2}})", f"``{obj}``s", obj)
for obj in objects
]

# then singular
suggestions += [
(r"(?<!\w|`){}(?!\w|`{{2}})".format(obj), f"``{obj}``", obj)
(fr"(?<!\w|`){obj}(?!\w|`{{2}})", f"``{obj}``", obj)
for obj in objects
]

Expand All @@ -424,7 +423,7 @@ def log_suggestions(lines: List[str], name: str):
continue

for existing, replacement, obj in suggestions:
new = re.sub(existing, r"{}".format(replacement), lines[i])
new = re.sub(existing, fr"{replacement}", lines[i])
if new == lines[i]:
continue
if ":rtype:" in lines[i] or ":type " in lines[i]:
Expand Down Expand Up @@ -457,7 +456,7 @@ def autolink_classes_and_methods(lines):
continue

for existing, replacement, obj in replacements:
lines[i] = re.sub(existing, r"{}".format(replacement), lines[i])
lines[i] = re.sub(existing, fr"{replacement}", lines[i])


def autodoc_process_docstring(app, what, name, obj, options, lines):
Expand Down
8 changes: 4 additions & 4 deletions features/steps/cli_steps.py
Original file line number Diff line number Diff line change
Expand Up @@ -660,7 +660,7 @@ def check_output_cells_empty(context):
assert cell["outputs"] == []


@then("jupyter notebook should run on port {port}")
@then("jupyter notebook should run on port {port:d}")
def check_jupyter_nb_proc_on_port(context: behave.runner.Context, port: int):
"""Check that jupyter notebook service is running on specified port.
Expand All @@ -669,7 +669,7 @@ def check_jupyter_nb_proc_on_port(context: behave.runner.Context, port: int):
port: Port to check
"""
url = f"http://localhost:{int(port)}"
url = f"http://localhost:{port}"
try:
util.wait_for(
func=_check_service_up,
Expand All @@ -683,7 +683,7 @@ def check_jupyter_nb_proc_on_port(context: behave.runner.Context, port: int):
context.result.terminate()


@then("Jupyter Lab should run on port {port}")
@then("Jupyter Lab should run on port {port:d}")
def check_jupyter_lab_proc_on_port(context: behave.runner.Context, port: int):
"""Check that jupyter lab service is running on specified port.
Expand All @@ -692,7 +692,7 @@ def check_jupyter_lab_proc_on_port(context: behave.runner.Context, port: int):
port: Port to check
"""
url = "http://localhost:%d" % int(port)
url = f"http://localhost:{port}"
try:
util.wait_for(
func=_check_service_up,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Copyright 2021 QuantumBlack Visual Analytics Limited
#
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@


# get the dependencies and installs
with open("requirements.txt", "r", encoding="utf-8") as f:
with open("requirements.txt", encoding="utf-8") as f:
# Make sure we strip all comments and options (e.g "--extra-index-url")
# that arise from a modified pip.conf file that configure global options
# when running kedro build-reqs
Expand Down
2 changes: 1 addition & 1 deletion features/steps/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ def wait_for(

sleep(sleep_for)
raise WaitForException(
"func: %s, didn't return within specified timeout: %d" % (func, timeout_)
f"func: {func}, didn't return within specified timeout: {timeout_}"
)


Expand Down
4 changes: 1 addition & 3 deletions kedro/config/default_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,6 @@

CURRENT_DIR = os.path.dirname(__file__)

with open(
os.path.join(CURRENT_DIR, "logging.yml"), "rt", encoding="utf-8"
) as conf_file:
with open(os.path.join(CURRENT_DIR, "logging.yml"), encoding="utf-8") as conf_file:
LOGGING_CONFIG = yaml.safe_load(conf_file.read())
logging.config.dictConfig(LOGGING_CONFIG)
10 changes: 5 additions & 5 deletions kedro/config/templated_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ def __init__(
conf_paths: Union[str, Iterable[str]],
*,
globals_pattern: Optional[str] = None,
globals_dict: Optional[Dict[str, Any]] = None
globals_dict: Optional[Dict[str, Any]] = None,
):
"""Instantiate a ``TemplatedConfigLoader``.
Expand Down Expand Up @@ -240,8 +240,8 @@ def _format_string(match):
if value is None:
if match.group("default") is None:
raise ValueError(
"Failed to format pattern '{}': "
"no config value found, no default provided".format(match.group(0))
f"Failed to format pattern '{match.group(0)}': "
f"no config value found, no default provided"
)
return match.group("default")

Expand All @@ -255,8 +255,8 @@ def _format_string(match):
formatted_key = _format_object(key, format_dict)
if not isinstance(formatted_key, str):
raise ValueError(
"When formatting '{}' key, only string values can be used. "
"'{}' found".format(key, formatted_key)
f"When formatting '{key}' key, only string values can be used. "
f"'{formatted_key}' found"
)

key = formatted_key
Expand Down
3 changes: 1 addition & 2 deletions kedro/extras/datasets/api/api_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@
"""``APIDataSet`` loads the data from HTTP(S) APIs.
It uses the python requests library: https://requests.readthedocs.io/en/master/
"""
import socket
from typing import Any, Dict, List, Tuple, Union

import requests
Expand Down Expand Up @@ -115,7 +114,7 @@ def _execute_request(self) -> requests.Response:
response.raise_for_status()
except requests.exceptions.HTTPError as exc:
raise DataSetError("Failed to fetch data", exc) from exc
except socket.error as exc:
except OSError as exc:
raise DataSetError("Failed to connect to the remote server") from exc

return response
Expand Down
4 changes: 2 additions & 2 deletions kedro/extras/datasets/pandas/sql_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,8 +80,8 @@ def _find_known_drivers(module_import_error: ImportError) -> Optional[str]:

if KNOWN_PIP_INSTALL.get(missing_module):
return (
"You can also try installing missing driver with\n"
"\npip install {}".format(KNOWN_PIP_INSTALL.get(missing_module))
f"You can also try installing missing driver with\n"
f"\npip install {KNOWN_PIP_INSTALL.get(missing_module)}"
)

return None
Expand Down
11 changes: 5 additions & 6 deletions kedro/extras/datasets/spark/spark_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,11 +164,11 @@ def hdfs_glob(self, pattern: str) -> List[str]:
for dpath, _, fnames in self.walk(prefix):
if fnmatch(dpath, pattern):
matched.add(dpath)
matched |= set(
matched |= {
f"{dpath}/{fname}"
for fname in fnames
if fnmatch(f"{dpath}/{fname}", pattern)
)
}
except HdfsError: # pragma: no cover
# HdfsError is raised by `self.walk()` if prefix does not exist in HDFS.
# Ignore and return an empty list.
Expand Down Expand Up @@ -271,10 +271,9 @@ def __init__( # pylint: disable=too-many-arguments

elif fs_prefix == "hdfs://" and version:
warn(
"HDFS filesystem support for versioned {} is in beta and uses "
"`hdfs.client.InsecureClient`, please use with caution".format(
self.__class__.__name__
)
f"HDFS filesystem support for versioned {self.__class__.__name__} is "
f"in beta and uses `hdfs.client.InsecureClient`, please use with "
f"caution"
)

# default namenode address
Expand Down
4 changes: 2 additions & 2 deletions kedro/extras/datasets/spark/spark_jdbc_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,8 +141,8 @@ def __init__(
for cred_key, cred_value in credentials.items():
if cred_value is None:
raise DataSetError(
"Credential property `{}` cannot be None. "
"Please provide a value.".format(cred_key)
f"Credential property `{cred_key}` cannot be None. "
f"Please provide a value."
)

load_properties = self._load_args.get("properties", {})
Expand Down
18 changes: 11 additions & 7 deletions kedro/framework/cli/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,10 +76,13 @@
)
"""

PipelineArtifacts = NamedTuple(
"PipelineArtifacts",
[("pipeline_dir", Path), ("pipeline_tests", Path), ("pipeline_conf", Path)],
)

class PipelineArtifacts(NamedTuple):
"""An ordered collection of source_path, tests_path, config_paths"""

pipeline_dir: Path
pipeline_tests: Path
pipeline_conf: Path


def _assert_pkg_name_ok(pkg_name: str):
Expand Down Expand Up @@ -1109,14 +1112,15 @@ def _append_package_reqs(
return

sorted_reqs = sorted(str(req) for req in reqs_to_add)
sep = "\n"
with open(requirements_in, "a", encoding="utf-8") as file:
file.write(
f"\n\n# Additional requirements from modular pipeline `{pipeline_name}`:\n"
)
file.write("\n".join(sorted_reqs))
file.write(sep.join(sorted_reqs))
click.secho(
"Added the following requirements from modular pipeline `{}` to "
"requirements.in:\n{}".format(pipeline_name, "\n".join(sorted_reqs))
f"Added the following requirements from modular pipeline `{pipeline_name}` to "
f"requirements.in:\n{sep.join(sorted_reqs)}"
)
click.secho(
"Use `kedro install --build-reqs` to compile and install the updated list of "
Expand Down
2 changes: 1 addition & 1 deletion kedro/framework/cli/starters.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ def _fetch_config_from_file(config_path: str) -> Dict[str, str]:
"""
try:
with open(config_path, "r", encoding="utf-8") as config_file:
with open(config_path, encoding="utf-8") as config_file:
config = yaml.safe_load(config_file)

if KedroCliError.VERBOSE_ERROR:
Expand Down
30 changes: 15 additions & 15 deletions kedro/io/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,8 +213,8 @@ def load(self) -> Any:
except Exception as exc:
# This exception handling is by design as the composed data sets
# can throw any type of exception.
message = "Failed while loading data from data set {}.\n{}".format(
str(self), str(exc)
message = (
f"Failed while loading data from data set {str(self)}.\n{str(exc)}"
)
raise DataSetError(message) from exc

Expand Down Expand Up @@ -274,22 +274,22 @@ def _to_str(obj, is_root=False):
@abc.abstractmethod
def _load(self) -> Any:
raise NotImplementedError(
"`{}` is a subclass of AbstractDataSet and"
"it must implement the `_load` method".format(self.__class__.__name__)
f"`{self.__class__.__name__}` is a subclass of AbstractDataSet and "
f"it must implement the `_load` method"
)

@abc.abstractmethod
def _save(self, data: Any) -> None:
raise NotImplementedError(
"`{}` is a subclass of AbstractDataSet and"
"it must implement the `_save` method".format(self.__class__.__name__)
f"`{self.__class__.__name__}` is a subclass of AbstractDataSet and "
f"it must implement the `_save` method"
)

@abc.abstractmethod
def _describe(self) -> Dict[str, Any]:
raise NotImplementedError(
"`{}` is a subclass of AbstractDataSet and"
"it must implement the `_describe` method".format(self.__class__.__name__)
f"`{self.__class__.__name__}` is a subclass of AbstractDataSet and "
f"it must implement the `_describe` method"
)

def exists(self) -> bool:
Expand All @@ -307,8 +307,8 @@ def exists(self) -> bool:
self._logger.debug("Checking whether target of %s exists", str(self))
return self._exists()
except Exception as exc:
message = "Failed during exists check for data set {}.\n{}".format(
str(self), str(exc)
message = (
f"Failed during exists check for data set {str(self)}.\n{str(exc)}"
)
raise DataSetError(message) from exc

Expand Down Expand Up @@ -605,8 +605,8 @@ def _get_save_path(self) -> PurePosixPath:

if self._exists_function(str(versioned_path)):
raise DataSetError(
"Save path `{}` for {} must not exist if versioning "
"is enabled.".format(versioned_path, str(self))
f"Save path `{versioned_path}` for {str(self)} must not exist if "
f"versioning is enabled."
)

return versioned_path
Expand Down Expand Up @@ -661,8 +661,8 @@ def exists(self) -> bool:
except VersionNotFoundError:
return False
except Exception as exc: # SKIP_IF_NO_SPARK
message = "Failed during exists check for data set {}.\n{}".format(
str(self), str(exc)
message = (
f"Failed during exists check for data set {str(self)}.\n{str(exc)}"
)
raise DataSetError(message) from exc

Expand Down Expand Up @@ -696,7 +696,7 @@ def _parse_filepath(filepath: str) -> Dict[str, str]:
if protocol == "file":
windows_path = re.match(r"^/([a-zA-Z])[:|]([\\/].*)$", path)
if windows_path:
path = "{}:{}".format(*windows_path.groups())
path = ":".join(windows_path.groups())

options = {"protocol": protocol, "path": path}

Expand Down
Loading

0 comments on commit 1dafdc0

Please sign in to comment.