Skip to content

Commit

Permalink
Update release notes post release (#1240)
Browse files Browse the repository at this point in the history
  • Loading branch information
Lorena Bălan committed Sep 20, 2021
1 parent be13c63 commit 7582375
Show file tree
Hide file tree
Showing 11 changed files with 46 additions and 49 deletions.
2 changes: 1 addition & 1 deletion .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=ungrouped-imports,bad-continuation,duplicate-code
disable=ungrouped-imports,bad-continuation,duplicate-code,consider-using-f-string

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
Expand Down
14 changes: 12 additions & 2 deletions RELEASE.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,15 @@
# Upcoming Release 0.17.6

## Major features and improvements

## Bug fixes and other changes

## Minor breaking changes to the API

## Upcoming deprecations for Kedro 0.18.0

## Thanks for supporting contributions

# Release 0.17.5

## Major features and improvements
Expand All @@ -22,8 +34,6 @@
* Imports are now refactored at `kedro pipeline package` and `kedro pipeline pull` time, so that _aliasing_ a modular pipeline doesn't break it.
* Pinned `dynaconf` to `<3.1.6` because the method signature for `_validate_items` changed which is used in Kedro.

## Minor breaking changes to the API

## Upcoming deprecations for Kedro 0.18.0
* `kedro pipeline list` and `kedro pipeline describe` are being deprecated in favour of new commands `kedro registry list` and `kedro registry describe`.
* `kedro install` is being deprecated in favour of using `pip install -r src/requirements.txt` to install project dependencies.
Expand Down
2 changes: 1 addition & 1 deletion features/environment.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ def _install_project_requirements(context):
Path(
"kedro/templates/project/{{ cookiecutter.repo_name }}/src/requirements.txt"
)
.read_text()
.read_text(encoding="utf-8")
.splitlines()
)
install_reqs = [req for req in install_reqs if "{" not in req]
Expand Down
2 changes: 1 addition & 1 deletion features/steps/cli_steps.py
Original file line number Diff line number Diff line change
Expand Up @@ -669,7 +669,7 @@ def check_jupyter_nb_proc_on_port(context: behave.runner.Context, port: int):
port: Port to check
"""
url = "http://localhost:%d" % int(port)
url = f"http://localhost:{int(port)}"
try:
util.wait_for(
func=_check_service_up,
Expand Down
2 changes: 1 addition & 1 deletion kedro/extras/extensions/ipython.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ def load_ipython_extension(ipython):
"""Main entry point when %load_ext is executed"""

global project_path
global startup_path
global startup_path # pylint:disable=global-variable-not-assigned

ipython.register_magic_function(init_kedro, "line")
ipython.register_magic_function(reload_kedro, "line", "reload_kedro")
Expand Down
2 changes: 1 addition & 1 deletion kedro/framework/cli/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ def get_pkg_version(reqs_path: (Union[str, Path]), package_name: str) -> str:
raise KedroCliError(f"Given path `{reqs_path}` is not a regular file.")

pattern = re.compile(package_name + r"([^\w]|$)")
with reqs_path.open("r") as reqs_file:
with reqs_path.open("r", encoding="utf-8") as reqs_file:
for req_line in reqs_file:
req_line = req_line.strip()
if pattern.search(req_line):
Expand Down
4 changes: 2 additions & 2 deletions kedro/io/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,8 +171,8 @@ def from_config(
)
except Exception as exc:
raise DataSetError(
"An exception occurred when parsing config "
"for DataSet `{}`:\n{}".format(name, str(exc))
f"An exception occurred when parsing config "
f"for DataSet `{name}`:\n{str(exc)}"
) from exc

try:
Expand Down
10 changes: 3 additions & 7 deletions kedro/io/data_catalog.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ def __init__(self, datasets):
# Don't allow users to add/change attributes on the fly
def __setattr__(self, key, value):
msg = "Operation not allowed! "
if key in self.__dict__.keys():
if key in self.__dict__:
msg += "Please change datasets through configuration."
else:
msg += "Please use DataCatalog.add() instead."
Expand Down Expand Up @@ -218,9 +218,7 @@ def _check_and_normalize_transformers(self):

if excess_transformers:
raise DataSetNotFoundError(
"Unexpected transformers for missing data_sets {}".format(
", ".join(excess_transformers)
)
f"Unexpected transformers for missing data_sets {', '.join(excess_transformers)}"
)

for data_set_name in missing_transformers:
Expand Down Expand Up @@ -624,9 +622,7 @@ def add_transformer(

if not isinstance(transformer, AbstractTransformer):
raise TypeError(
"Object of type {} is not an instance of AbstractTransformer".format(
type(transformer)
)
f"Object of type {type(transformer)} is not an instance of AbstractTransformer"
)
if data_set_names is None:
self._default_transformers.append(transformer)
Expand Down
33 changes: 9 additions & 24 deletions tests/framework/cli/pipeline/test_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,12 +92,7 @@ def pipelines_dict():
class TestPipelineCreateCommand:
@pytest.mark.parametrize("env", [None, "local"])
def test_create_pipeline( # pylint: disable=too-many-locals
self,
fake_repo_path,
fake_project_cli,
fake_metadata,
env,
fake_package_path,
self, fake_repo_path, fake_project_cli, fake_metadata, env, fake_package_path
):
"""Test creation of a pipeline"""
pipelines_dir = fake_package_path / "pipelines"
Expand Down Expand Up @@ -419,9 +414,7 @@ def test_bad_pipeline_name(
):
"""Test error message when bad pipeline name was provided."""
result = CliRunner().invoke(
fake_project_cli,
["pipeline", "delete", "-y", bad_name],
obj=fake_metadata,
fake_project_cli, ["pipeline", "delete", "-y", bad_name], obj=fake_metadata
)
assert result.exit_code
assert error_message in result.output
Expand Down Expand Up @@ -547,9 +540,7 @@ def test_describe_pipeline(
pipelines_dict,
):
result = CliRunner().invoke(
fake_project_cli,
["pipeline", "describe", pipeline_name],
obj=fake_metadata,
fake_project_cli, ["pipeline", "describe", pipeline_name], obj=fake_metadata
)

assert not result.exit_code
Expand All @@ -569,16 +560,10 @@ def test_not_found_pipeline(self, fake_project_cli, fake_metadata):
assert expected_output in result.output

def test_describe_pipeline_default(
self,
fake_project_cli,
fake_metadata,
yaml_dump_mock,
pipelines_dict,
self, fake_project_cli, fake_metadata, yaml_dump_mock, pipelines_dict
):
result = CliRunner().invoke(
fake_project_cli,
["pipeline", "describe"],
obj=fake_metadata,
fake_project_cli, ["pipeline", "describe"], obj=fake_metadata
)

assert not result.exit_code
Expand All @@ -597,7 +582,7 @@ def source(self, tmp_path) -> Path:
source_dir.mkdir()
(source_dir / "existing").mkdir()
(source_dir / "existing" / "source_file").touch()
(source_dir / "existing" / "common").write_text("source")
(source_dir / "existing" / "common").write_text("source", encoding="utf-8")
(source_dir / "new").mkdir()
(source_dir / "new" / "source_file").touch()
return source_dir
Expand All @@ -608,7 +593,7 @@ def test_sync_target_exists(self, source, tmp_path):
target.mkdir()
(target / "existing").mkdir()
(target / "existing" / "target_file").touch()
(target / "existing" / "common").write_text("target")
(target / "existing" / "common").write_text("target", encoding="utf-8")

_sync_dirs(source, target)

Expand All @@ -618,7 +603,7 @@ def test_sync_target_exists(self, source, tmp_path):
assert (source / "new" / "source_file").is_file()

assert (target / "existing" / "source_file").is_file()
assert (target / "existing" / "common").read_text() == "target"
assert (target / "existing" / "common").read_text(encoding="utf-8") == "target"
assert (target / "existing" / "target_file").exists()
assert (target / "new" / "source_file").is_file()

Expand All @@ -634,6 +619,6 @@ def test_sync_no_target(self, source, tmp_path):
assert (source / "new" / "source_file").is_file()

assert (target / "existing" / "source_file").is_file()
assert (target / "existing" / "common").read_text() == "source"
assert (target / "existing" / "common").read_text(encoding="utf-8") == "source"
assert not (target / "existing" / "target_file").exists()
assert (target / "new" / "source_file").is_file()
20 changes: 13 additions & 7 deletions tests/framework/cli/test_starters.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,9 +93,11 @@ def _assert_template_ok(
assert len(generated_files) == FILES_IN_TEMPLATE
assert full_path.exists()
assert (full_path / ".gitignore").is_file()
assert project_name in (full_path / "README.md").read_text()
assert "KEDRO" in (full_path / ".gitignore").read_text()
assert kedro_version in (full_path / "src" / "requirements.txt").read_text()
assert project_name in (full_path / "README.md").read_text(encoding="utf-8")
assert "KEDRO" in (full_path / ".gitignore").read_text(encoding="utf-8")
assert kedro_version in (full_path / "src" / "requirements.txt").read_text(
encoding="utf-8"
)
assert (full_path / "src" / python_package / "__init__.py").is_file()


Expand All @@ -110,8 +112,12 @@ def test_starter_list(fake_kedro_cli):

def test_cookiecutter_json_matches_prompts_yml():
"""Validate the contents of the default config file."""
cookiecutter_json = json.loads((TEMPLATE_PATH / "cookiecutter.json").read_text())
prompts_yml = yaml.safe_load((TEMPLATE_PATH / "prompts.yml").read_text())
cookiecutter_json = json.loads(
(TEMPLATE_PATH / "cookiecutter.json").read_text(encoding="utf-8")
)
prompts_yml = yaml.safe_load(
(TEMPLATE_PATH / "prompts.yml").read_text(encoding="utf-8")
)
assert set(cookiecutter_json) == set(prompts_yml) | {"kedro_version"}


Expand Down Expand Up @@ -245,7 +251,7 @@ def test_prompt_no_title(self, fake_kedro_cli):

def test_prompt_bad_yaml(self, fake_kedro_cli):
shutil.copytree(TEMPLATE_PATH, "template")
(Path("template") / "prompts.yml").write_text("invalid\tyaml")
(Path("template") / "prompts.yml").write_text("invalid\tyaml", encoding="utf-8")
result = CliRunner().invoke(fake_kedro_cli, ["new", "--starter", "template"])
assert result.exit_code != 0
assert "Failed to generate project: could not load prompts.yml" in result.output
Expand Down Expand Up @@ -380,7 +386,7 @@ def test_config_empty(self, fake_kedro_cli):

def test_config_bad_yaml(self, fake_kedro_cli):
"""Check the error if config YAML is invalid."""
Path("config.yml").write_text("invalid\tyaml")
Path("config.yml").write_text("invalid\tyaml", encoding="utf-8")
result = CliRunner().invoke(fake_kedro_cli, ["new", "-v", "-c", "config.yml"])
assert result.exit_code != 0
assert "Failed to generate project: could not load config" in result.output
Expand Down
4 changes: 2 additions & 2 deletions tests/framework/session/test_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,11 +222,11 @@ def fake_project(tmp_path, local_logging_config, mock_package_name):
}
}
toml_str = toml.dumps(payload)
pyproject_toml_path.write_text(toml_str)
pyproject_toml_path.write_text(toml_str, encoding="utf-8")

env_logging = fake_project_dir / "conf" / "base" / "logging.yml"
env_logging.parent.mkdir(parents=True)
env_logging.write_text(json.dumps(local_logging_config))
env_logging.write_text(json.dumps(local_logging_config), encoding="utf-8")
(fake_project_dir / "conf" / "local").mkdir()
return fake_project_dir

Expand Down

0 comments on commit 7582375

Please sign in to comment.