-
Notifications
You must be signed in to change notification settings - Fork 906
/
test_micropkg_requirements.py
266 lines (225 loc) · 10.8 KB
/
test_micropkg_requirements.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
import pytest
from click.testing import CliRunner
from kedro.framework.cli.micropkg import _get_sdist_name, _safe_parse_requirements
# Name of the throwaway pipeline created/packaged/pulled by every test below.
PIPELINE_NAME = "my_pipeline"
# Inspired by test cases given in https://www.python.org/dev/peps/pep-0508/.
# These are all valid requirement specifications that can be used in both
# requirements.txt and in METADATA Requires-Dist.
# NOTE: tests assert on this literal byte-for-byte (including the inline
# comments and the deliberately contradictory `name>=3,<2`), so do not edit it.
SIMPLE_REQUIREMENTS = """A
A.B-C_D
aa
name
name<=1
name>=3
name>=3,<2
name==1.2.3
name!=1.2.3 # inline comment
# whole line comment
name@http://foo.com
name [fred,bar] @ http://foo.com ; python_version=='2.7'
name[quux, strange];python_version<'2.7' and platform_version=='2'
name; os_name=='a' or os_name=='b'
requests [security,tests] >= 2.8.1, == 2.8.* ; python_version < "2.7"
pip @ https://github.com/pypa/pip/archive/1.3.1.zip#sha1=da9234ees
"""
# These requirements can be used in requirements.txt but not in METADATA Requires-Dist.
# They cannot be parsed by pkg_resources.
# Each line is also parametrized individually in test_complex_requirements.
COMPLEX_REQUIREMENTS = """--extra-index-url https://this.wont.work
-r other_requirements.txt
./path/to/package.whl
http://some.website.com/package.whl
"""
@pytest.mark.usefixtures("chdir_to_dummy_project", "cleanup_dist")
class TestMicropkgRequirements:
    """Many of these tests follow the pattern:
    - create a pipeline with some sort of requirements.txt
    - package the pipeline/micro-package
    - delete the pipeline and pull in the packaged one
    - assert the project's modified requirements.txt is as expected
    """

    # --- CLI helpers -----------------------------------------------------

    def call_pipeline_create(self, cli, metadata):
        """Run ``kedro pipeline create`` for the test pipeline; assert success."""
        result = CliRunner().invoke(
            cli, ["pipeline", "create", PIPELINE_NAME], obj=metadata
        )
        assert result.exit_code == 0

    def call_micropkg_package(self, cli, metadata):
        """Run ``kedro micropkg package`` on the test pipeline; assert success."""
        result = CliRunner().invoke(
            cli,
            ["micropkg", "package", f"pipelines.{PIPELINE_NAME}"],
            obj=metadata,
        )
        assert result.exit_code == 0

    def call_pipeline_delete(self, cli, metadata):
        """Run ``kedro pipeline delete -y`` for the test pipeline; assert success."""
        result = CliRunner().invoke(
            cli, ["pipeline", "delete", "-y", PIPELINE_NAME], obj=metadata
        )
        assert result.exit_code == 0

    def call_micropkg_pull(self, cli, metadata, repo_path):
        """Run ``kedro micropkg pull`` on the packaged sdist; assert success.

        Also asserts the sdist produced by ``call_micropkg_package`` exists.
        """
        sdist_file = (
            repo_path / "dist" / _get_sdist_name(name=PIPELINE_NAME, version="0.1")
        )
        assert sdist_file.is_file()
        result = CliRunner().invoke(
            cli,
            ["micropkg", "pull", str(sdist_file)],
            obj=metadata,
        )
        assert result.exit_code == 0

    # --- private helpers (shared setup shared by the tests below) --------

    def _package_delete_pull(self, cli, metadata, repo_path):
        """Common round-trip: package the pipeline, delete it, pull the sdist."""
        self.call_micropkg_package(cli, metadata)
        self.call_pipeline_delete(cli, metadata)
        self.call_micropkg_pull(cli, metadata, repo_path)

    @staticmethod
    def _project_requirements_txt(repo_path):
        """Path to the project-level requirements.txt."""
        return repo_path / "src" / "requirements.txt"

    @staticmethod
    def _pipeline_requirements_txt(package_path):
        """Path to the pipeline-level requirements.txt."""
        return package_path / "pipelines" / PIPELINE_NAME / "requirements.txt"

    # --- tests -----------------------------------------------------------

    def test_existing_complex_project_requirements_txt(
        self, fake_project_cli, fake_metadata, fake_package_path, fake_repo_path
    ):
        """Pipeline requirements.txt and project requirements.txt."""
        project_requirements_txt = self._project_requirements_txt(fake_repo_path)
        with open(project_requirements_txt, "a", encoding="utf-8") as file:
            file.write(COMPLEX_REQUIREMENTS)
        existing_requirements = _safe_parse_requirements(
            project_requirements_txt.read_text()
        )
        self.call_pipeline_create(fake_project_cli, fake_metadata)
        pipeline_requirements_txt = self._pipeline_requirements_txt(fake_package_path)
        pipeline_requirements_txt.write_text(SIMPLE_REQUIREMENTS)
        self._package_delete_pull(fake_project_cli, fake_metadata, fake_repo_path)
        packaged_requirements = _safe_parse_requirements(SIMPLE_REQUIREMENTS)
        pulled_requirements = _safe_parse_requirements(
            project_requirements_txt.read_text()
        )
        # The project requirements.txt afterwards should be the requirements that already existed in
        # project requirements.txt + those pulled in from pipeline requirements.txt.
        # Unparseable COMPLEX_REQUIREMENTS should still be there.
        assert pulled_requirements == existing_requirements | packaged_requirements
        assert COMPLEX_REQUIREMENTS in project_requirements_txt.read_text()

    def test_existing_project_requirements_txt(
        self, fake_project_cli, fake_metadata, fake_package_path, fake_repo_path
    ):
        """Pipeline requirements.txt and project requirements.txt."""
        project_requirements_txt = self._project_requirements_txt(fake_repo_path)
        existing_requirements = _safe_parse_requirements(
            project_requirements_txt.read_text()
        )
        self.call_pipeline_create(fake_project_cli, fake_metadata)
        pipeline_requirements_txt = self._pipeline_requirements_txt(fake_package_path)
        pipeline_requirements_txt.write_text(SIMPLE_REQUIREMENTS)
        self._package_delete_pull(fake_project_cli, fake_metadata, fake_repo_path)
        packaged_requirements = _safe_parse_requirements(SIMPLE_REQUIREMENTS)
        pulled_requirements = _safe_parse_requirements(
            project_requirements_txt.read_text()
        )
        # Project requirements.txt afterwards should be the requirements that already existed in
        # project requirements.txt + those pulled in from pipeline requirements.txt.
        assert pulled_requirements == existing_requirements | packaged_requirements

    def test_missing_project_requirements_txt(
        self,
        fake_project_cli,
        fake_metadata,
        fake_package_path,
        fake_repo_path,
    ):
        """Pipeline requirements.txt without requirements.txt at
        project level."""
        # Remove project requirements.txt
        project_requirements_txt = self._project_requirements_txt(fake_repo_path)
        project_requirements_txt.unlink()
        self.call_pipeline_create(fake_project_cli, fake_metadata)
        pipeline_requirements_txt = self._pipeline_requirements_txt(fake_package_path)
        pipeline_requirements_txt.write_text(SIMPLE_REQUIREMENTS)
        packaged_requirements = _safe_parse_requirements(SIMPLE_REQUIREMENTS)
        self._package_delete_pull(fake_project_cli, fake_metadata, fake_repo_path)
        # Pull should recreate the project requirements.txt from the package's.
        assert project_requirements_txt.exists()
        pulled_requirements = _safe_parse_requirements(
            project_requirements_txt.read_text()
        )
        assert packaged_requirements == pulled_requirements

    def test_no_requirements(
        self,
        fake_project_cli,
        fake_metadata,
        fake_repo_path,
    ):
        """No pipeline requirements.txt, and also no requirements.txt
        at project level."""
        # Remove project requirements.txt
        project_requirements_txt = self._project_requirements_txt(fake_repo_path)
        project_requirements_txt.unlink()
        self.call_pipeline_create(fake_project_cli, fake_metadata)
        self._package_delete_pull(fake_project_cli, fake_metadata, fake_repo_path)
        # With no requirements anywhere, pulling must not create the file.
        assert not project_requirements_txt.exists()

    def test_all_requirements_already_covered(
        self, fake_project_cli, fake_metadata, fake_repo_path, fake_package_path
    ):
        """All requirements from pipeline requirements.txt already exist at project
        level requirements.txt."""
        self.call_pipeline_create(fake_project_cli, fake_metadata)
        pipeline_requirements_txt = self._pipeline_requirements_txt(fake_package_path)
        project_requirements_txt = self._project_requirements_txt(fake_repo_path)
        pipeline_requirements_txt.write_text(SIMPLE_REQUIREMENTS)
        project_requirements_txt.write_text(SIMPLE_REQUIREMENTS)
        self._package_delete_pull(fake_project_cli, fake_metadata, fake_repo_path)
        # Pipeline requirements.txt expected to be copied into project requirements.txt without any
        # addition
        assert project_requirements_txt.read_text() == SIMPLE_REQUIREMENTS

    def test_no_pipeline_requirements_txt(
        self, fake_project_cli, fake_metadata, fake_repo_path
    ):
        """No pipeline requirements.txt and no project requirements.txt does not
        create project requirements.txt."""
        # Remove project requirements.txt
        project_requirements_txt = self._project_requirements_txt(fake_repo_path)
        project_requirements_txt.unlink()
        self.call_pipeline_create(fake_project_cli, fake_metadata)
        self._package_delete_pull(fake_project_cli, fake_metadata, fake_repo_path)
        assert not project_requirements_txt.exists()

    def test_empty_pipeline_requirements_txt(
        self, fake_project_cli, fake_metadata, fake_package_path, fake_repo_path
    ):
        """Empty pipeline requirements.txt and no project requirements.txt does not
        create project requirements.txt."""
        # Remove project requirements.txt
        project_requirements_txt = self._project_requirements_txt(fake_repo_path)
        project_requirements_txt.unlink()
        self.call_pipeline_create(fake_project_cli, fake_metadata)
        pipeline_requirements_txt = self._pipeline_requirements_txt(fake_package_path)
        pipeline_requirements_txt.touch()
        self._package_delete_pull(fake_project_cli, fake_metadata, fake_repo_path)
        assert not project_requirements_txt.exists()

    @pytest.mark.parametrize("requirement", COMPLEX_REQUIREMENTS.splitlines())
    def test_complex_requirements(
        self, requirement, fake_project_cli, fake_metadata, fake_package_path
    ):
        """Options that are valid in requirements.txt but cannot be packaged using
        setup.py."""
        self.call_pipeline_create(fake_project_cli, fake_metadata)
        pipeline_requirements_txt = self._pipeline_requirements_txt(fake_package_path)
        pipeline_requirements_txt.write_text(requirement)
        # Packaging must fail loudly on requirements that METADATA cannot express.
        result = CliRunner().invoke(
            fake_project_cli,
            ["micropkg", "package", f"pipelines.{PIPELINE_NAME}"],
            obj=fake_metadata,
        )
        assert result.exit_code == 1
        assert "InvalidRequirement: Parse error" in result.output