Move imagepullsecrets sample to samples/core #1767

Merged: 15 commits, Aug 9, 2019
Changes from 1 commit
Update the location of imagepullsecrets so that it can serve as an example.
numerology committed Aug 8, 2019
commit ad7a965b374f76dbdd7c0fceac911a610b837994
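For context, the relocated sample demonstrates how a pipeline attaches an image pull secret through its pipeline-level configuration. Below is a minimal sketch of that pattern, assuming the KFP v1 SDK's dsl.get_pipeline_conf() and PipelineConf.set_image_pull_secrets API; the echo step and the secret name my-registry-secret are illustrative, not taken from the actual sample.

import kfp.dsl as dsl
from kfp import compiler
from kubernetes import client as k8s_client


@dsl.pipeline(name='imagepullsecrets-demo',
              description='Attach an image pull secret to the pipeline pods.')
def imagepullsecrets_demo():
    # Any step pulling from a private registry would need the secret below.
    dsl.ContainerOp(
        name='echo',
        image='alpine:latest',
        command=['echo', 'hello'])
    # Reference the Kubernetes secret that holds the registry credentials
    # (the secret name here is a placeholder).
    dsl.get_pipeline_conf().set_image_pull_secrets(
        [k8s_client.V1LocalObjectReference(name='my-registry-secret')])


if __name__ == '__main__':
    compiler.Compiler().compile(imagepullsecrets_demo, __file__ + '.yaml')

Compiling this produces an Argo workflow whose spec carries the imagePullSecrets entry, which is the behavior the relocated sample exercises.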
255 changes: 139 additions & 116 deletions sdk/python/tests/compiler/compiler_tests.py
@@ -32,6 +32,9 @@


class TestCompiler(unittest.TestCase):
# Define the places of samples covered by unit tests.
core_sample_path = os.path.join(os.path.dirname(__file__), '..', '..', '..',
'..', 'samples', 'core',)

def test_operator_to_template(self):
"""Test converting operator to template"""
@@ -53,94 +56,94 @@ def test_operator_to_template(self):
name='GOOGLE_APPLICATION_CREDENTIALS',
value='/secret/gcp-credentials/user-gcp-sa.json'))
res = dsl.ResourceOp(
name="test-resource",
k8s_resource=k8s_client.V1PersistentVolumeClaim(
api_version="v1",
kind=kind,
metadata=k8s_client.V1ObjectMeta(
name="resource"
)
),
attribute_outputs={"out": json}
name="test-resource",
k8s_resource=k8s_client.V1PersistentVolumeClaim(
api_version="v1",
kind=kind,
metadata=k8s_client.V1ObjectMeta(
name="resource"
)
),
attribute_outputs={"out": json}
)
golden_output = {
'container': {
'image': 'image',
'args': [
'echo {{inputs.parameters.msg1}} {{inputs.parameters.msg2}} | tee /tmp/message.txt'
],
'command': ['sh', '-c'],
'env': [
{
'name': 'GOOGLE_APPLICATION_CREDENTIALS',
'value': '/secret/gcp-credentials/user-gcp-sa.json'
}
],
'volumeMounts':[
{
'mountPath': '/secret/gcp-credentials',
'name': 'gcp-credentials',
}
]
},
'inputs': {'parameters':
[
{'name': 'msg1'},
{'name': 'msg2', 'value': 'value2'},
]},
'name': 'echo',
'outputs': {
'parameters': [
{'name': 'echo-merged',
'valueFrom': {'path': '/tmp/message.txt'}
}],
'artifacts': [{
'name': 'mlpipeline-ui-metadata',
'path': '/mlpipeline-ui-metadata.json',
'optional': True,
},{
'name': 'mlpipeline-metrics',
'path': '/mlpipeline-metrics.json',
'optional': True,
}]
}
}
res_output = {
'inputs': {
'parameters': [{
'name': 'json'
}, {
'name': 'kind'
}]
},
'name': 'test-resource',
'outputs': {
'parameters': [{
'name': 'test-resource-manifest',
'valueFrom': {
'jsonPath': '{}'
}
}, {
'name': 'test-resource-name',
'valueFrom': {
'jsonPath': '{.metadata.name}'
}
}, {
'name': 'test-resource-out',
'valueFrom': {
'jsonPath': '{{inputs.parameters.json}}'
}
}]
},
'resource': {
'action': 'create',
'manifest': (
"apiVersion: v1\n"
"kind: '{{inputs.parameters.kind}}'\n"
"metadata:\n"
" name: resource\n"
)
}
}

self.maxDiff = None
@@ -282,6 +285,27 @@ def _test_py_compile_yaml(self, file_base_name):
finally:
shutil.rmtree(tmpdir)

def _test_sample_py_compile_yaml(self, file_base_name):
# Resolve the path to the sample under samples/core.
test_data_dir = os.path.join(self.core_sample_path, file_base_name)
py_file = os.path.join(test_data_dir, file_base_name + '.py')
tmpdir = tempfile.mkdtemp()
try:
target_yaml = os.path.join(tmpdir, file_base_name + '-pipeline.yaml')
subprocess.check_call(
['dsl-compile', '--py', py_file, '--output', target_yaml])
with open(os.path.join(test_data_dir, file_base_name + '.yaml'),
'r') as f:
golden = yaml.safe_load(f)

with open(os.path.join(test_data_dir, target_yaml), 'r') as f:
compiled = yaml.safe_load(f)

self.maxDiff = None
self.assertEqual(golden, compiled)
finally:
shutil.rmtree(tmpdir)

def test_py_compile_artifact_location(self):
"""Test configurable artifact location pipeline."""
self._test_py_compile_yaml('artifact_location')
@@ -314,9 +338,9 @@ def test_py_retry(self):
"""Test retry functionality."""
self._test_py_compile_yaml('retry')

-def test_py_image_pull_secret(self):
+def test_py_image_pull_secrets(self):
 """Test pipeline imagepullsecret."""
-self._test_py_compile_yaml('imagepullsecret')
+self._test_sample_py_compile_yaml('imagepullsecrets')

def test_py_timeout(self):
"""Test pipeline timeout."""
@@ -463,8 +487,8 @@ def my_pipeline(a: {'GCRPath': {'openapi_schema_validator': {"type": "string", "
def test_compile_pipeline_with_after(self):
def op():
return dsl.ContainerOp(
name='Some component name',
image='image'
)

@dsl.pipeline(name='Pipeline', description='')
@@ -489,16 +513,16 @@ def _test_op_to_template_yaml(self, ops, file_base_name):
def test_tolerations(self):
"""Test a pipeline with a tolerations."""
op1 = dsl.ContainerOp(
name='download',
image='busybox',
command=['sh', '-c'],
arguments=['sleep 10; wget localhost:5678 -O /tmp/results.txt'],
file_outputs={'downloaded': '/tmp/results.txt'}) \
.add_toleration(V1Toleration(
effect='NoSchedule',
key='gpu',
operator='Equal',
value='run'))

self._test_op_to_template_yaml(op1, file_base_name='tolerations')

Expand All @@ -507,12 +531,12 @@ def test_set_display_name(self):

import kfp
op1 = kfp.components.load_component_from_text(
'''
name: Component name
implementation:
container:
image: busybox
'''
)

@dsl.pipeline()
@@ -526,11 +550,11 @@ def some_pipeline():
def test_set_ttl_seconds_after_finished(self):
"""Test a pipeline with ttl after finished."""
def some_op():
return dsl.ContainerOp(
name='sleep',
image='busybox',
command=['sleep 1'],
)

@dsl.pipeline()
def some_pipeline():
@@ -567,17 +591,17 @@ def test_add_pod_env(self):

def test_init_container(self):
echo = dsl.UserContainer(
name='echo',
image='alpine:latest',
command=['echo', 'bye'])

@dsl.pipeline(name='InitContainer', description='A pipeline with init container.')
def init_container_pipeline():
dsl.ContainerOp(
name='hello',
image='alpine:latest',
command=['echo', 'hello'],
init_containers=[echo])

workflow_dict = compiler.Compiler()._compile(init_container_pipeline)
for template in workflow_dict['spec']['templates']:
@@ -586,4 +610,3 @@ def init_container_pipeline():
self.assertEqual(len(init_containers),1)
init_container = init_containers[0]
self.assertEqual(init_container, {'image':'alpine:latest', 'command': ['echo', 'bye'], 'name': 'echo'})
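As a usage note, the compile-and-compare flow that _test_sample_py_compile_yaml performs can also be reproduced by hand. A rough sketch follows; the repository-relative paths are assumptions derived from core_sample_path above and should be adjusted to the local checkout.

import os
import subprocess
import tempfile

import yaml

# Assumed location of the relocated sample, mirroring core_sample_path.
sample_dir = os.path.join('samples', 'core', 'imagepullsecrets')
py_file = os.path.join(sample_dir, 'imagepullsecrets.py')

with tempfile.TemporaryDirectory() as tmpdir:
    target_yaml = os.path.join(tmpdir, 'imagepullsecrets-pipeline.yaml')
    # Compile the sample with the same CLI the unit test invokes.
    subprocess.check_call(['dsl-compile', '--py', py_file, '--output', target_yaml])

    # Load the golden YAML shipped next to the sample and the fresh compile.
    with open(os.path.join(sample_dir, 'imagepullsecrets.yaml')) as f:
        golden = yaml.safe_load(f)
    with open(target_yaml) as f:
        compiled = yaml.safe_load(f)

    assert golden == compiled, 'compiled pipeline differs from the golden YAML'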