SDK - Fix pipeline metadata serialization
Two PRs have been merged that turned out to be slightly incompatible. This PR fixes the failing tests.
Root causes:
* The pipeline parameter default values were not properly serialized when constructing the metadata object.
* The `ParameterMeta` class did not validate the default value type, so the missing serialization went uncaught. `ParameterMeta` has since been replaced by `InputSpec`, which validates types strictly.
* Previously there were no samples with complex pipeline parameter default values (e.g. lists) that could trigger the failure; two such samples were then added (see the sketch after this list).
* Travis does not re-run tests before merging
* Prow does not re-run Travis tests before merging
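
To make the failure mode concrete, here is a minimal sketch. The `InputSpec` class below is a simplified stand-in for the strictly validated class in `kfp.components._structures`, not the real implementation:

```python
import json
from typing import NamedTuple, Optional

class InputSpec(NamedTuple):
    # Simplified stand-in for kfp.components._structures.InputSpec:
    # the real class validates field types strictly; default must be a str.
    name: str
    default: Optional[str] = None

    def validate(self):
        if self.default is not None and not isinstance(self.default, str):
            raise TypeError('default must be str, got %s' % type(self.default).__name__)

raw_default = [3, 5, 7, 9]  # a "complex" pipeline parameter default

# Before the fix: the raw list was passed straight through and fails validation.
try:
    InputSpec(name='loopidy_doop', default=raw_default).validate()
except TypeError as e:
    print('rejected:', e)

# After the fix: complex defaults are serialized (here: to a JSON string) first.
InputSpec(name='loopidy_doop', default=json.dumps(raw_default)).validate()  # passes
```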
Ark-kun committed Sep 17, 2019
1 parent 4166e7a commit 6a265f3
Showing 9 changed files with 15 additions and 9 deletions.
6 changes: 6 additions & 0 deletions sdk/python/kfp/dsl/_metadata.py
@@ -14,6 +14,7 @@
 
 import warnings
 from .types import BaseType, _check_valid_type_dict, _instance_to_dict
+from ..components._data_passing import serialize_value
 from ..components._structures import ComponentSpec, InputSpec, OutputSpec
 
 
@@ -67,6 +68,8 @@ def _extract_component_metadata(func):
       arg_default = arg_default.value
     if arg in annotations:
       arg_type = _annotation_to_typemeta(annotations[arg])
+    if arg_default is not None:
+      arg_default = serialize_value(arg_default, type_name=str(arg_type) if arg_type else None) # TODO: Improve _annotation_to_typemeta or just replace the whole function with kfp.component._python_op._extract_component_interface
     inputs.append(InputSpec(name=arg, type=arg_type, default=arg_default))
   # Outputs
   outputs = []
@@ -124,7 +127,10 @@ def _extract_pipeline_metadata(func):
         if isinstance(schema_object, str):
           # In case the property value for the schema validator is a string instead of a dict.
           schema_object = json.loads(schema_object)
+        # Only validating non-serialized values
         validate(instance=arg_default, schema=schema_object)
+    if arg_default is not None:
+      arg_default = serialize_value(arg_default, type_name=str(arg_type) if arg_type else None) # TODO: Improve _annotation_to_typemeta or just replace the whole function with kfp.component._python_op._extract_component_interface
     inputs.append(InputSpec(name=arg, type=arg_type, default=arg_default))
 
 #TODO: add descriptions to the metadata
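
For orientation, here is how `serialize_value` appears to behave for the default types touched by this commit. The expected outputs are inferred from the golden-file updates below, not from the function's implementation, so treat this as a hedged illustration:

```python
from kfp.components._data_passing import serialize_value

# Strings are treated as already serialized and pass through unchanged.
serialize_value('latest', type_name='String')   # -> 'latest'

# With type_name=None (the `str(arg_type) if arg_type else None` fallback in
# the fix), the serializer is presumably picked from the value's Python type.
serialize_value(10, type_name=None)             # -> '10'
serialize_value([3, 5, 7, 9], type_name=None)   # -> '[3, 5, 7, 9]' (JSON)
```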
2 changes: 1 addition & 1 deletion sdk/python/tests/compiler/testdata/pipelineparams.yaml
@@ -17,7 +17,7 @@ metadata:
   annotations:
     pipelines.kubeflow.org/pipeline_spec: '{"description": "A pipeline with multiple
       pipeline params.", "inputs": [{"default": "latest", "name":
-      "tag"}, {"default": 10, "name": "sleep_ms"}], "name": "PipelineParams"}'
+      "tag"}, {"default": "10", "name": "sleep_ms"}], "name": "PipelineParams"}'
   generateName: pipelineparams-
 spec:
   entrypoint: pipelineparams
2 changes: 1 addition & 1 deletion sdk/python/tests/compiler/testdata/recursive_while.yaml
@@ -3,7 +3,7 @@ kind: Workflow
 metadata:
   annotations:
     pipelines.kubeflow.org/pipeline_spec: '{"description": "shows how to use dsl.Condition.",
-      "inputs": [{"default": 12, "name": "maxVal"}],
+      "inputs": [{"default": "12", "name": "maxVal"}],
       "name": "pipeline flip coin"}'
   generateName: pipeline-flip-coin-
 spec:
2 changes: 1 addition & 1 deletion sdk/python/tests/compiler/testdata/withitem_basic.yaml
@@ -2,7 +2,7 @@ apiVersion: argoproj.io/v1alpha1
 kind: Workflow
 metadata:
   annotations:
-    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": 10, "name": "my_pipe_param"}],
+    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": "10", "name": "my_pipe_param"}],
       "name": "my-pipeline"}'
   generateName: my-pipeline-
 spec:
2 changes: 1 addition & 1 deletion sdk/python/tests/compiler/testdata/withitem_nested.yaml
@@ -2,7 +2,7 @@ apiVersion: argoproj.io/v1alpha1
 kind: Workflow
 metadata:
   annotations:
-    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": 10, "name": "my_pipe_param"}],
+    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": "10", "name": "my_pipe_param"}],
       "name": "my-pipeline"}'
   generateName: my-pipeline-
 spec:
2 changes: 1 addition & 1 deletion sdk/python/tests/compiler/testdata/withparam_global.yaml
@@ -2,7 +2,7 @@ apiVersion: argoproj.io/v1alpha1
 kind: Workflow
 metadata:
   annotations:
-    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": [3, 5, 7, 9], "name":
+    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": "[3, 5, 7, 9]", "name":
       "loopidy_doop"}], "name": "my-pipeline"}'
   generateName: my-pipeline-
 spec:
4 changes: 2 additions & 2 deletions sdk/python/tests/compiler/testdata/withparam_global_dict.yaml
@@ -2,8 +2,8 @@ apiVersion: argoproj.io/v1alpha1
 kind: Workflow
 metadata:
   annotations:
-    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": [{"a": 1, "b":
-      2}, {"a": 10, "b": 20}], "name": "loopidy_doop"}], "name": "my-pipeline"}'
+    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": "[{\"a\": 1, \"b\":
+      2}, {\"a\": 10, \"b\": 20}]", "name": "loopidy_doop"}], "name": "my-pipeline"}'
   generateName: my-pipeline-
 spec:
   arguments:
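
The escaped quotes in this golden file are just JSON-in-JSON nesting: the default value is serialized to a JSON string first, and that string is then embedded in the `pipeline_spec` annotation, itself a JSON document. A quick sanity check in plain Python (no KFP needed):

```python
import json

# First serialization: the complex default value becomes a JSON string.
default = json.dumps([{"a": 1, "b": 2}, {"a": 10, "b": 20}])

# Second serialization: the whole pipeline_spec is dumped to JSON, which
# escapes the inner quotes exactly as in the golden file above.
spec = {"inputs": [{"default": default, "name": "loopidy_doop"}], "name": "my-pipeline"}
print(json.dumps(spec))
# {"inputs": [{"default": "[{\"a\": 1, \"b\": 2}, {\"a\": 10, \"b\": 20}]", "name": ...
```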
2 changes: 1 addition & 1 deletion sdk/python/tests/dsl/component_tests.py
@@ -37,7 +37,7 @@ def componentA(a: {'ArtifactA': {'file_type': 'csv'}}, b: Integer() = 12, c: {'A
 
         golden_meta = ComponentSpec(name='componentA', inputs=[], outputs=[])
         golden_meta.inputs.append(InputSpec(name='a', type={'ArtifactA': {'file_type': 'csv'}}))
-        golden_meta.inputs.append(InputSpec(name='b', type={'Integer': {'openapi_schema_validator': {"type": "integer"}}}, default=12))
+        golden_meta.inputs.append(InputSpec(name='b', type={'Integer': {'openapi_schema_validator': {"type": "integer"}}}, default="12"))
         golden_meta.inputs.append(InputSpec(name='c', type={'ArtifactB': {'path_type':'file', 'file_type': 'tsv'}}, default='gs://hello/world'))
         golden_meta.outputs.append(OutputSpec(name='model', type={'Integer': {'openapi_schema_validator': {"type": "integer"}}}))
 
2 changes: 1 addition & 1 deletion sdk/python/tests/dsl/pipeline_tests.py
@@ -72,7 +72,7 @@ def my_pipeline1(a: {'Schema': {'file_type': 'csv'}}='good', b: Integer()=12):
 
         golden_meta = ComponentSpec(name='p1', description='description1', inputs=[])
         golden_meta.inputs.append(InputSpec(name='a', type={'Schema': {'file_type': 'csv'}}, default='good'))
-        golden_meta.inputs.append(InputSpec(name='b', type={'Integer': {'openapi_schema_validator': {"type": "integer"}}}, default=12))
+        golden_meta.inputs.append(InputSpec(name='b', type={'Integer': {'openapi_schema_validator': {"type": "integer"}}}, default="12"))
 
         pipeline_meta = _extract_pipeline_metadata(my_pipeline1)
         self.assertEqual(pipeline_meta, golden_meta)
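
Condensed, this is roughly what the updated test asserts. A sketch that assumes the imports used elsewhere in these test files (`pipeline` from `kfp.dsl`, `Integer` from `kfp.dsl.types`, `_extract_pipeline_metadata` from `kfp.dsl._metadata`):

```python
from kfp.dsl import pipeline
from kfp.dsl.types import Integer
from kfp.dsl._metadata import _extract_pipeline_metadata

@pipeline(name='p1', description='description1')
def my_pipeline1(a: {'Schema': {'file_type': 'csv'}}='good', b: Integer()=12):
    pass

meta = _extract_pipeline_metadata(my_pipeline1)
# The int default 12 now comes back serialized as the string "12".
assert meta.inputs[1].default == "12"
```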
