Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

SDK - Unified the function signature parsing implementations #2689

Prev Previous commit
Next Next commit
Changed _extract_pipeline_metadata to use _extract_component_interface
  • Loading branch information
Ark-kun committed Dec 3, 2019
commit e46a43e72ea7096dbfe1229c00ef6ae458635618
25 changes: 7 additions & 18 deletions sdk/python/kfp/dsl/_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,9 @@ def _annotation_to_typemeta(annotation):
def _extract_pipeline_metadata(func):
'''Creates pipeline metadata structure instance based on the function signature.'''

# Importing here to prevent circular import failures
#TODO: Change _pipeline_param to stop importing _metadata
# Most of this code is only needed for verifying the default values against "openapi_schema_validator" type properties.
Ark-kun marked this conversation as resolved.
Show resolved Hide resolved
# TODO: Move the value verification code to some other place

from ._pipeline_param import PipelineParam

import inspect
Expand All @@ -59,8 +60,6 @@ def _extract_pipeline_metadata(func):
for arg, default in zip(reversed(fullargspec.args), reversed(fullargspec.defaults)):
arg_defaults[arg] = default

# Inputs
inputs = []
for arg in args:
arg_type = None
arg_default = arg_defaults[arg] if arg in arg_defaults else None
Expand All @@ -79,19 +78,9 @@ def _extract_pipeline_metadata(func):
schema_object = json.loads(schema_object)
# Only validating non-serialized values
validate(instance=arg_default, schema=schema_object)
if arg_default is not None:
arg_default = serialize_value(arg_default, type_name=str(arg_type) if arg_type else None) # TODO: Improve _annotation_to_typemeta or just replace the whole function with kfp.component._python_op._extract_component_interface
inputs.append(InputSpec(name=arg, type=arg_type, default=arg_default))

#TODO: add descriptions to the metadata
#docstring parser:
# https://github.com/rr-/docstring_parser
# https://github.com/terrencepreilly/darglint/blob/master/darglint/parse.py

# Construct the ComponentSpec
pipeline_meta = ComponentSpec(
name=getattr(func, '_pipeline_name', func.__name__),
description=getattr(func, '_pipeline_description', func.__doc__),
inputs=inputs if inputs else None,
)
return pipeline_meta

from kfp.components._python_op import _extract_component_interface
component_spec = _extract_component_interface(func)
return component_spec
2 changes: 1 addition & 1 deletion sdk/python/tests/compiler/testdata/artifact_location.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
annotations:
pipelines.kubeflow.org/pipeline_spec: '{"description": "hello world", "inputs": [{"name": "tag"}, {"default": "kubeflow", "name": "namespace"}, {"default": "foobar", "name": "bucket"}], "name": "artifact-location-pipeine"}'
pipelines.kubeflow.org/pipeline_spec: '{"description": "hello world", "inputs": [{"name": "tag", "type": "String"}, {"default": "kubeflow", "name": "namespace", "type": "String"}, {"default": "foobar", "name": "bucket", "type": "String"}], "name": "artifact-location-pipeine"}'
generateName: artifact-location-pipeine-
spec:
arguments:
Expand Down
2 changes: 1 addition & 1 deletion sdk/python/tests/compiler/testdata/basic.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
annotations:
pipelines.kubeflow.org/pipeline_spec: '{"description": "Get Most Frequent Word and Save to GCS", "inputs": [{"name": "message"}, {"name": "outputpath"}], "name": "Save Most Frequent"}'
pipelines.kubeflow.org/pipeline_spec: '{"description": "Get Most Frequent Word and Save to GCS", "inputs": [{"name": "message", "type": "String"}, {"name": "outputpath", "type": "String"}], "name": "Save Most Frequent"}'
generateName: save-most-frequent-
spec:
arguments:
Expand Down
2 changes: 1 addition & 1 deletion sdk/python/tests/compiler/testdata/imagepullsecrets.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
annotations:
pipelines.kubeflow.org/pipeline_spec: '{"description": "Get Most Frequent Word and Save to GCS", "inputs": [{"name": "message"}], "name": "Save Most Frequent"}'
pipelines.kubeflow.org/pipeline_spec: '{"description": "Get Most Frequent Word and Save to GCS", "inputs": [{"name": "message", "type": "String"}], "name": "Save Most Frequent"}'
generateName: save-most-frequent-
spec:
arguments:
Expand Down
6 changes: 3 additions & 3 deletions sdk/python/tests/dsl/pipeline_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,8 +71,8 @@ def my_pipeline1(a: {'Schema': {'file_type': 'csv'}}='good', b: Integer()=12):
pass

golden_meta = ComponentSpec(name='p1', description='description1', inputs=[])
golden_meta.inputs.append(InputSpec(name='a', type={'Schema': {'file_type': 'csv'}}, default='good'))
golden_meta.inputs.append(InputSpec(name='b', type={'Integer': {'openapi_schema_validator': {"type": "integer"}}}, default="12"))
golden_meta.inputs.append(InputSpec(name='a', type={'Schema': {'file_type': 'csv'}}, default='good', optional=True))
golden_meta.inputs.append(InputSpec(name='b', type={'Integer': {'openapi_schema_validator': {"type": "integer"}}}, default="12", optional=True))

pipeline_meta = _extract_pipeline_metadata(my_pipeline1)
self.assertEqual(pipeline_meta, golden_meta)
self.assertEqual(pipeline_meta, golden_meta)