From de538aa3e3560d9237181b76675481513e5f1940 Mon Sep 17 00:00:00 2001
From: Ning
Date: Fri, 16 Aug 2019 15:53:07 -0700
Subject: [PATCH] add compile step in the samples to generate zip files (#1866)

* add compile step in the samples to generate zip files
---
 .../ai-platform/Chicago Crime Pipeline.ipynb  | 13 ++++++--
 .../artifact_location/artifact_location.py    |  4 +++
 samples/core/condition/condition.py           |  1 -
 .../DSL Static Type Checking.ipynb            | 31 ++++++++++++-------
 .../core/imagepullsecrets/imagepullsecrets.py |  4 +++
 .../kubeflow_tf_serving.ipynb}                |  0
 samples/core/recursion/recursion.py           |  2 +-
 samples/core/resource_ops/resourceop_basic.py | 11 +++----
 samples/core/sidecar/sidecar.py               |  5 ++-
 samples/core/volume_ops/volumeop.py           |  8 ++---
 .../volume_snapshot_ops/volume_snapshot_op.py |  9 ++----
 samples/core/xgboost_training_cm/README.md    |  4 +--
 12 files changed, 56 insertions(+), 36 deletions(-)
 rename samples/core/{model_serving_component/model_serving_component.ipynb => kubeflow_tf_serving/kubeflow_tf_serving.ipynb} (100%)

diff --git a/samples/core/ai-platform/Chicago Crime Pipeline.ipynb b/samples/core/ai-platform/Chicago Crime Pipeline.ipynb
index b72cfd5e7c9..0d700cf5886 100644
--- a/samples/core/ai-platform/Chicago Crime Pipeline.ipynb
+++ b/samples/core/ai-platform/Chicago Crime Pipeline.ipynb
@@ -267,7 +267,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "pipeline_filename = PIPELINE_FILENAME_PREFIX + '.pipeline.tar.gz'\n",
+    "pipeline_filename = PIPELINE_FILENAME_PREFIX + '.pipeline.zip'\n",
     "\n",
     "compiler.Compiler().compile(pipeline_func, pipeline_filename)"
    ]
@@ -318,8 +318,17 @@
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
    "version": "3.6.7"
+  },
+  "pycharm": {
+   "stem_cell": {
+    "cell_type": "raw",
+    "source": [],
+    "metadata": {
+     "collapsed": false
+    }
+   }
   }
  },
  "nbformat": 4,
  "nbformat_minor": 2
-}
+}
\ No newline at end of file
diff --git a/samples/core/artifact_location/artifact_location.py b/samples/core/artifact_location/artifact_location.py
index effc59e73ad..6bb886a778b 100644
--- a/samples/core/artifact_location/artifact_location.py
+++ b/samples/core/artifact_location/artifact_location.py
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import kfp
 from kfp import dsl
 from kubernetes.client import V1SecretKeySelector
 
@@ -40,3 +41,6 @@ def custom_artifact_location(
 
     # artifacts in this op are stored to endpoint `minio-service.<namespace>:9000`
     op = dsl.ContainerOp(name="foo", image="busybox:%s" % tag)
+
+if __name__ == '__main__':
+    kfp.compiler.Compiler().compile(custom_artifact_location, __file__ + '.zip')
diff --git a/samples/core/condition/condition.py b/samples/core/condition/condition.py
index 62125c20f37..7bcec319a1b 100755
--- a/samples/core/condition/condition.py
+++ b/samples/core/condition/condition.py
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 import kfp
 from kfp import dsl
 
diff --git a/samples/core/dsl_static_type_checking/DSL Static Type Checking.ipynb b/samples/core/dsl_static_type_checking/DSL Static Type Checking.ipynb
index f37ebc01c0f..64ef3b6bb9c 100644
--- a/samples/core/dsl_static_type_checking/DSL Static Type Checking.ipynb
+++ b/samples/core/dsl_static_type_checking/DSL Static Type Checking.ipynb
@@ -259,7 +259,7 @@
     "    a = task_factory_a(field_l=12)\n",
     "    b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
     "\n",
-    "compiler.Compiler().compile(pipeline_a, 'pipeline_a.tar.gz', type_check=True)"
+    "compiler.Compiler().compile(pipeline_a, 'pipeline_a.zip', type_check=True)"
    ]
   },
   {
@@ -365,7 +365,7 @@
     "    b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
     "\n",
     "try:\n",
-    "    compiler.Compiler().compile(pipeline_b, 'pipeline_b.tar.gz', type_check=True)\n",
+    "    compiler.Compiler().compile(pipeline_b, 'pipeline_b.zip', type_check=True)\n",
     "except InconsistentTypeException as e:\n",
     "    print(e)"
    ]
@@ -384,7 +384,7 @@
    "outputs": [],
    "source": [
     "# Disable the type_check\n",
-    "compiler.Compiler().compile(pipeline_b, 'pipeline_b.tar.gz', type_check=False)"
+    "compiler.Compiler().compile(pipeline_b, 'pipeline_b.zip', type_check=False)"
    ]
   },
   {
@@ -474,7 +474,7 @@
     "    a = task_factory_a(field_l=12)\n",
     "    b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
     "\n",
-    "compiler.Compiler().compile(pipeline_c, 'pipeline_c.tar.gz', type_check=True)"
+    "compiler.Compiler().compile(pipeline_c, 'pipeline_c.zip', type_check=True)"
    ]
   },
   {
@@ -572,7 +572,7 @@
     "    b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
     "\n",
     "try:\n",
-    "    compiler.Compiler().compile(pipeline_d, 'pipeline_d.tar.gz', type_check=True)\n",
+    "    compiler.Compiler().compile(pipeline_d, 'pipeline_d.zip', type_check=True)\n",
     "except InconsistentTypeException as e:\n",
     "    print(e)"
    ]
@@ -597,7 +597,7 @@
     "    a = task_factory_a(field_l=12)\n",
     "    # For each of the arguments, authors can also ignore the types by calling ignore_type function.\n",
     "    b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'].ignore_type())\n",
-    "compiler.Compiler().compile(pipeline_d, 'pipeline_d.tar.gz', type_check=True)"
+    "compiler.Compiler().compile(pipeline_d, 'pipeline_d.zip', type_check=True)"
    ]
   },
   {
@@ -684,7 +684,7 @@
     "    a = task_factory_a(field_l=12)\n",
     "    b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
     "\n",
-    "compiler.Compiler().compile(pipeline_e, 'pipeline_e.tar.gz', type_check=True)"
+    "compiler.Compiler().compile(pipeline_e, 'pipeline_e.zip', type_check=True)"
    ]
   },
   {
@@ -707,7 +707,7 @@
     "    a = task_factory_a(field_l=12)\n",
     "    b = task_factory_b(a.outputs['field_n'], a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
     "\n",
-    "compiler.Compiler().compile(pipeline_f, 'pipeline_f.tar.gz', type_check=True)"
+    "compiler.Compiler().compile(pipeline_f, 'pipeline_f.zip', type_check=True)"
    ]
   },
   {
@@ -750,7 +750,7 @@
     "    task_factory_a(field_m=a, field_o=b)\n",
     "\n",
     "try:\n",
-    "    compiler.Compiler().compile(pipeline_g, 'pipeline_g.tar.gz', type_check=True)\n",
+    "    compiler.Compiler().compile(pipeline_g, 'pipeline_g.zip', type_check=True)\n",
     "except InconsistentTypeException as e:\n",
     "    print(e)"
    ]
@@ -769,7 +769,7 @@
    "outputs": [],
    "source": [
"from pathlib import Path\n", - "for p in Path(\".\").glob(\"pipeline_[a-g].tar.gz\"):\n", + "for p in Path(\".\").glob(\"pipeline_[a-g].zip\"):\n", " p.unlink()" ] } @@ -792,8 +792,17 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.5" + }, + "pycharm": { + "stem_cell": { + "cell_type": "raw", + "source": [], + "metadata": { + "collapsed": false + } + } } }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/samples/core/imagepullsecrets/imagepullsecrets.py b/samples/core/imagepullsecrets/imagepullsecrets.py index 880ab9ab2ad..d31e030dbca 100644 --- a/samples/core/imagepullsecrets/imagepullsecrets.py +++ b/samples/core/imagepullsecrets/imagepullsecrets.py @@ -15,6 +15,7 @@ container registry. """ +import kfp import kfp.dsl as dsl from kubernetes import client as k8s_client @@ -52,3 +53,6 @@ def save_most_frequent_word(message: str): # Call set_image_pull_secrets after get_pipeline_conf(). dsl.get_pipeline_conf()\ .set_image_pull_secrets([k8s_client.V1ObjectReference(name="secretA")]) + +if __name__ == '__main__': + kfp.compiler.Compiler().compile(save_most_frequent_word, __file__ + '.zip') diff --git a/samples/core/model_serving_component/model_serving_component.ipynb b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb similarity index 100% rename from samples/core/model_serving_component/model_serving_component.ipynb rename to samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb diff --git a/samples/core/recursion/recursion.py b/samples/core/recursion/recursion.py index a5f219dac3f..0d57e6994dd 100644 --- a/samples/core/recursion/recursion.py +++ b/samples/core/recursion/recursion.py @@ -66,4 +66,4 @@ def flipcoin(): if __name__ == '__main__': - kfp.compiler.Compiler().compile(flipcoin, __file__ + '.tar.gz') + kfp.compiler.Compiler().compile(flipcoin, __file__ + '.zip') diff --git a/samples/core/resource_ops/resourceop_basic.py b/samples/core/resource_ops/resourceop_basic.py index 3079379cbdb..8593006d81d 100644 --- a/samples/core/resource_ops/resourceop_basic.py +++ b/samples/core/resource_ops/resourceop_basic.py @@ -18,10 +18,9 @@ It is not a good practice to put password as a pipeline argument, since it will be visible on KFP UI. """ - -from kubernetes import client as k8s_client +import kfp import kfp.dsl as dsl - +from kubernetes import client as k8s_client @dsl.pipeline( name="ResourceOp Basic", @@ -54,7 +53,5 @@ def resourceop_basic(username, password): pvolumes={"/etc/secret-volume": secret} ) - -if __name__ == "__main__": - import kfp.compiler as compiler - compiler.Compiler().compile(resourceop_basic, __file__ + ".tar.gz") +if __name__ == '__main__': + kfp.compiler.Compiler().compile(resourceop_basic, __file__ + '.zip') diff --git a/samples/core/sidecar/sidecar.py b/samples/core/sidecar/sidecar.py index 620040a6001..096ec475036 100644 --- a/samples/core/sidecar/sidecar.py +++ b/samples/core/sidecar/sidecar.py @@ -13,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import kfp import kfp.dsl as dsl - @dsl.pipeline( name="pipeline_with_sidecar", description="A pipeline that demonstrates how to add a sidecar to an operation." 
@@ -47,3 +47,6 @@ def pipeline_with_sidecar(sleep_ms: int = 10):
         command=["sh", "-c"],
         arguments=["echo %s" % op1.output],  # print out content of op1 output
     )
+
+if __name__ == '__main__':
+    kfp.compiler.Compiler().compile(pipeline_with_sidecar, __file__ + '.zip')
\ No newline at end of file
diff --git a/samples/core/volume_ops/volumeop.py b/samples/core/volume_ops/volumeop.py
index babf12db6d1..5d91a3d1867 100644
--- a/samples/core/volume_ops/volumeop.py
+++ b/samples/core/volume_ops/volumeop.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+import kfp
 import kfp.dsl as dsl
 
 
@@ -36,7 +36,5 @@ def volumeop_basic(size):
         pvolumes={"/mnt": vop.volume}
     )
 
-
-if __name__ == "__main__":
-    import kfp.compiler as compiler
-    compiler.Compiler().compile(volumeop_basic, __file__ + ".tar.gz")
+if __name__ == '__main__':
+    kfp.compiler.Compiler().compile(volumeop_basic, __file__ + '.zip')
\ No newline at end of file
diff --git a/samples/core/volume_snapshot_ops/volume_snapshot_op.py b/samples/core/volume_snapshot_ops/volume_snapshot_op.py
index 2b8500ec963..621e55cf3c0 100644
--- a/samples/core/volume_snapshot_ops/volume_snapshot_op.py
+++ b/samples/core/volume_snapshot_ops/volume_snapshot_op.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+import kfp
 import kfp.dsl as dsl
 
 
@@ -80,8 +80,5 @@ def volume_snapshotop_sequential(url):
         pvolumes={"/data": step3.pvolume}
     )
 
-
-if __name__ == "__main__":
-    import kfp.compiler as compiler
-    compiler.Compiler().compile(volume_snapshotop_sequential,
-                                __file__ + ".tar.gz")
+if __name__ == '__main__':
+    kfp.compiler.Compiler().compile(volume_snapshotop_sequential, __file__ + '.zip')
\ No newline at end of file
diff --git a/samples/core/xgboost_training_cm/README.md b/samples/core/xgboost_training_cm/README.md
index 14759859324..2e093c63090 100644
--- a/samples/core/xgboost_training_cm/README.md
+++ b/samples/core/xgboost_training_cm/README.md
@@ -14,11 +14,11 @@ Preprocessing uses Google Cloud DataProc. Therefore, you must enable the [DataPr
 
 ## Compile
 
-Follow the guide to [building a pipeline](https://www.kubeflow.org/docs/guides/pipelines/build-pipeline/) to install the Kubeflow Pipelines SDK and compile the sample Python into a workflow specification. The specification takes the form of a YAML file compressed into a `.tar.gz` file.
+Follow the guide to [building a pipeline](https://www.kubeflow.org/docs/guides/pipelines/build-pipeline/) to install the Kubeflow Pipelines SDK and compile the sample Python into a workflow specification. The specification takes the form of a YAML file compressed into a `.zip` file.
 
 ## Deploy
 
-Open the Kubeflow pipelines UI. Create a new pipeline, and then upload the compiled specification (`.tar.gz` file) as a new pipeline template.
+Open the Kubeflow pipelines UI. Create a new pipeline, and then upload the compiled specification (`.zip` file) as a new pipeline template.
 
 ## Run
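
For reviewers who want to try the new pattern locally, a minimal sketch follows. It is not part of the diff above: the pipeline name, the `echo` op, and its image are illustrative placeholders, while the `__main__` guard and the `__file__ + '.zip'` output path mirror the compile step this patch adds to each sample. As of the SDK version these samples target, the compiler picks the package format from the output path's extension, so passing a `.zip` path here replaces the samples' older `.tar.gz` output; the payload is the same workflow YAML either way, zip is simply easier to inspect with standard tools.

import kfp
import kfp.dsl as dsl


@dsl.pipeline(
    name="compile_step_demo",
    description="Illustrative pipeline showing the compile step added across the samples."
)
def compile_step_demo():
    # A trivial one-op pipeline; any of the sample pipelines above compiles the same way.
    # The op name, image, and command here are placeholders for illustration only.
    dsl.ContainerOp(
        name="echo",
        image="busybox",
        command=["sh", "-c"],
        arguments=["echo hello"],
    )


if __name__ == '__main__':
    # Compiling to a path ending in '.zip' produces a zip archive containing the
    # workflow YAML, matching the compile step this patch adds to each sample.
    kfp.compiler.Compiler().compile(compile_step_demo, __file__ + '.zip')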