From 8cb8377d7dba7ae1b5ea34630cbeeef20910e906 Mon Sep 17 00:00:00 2001
From: Ning
Date: Mon, 25 Feb 2019 13:29:04 -0800
Subject: [PATCH] Add postsubmit component test (#613)

* add postsubmit script and yaml

* remove old sample tests component file

* extract deploy-pipeline.sh, deploy-kubeflow.sh and test-prep.sh from
  presubmit and postsubmit scripts
---
 test/check-argo-status.sh                          |   4 +
 test/deploy-kubeflow.sh                            |  67 +++
 test/deploy-pipeline.sh                            |  56 ++
 ...stsubmit-tests-with-pipeline-deployment.sh      | 138 +++++
 ...resubmit-tests-with-pipeline-deployment.sh      |  88 +--
 test/sample_test.yaml                              | 235 ++++++++
 test/sample_test_components.yaml                   | 548 ------------------
 test/test-prep.sh                                  |  34 ++
 8 files changed, 541 insertions(+), 629 deletions(-)
 create mode 100755 test/deploy-kubeflow.sh
 create mode 100755 test/deploy-pipeline.sh
 create mode 100755 test/postsubmit-tests-with-pipeline-deployment.sh
 create mode 100644 test/sample_test.yaml
 delete mode 100644 test/sample_test_components.yaml
 create mode 100644 test/test-prep.sh

diff --git a/test/check-argo-status.sh b/test/check-argo-status.sh
index 47a8a1ba0e7..096d3b7a7c6 100644
--- a/test/check-argo-status.sh
+++ b/test/check-argo-status.sh
@@ -14,6 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+ARTIFACT_DIR=$WORKSPACE/_artifacts
+WORKFLOW_COMPLETE_KEYWORD="completed=true"
+WORKFLOW_FAILED_KEYWORD="phase=Failed"
+PULL_ARGO_WORKFLOW_STATUS_MAX_ATTEMPT=$(expr $TIMEOUT_SECONDS / 20 )
 echo "check status of argo workflow $ARGO_WORKFLOW...."
 # probing the argo workflow status until it completed. Timeout after 30 minutes
diff --git a/test/deploy-kubeflow.sh b/test/deploy-kubeflow.sh
new file mode 100755
index 00000000000..64dc9469f90
--- /dev/null
+++ b/test/deploy-kubeflow.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+#
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -x
+
+DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)"
+
+# Install ksonnet
+KS_VERSION="0.13.0"
+curl -LO https://github.com/ksonnet/ksonnet/releases/download/v${KS_VERSION}/ks_${KS_VERSION}_linux_amd64.tar.gz
+tar -xzf ks_${KS_VERSION}_linux_amd64.tar.gz
+chmod +x ./ks_${KS_VERSION}_linux_amd64/ks
+mv ./ks_${KS_VERSION}_linux_amd64/ks /usr/local/bin/
+
+# Download kubeflow master
+KUBEFLOW_MASTER=${DIR}/kubeflow_master
+git clone https://github.com/kubeflow/kubeflow.git ${KUBEFLOW_MASTER}
+
+## Download latest kubeflow release source code
+KUBEFLOW_SRC=${DIR}/kubeflow_latest_release
+mkdir ${KUBEFLOW_SRC}
+cd ${KUBEFLOW_SRC}
+export KUBEFLOW_TAG=v0.3.1
+curl https://raw.githubusercontent.com/kubeflow/kubeflow/${KUBEFLOW_TAG}/scripts/download.sh | bash
+
+## Override the pipeline config with code from master
+cp -r ${KUBEFLOW_MASTER}/kubeflow/pipeline ${KUBEFLOW_SRC}/kubeflow/pipeline
+cp -r ${KUBEFLOW_MASTER}/kubeflow/argo ${KUBEFLOW_SRC}/kubeflow/argo
+
+# TODO: temporarily set KUBEFLOW_SRC to KUBEFLOW_MASTER. This should be deleted once the latest release has the pipeline entry.
+KUBEFLOW_SRC=${KUBEFLOW_MASTER}
+
+export CLIENT_ID=${RANDOM}
+export CLIENT_SECRET=${RANDOM}
+KFAPP=${TEST_CLUSTER}
+
+function clean_up {
+  echo "Clean up..."
+  cd ${KFAPP}
+  ${KUBEFLOW_SRC}/scripts/kfctl.sh delete all
+  # delete the storage
+  gcloud deployment-manager --project=${PROJECT} deployments delete ${KFAPP}-storage --quiet
+}
+trap clean_up EXIT
+
+${KUBEFLOW_SRC}/scripts/kfctl.sh init ${KFAPP} --platform ${PLATFORM} --project ${PROJECT} --skipInitProject
+
+cd ${KFAPP}
+${KUBEFLOW_SRC}/scripts/kfctl.sh generate platform
+${KUBEFLOW_SRC}/scripts/kfctl.sh apply platform
+${KUBEFLOW_SRC}/scripts/kfctl.sh generate k8s
+${KUBEFLOW_SRC}/scripts/kfctl.sh apply k8s
+
+gcloud container clusters get-credentials ${TEST_CLUSTER}
\ No newline at end of file
diff --git a/test/deploy-pipeline.sh b/test/deploy-pipeline.sh
new file mode 100755
index 00000000000..39c73a19a19
--- /dev/null
+++ b/test/deploy-pipeline.sh
@@ -0,0 +1,56 @@
+#!/bin/bash
+#
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -x
+
+usage()
+{
+    echo "usage: deploy-pipeline.sh
+    [--gcr_image_base_dir   the gcr image base directory including images such as apiImage and persistenceAgentImage]
+    [--gcr_image_tag        the image tag used for images such as apiImage and persistenceAgentImage]
+    [-h help]"
+}
+
+GCR_IMAGE_TAG=latest
+
+while [ "$1" != "" ]; do
+    case $1 in
+             --gcr_image_base_dir )   shift
+                                      GCR_IMAGE_BASE_DIR=$1
+                                      ;;
+             --gcr_image_tag )        shift
+                                      GCR_IMAGE_TAG=$1
+                                      ;;
+             -h | --help )            usage
+                                      exit
+                                      ;;
+             * )                      usage
+                                      exit 1
+    esac
+    shift
+done
+
+cd ${KUBEFLOW_SRC}
+cd ${KFAPP}
+
+## Update the pipeline component images
+pushd ks_app
+ks param set pipeline apiImage ${GCR_IMAGE_BASE_DIR}/api:${GCR_IMAGE_TAG}
+ks param set pipeline persistenceAgentImage ${GCR_IMAGE_BASE_DIR}/persistenceagent:${GCR_IMAGE_TAG}
+ks param set pipeline scheduledWorkflowImage ${GCR_IMAGE_BASE_DIR}/scheduledworkflow:${GCR_IMAGE_TAG}
+ks param set pipeline uiImage ${GCR_IMAGE_BASE_DIR}/frontend:${GCR_IMAGE_TAG}
+ks apply default -c pipeline
+popd
\ No newline at end of file
diff --git a/test/postsubmit-tests-with-pipeline-deployment.sh b/test/postsubmit-tests-with-pipeline-deployment.sh
new file mode 100755
index 00000000000..812d7024019
--- /dev/null
+++ b/test/postsubmit-tests-with-pipeline-deployment.sh
@@ -0,0 +1,138 @@
+#!/bin/bash
+#
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -x
+
+usage()
+{
+    echo "usage: postsubmit-tests-with-pipeline-deployment.sh
+    [--platform             the deployment platform. Valid values are: [gcp, minikube]. Default is gcp.]
+    [--workflow_file        the file name of the argo workflow to run]
+    [--test_result_bucket   the gcs bucket that the argo workflow stores the result to. Default is ml-pipeline-test]
+    [--test_result_folder   the gcs folder that the argo workflow stores the result to. Always a relative directory to gs://<test_result_bucket>/[PULL_SHA]]
+    [--timeout              timeout of the tests in seconds. Default is 1800 seconds.]
+    [-h help]"
+}
+
+PLATFORM=gcp
+PROJECT=ml-pipeline-test
+TEST_RESULT_BUCKET=ml-pipeline-test
+GCR_IMAGE_BASE_DIR=gcr.io/ml-pipeline-staging
+TARGET_IMAGE_BASE_DIR=gcr.io/ml-pipeline-test/${PULL_BASE_SHA}
+TIMEOUT_SECONDS=1800
+NAMESPACE=kubeflow
+
+while [ "$1" != "" ]; do
+    case $1 in
+             --platform )             shift
+                                      PLATFORM=$1
+                                      ;;
+             --workflow_file )        shift
+                                      WORKFLOW_FILE=$1
+                                      ;;
+             --test_result_bucket )   shift
+                                      TEST_RESULT_BUCKET=$1
+                                      ;;
+             --test_result_folder )   shift
+                                      TEST_RESULT_FOLDER=$1
+                                      ;;
+             --timeout )              shift
+                                      TIMEOUT_SECONDS=$1
+                                      ;;
+             -h | --help )            usage
+                                      exit
+                                      ;;
+             * )                      usage
+                                      exit 1
+    esac
+    shift
+done
+
+# Variables
+# Refer to https://github.com/kubernetes/test-infra/blob/e357ffaaeceafe737bd6ab89d2feff132d92ea50/prow/jobs.md for the Prow job environment variables
+TEST_RESULTS_GCS_DIR=gs://${TEST_RESULT_BUCKET}/${PULL_BASE_SHA}/${TEST_RESULT_FOLDER}
+DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)"
+
+echo "postsubmit test starts"
+
+source "${DIR}/test-prep.sh"
+source "${DIR}/deploy-kubeflow.sh"
+
+# Install Argo
+source "${DIR}/install-argo.sh"
+
+## Wait for the Cloud Build job to start
+CLOUDBUILD_TIMEOUT_SECONDS=3600
+PULL_CLOUDBUILD_STATUS_MAX_ATTEMPT=$(expr ${CLOUDBUILD_TIMEOUT_SECONDS} / 20 )
+CLOUDBUILD_STARTED=TIMEOUT
+
+for i in $(seq 1 ${PULL_CLOUDBUILD_STATUS_MAX_ATTEMPT})
+do
+  output=`gcloud builds list --filter="sourceProvenance.resolvedRepoSource.commitSha:${PULL_BASE_SHA}"`
+  if [[ ${output} != "" ]]; then
+    CLOUDBUILD_STARTED=True
+    break
+  fi
+  sleep 20
+done
+
+if [[ ${CLOUDBUILD_STARTED} == TIMEOUT ]]; then
+  echo "Timed out waiting for the Cloud Build job to start. Exiting..."
+  exit 1
+fi
+
+## Wait for the Cloud Build job to complete
+CLOUDBUILD_FINISHED=TIMEOUT
+for i in $(seq 1 ${PULL_CLOUDBUILD_STATUS_MAX_ATTEMPT})
+do
+  output=`gcloud builds list --filter="sourceProvenance.resolvedRepoSource.commitSha:${PULL_BASE_SHA}"`
+  if [[ ${output} == *"SUCCESS"* ]]; then
+    CLOUDBUILD_FINISHED=SUCCESS
+    break
+  elif [[ ${output} == *"FAILURE"* ]]; then
+    CLOUDBUILD_FINISHED=FAILURE
+    break
+  fi
+  sleep 20
+done
+
+if [[ ${CLOUDBUILD_FINISHED} == FAILURE ]]; then
+  echo "Cloud Build failed; postsubmit tests cannot proceed. Exiting..."
+  exit 1
+elif [[ ${CLOUDBUILD_FINISHED} == TIMEOUT ]]; then
+  echo "Timed out waiting for the Cloud Build job to finish. Exiting..."
+  exit 1
+fi
+
+# Deploy the pipeline
+source ${DIR}/deploy-pipeline.sh --gcr_image_base_dir ${GCR_IMAGE_BASE_DIR} --gcr_image_tag ${PULL_BASE_SHA}
+
+# Submit the argo job and check the results
+echo "submitting argo workflow for commit ${PULL_BASE_SHA}..."
+ARGO_WORKFLOW=`argo submit ${DIR}/${WORKFLOW_FILE} \
+-p image-build-context-gcs-uri="$remote_code_archive_uri" \
+-p commit-sha="${PULL_BASE_SHA}" \
+-p component-image-prefix="${GCR_IMAGE_BASE_DIR}/" \
+-p target-image-prefix="${TARGET_IMAGE_BASE_DIR}/" \
+-p test-results-gcs-dir="${TEST_RESULTS_GCS_DIR}" \
+-p cluster-type="${CLUSTER_TYPE}" \
+-n ${NAMESPACE} \
+--serviceaccount test-runner \
+-o name
+`
+echo "argo workflow submitted successfully"
+source "${DIR}/check-argo-status.sh"
+echo "test workflow completed"
diff --git a/test/presubmit-tests-with-pipeline-deployment.sh b/test/presubmit-tests-with-pipeline-deployment.sh
index 4892b2069f6..d7219e43d33 100755
--- a/test/presubmit-tests-with-pipeline-deployment.sh
+++ b/test/presubmit-tests-with-pipeline-deployment.sh
@@ -60,82 +60,18 @@ while [ "$1" != "" ]; do
     shift
 done
 
+# Variables
 TEST_RESULTS_GCS_DIR=gs://${TEST_RESULT_BUCKET}/${PULL_PULL_SHA}/${TEST_RESULT_FOLDER}
-ARTIFACT_DIR=$WORKSPACE/_artifacts
-WORKFLOW_COMPLETE_KEYWORD="completed=true"
-WORKFLOW_FAILED_KEYWORD="phase=Failed"
-PULL_ARGO_WORKFLOW_STATUS_MAX_ATTEMPT=$(expr $TIMEOUT_SECONDS / 20 )
 DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)"
 
 echo "presubmit test starts"
+source "${DIR}/test-prep.sh"
+source "${DIR}/deploy-kubeflow.sh"
 
-# activating the service account
-gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}"
-gcloud config set compute/zone us-east1-b
-gcloud config set core/project ${PROJECT}
-
-#Uploading the source code to GCS:
-local_code_archive_file=$(mktemp)
-date_string=$(TZ=PST8PDT date +%Y-%m-%d_%H-%M-%S_%Z)
-code_archive_prefix="gs://${TEST_RESULT_BUCKET}/${PULL_PULL_SHA}/source_code"
-remote_code_archive_uri="${code_archive_prefix}_${PULL_BASE_SHA}_${date_string}.tar.gz"
-
-tar -czf "$local_code_archive_file" .
-gsutil cp "$local_code_archive_file" "$remote_code_archive_uri"
-
-# Install ksonnet
-KS_VERSION="0.13.0"
-curl -LO https://github.com/ksonnet/ksonnet/releases/download/v${KS_VERSION}/ks_${KS_VERSION}_linux_amd64.tar.gz
-tar -xzf ks_${KS_VERSION}_linux_amd64.tar.gz
-chmod +x ./ks_${KS_VERSION}_linux_amd64/ks
-mv ./ks_${KS_VERSION}_linux_amd64/ks /usr/local/bin/
-
-# Download kubeflow master
-KUBEFLOW_MASTER=${DIR}/kubeflow_master
-git clone https://github.com/kubeflow/kubeflow.git ${KUBEFLOW_MASTER}
-
-## Download latest kubeflow release source code
-KUBEFLOW_SRC=${DIR}/kubeflow_latest_release
-mkdir ${KUBEFLOW_SRC}
-cd ${KUBEFLOW_SRC}
-export KUBEFLOW_TAG=v0.3.1
-curl https://raw.githubusercontent.com/kubeflow/kubeflow/${KUBEFLOW_TAG}/scripts/download.sh | bash
-
-## Override the pipeline config with code from master
-cp -r ${KUBEFLOW_MASTER}/kubeflow/pipeline ${KUBEFLOW_SRC}/kubeflow/pipeline
-cp -r ${KUBEFLOW_MASTER}/kubeflow/argo ${KUBEFLOW_SRC}/kubeflow/argo
-
-# TODO temporarily set KUBEFLOW_SRC as KUBEFLOW_MASTER. This should be deleted when latest release have the pipeline entry
-KUBEFLOW_SRC=${KUBEFLOW_MASTER}
-
-TEST_CLUSTER_PREFIX=${WORKFLOW_FILE%.*}
-TEST_CLUSTER=$(echo $TEST_CLUSTER_PREFIX | cut -d _ -f 1)-${PULL_PULL_SHA:0:7}-${RANDOM}
-
-export CLIENT_ID=${RANDOM}
-export CLIENT_SECRET=${RANDOM}
-KFAPP=${TEST_CLUSTER}
-
-function clean_up {
-  echo "Clean up..."
-  cd ${KFAPP}
-  ${KUBEFLOW_SRC}/scripts/kfctl.sh delete all
-  # delete the storage
-  gcloud deployment-manager --project=${PROJECT} deployments delete ${KFAPP}-storage --quiet
-}
-trap clean_up EXIT
-
-${KUBEFLOW_SRC}/scripts/kfctl.sh init ${KFAPP} --platform ${PLATFORM} --project ${PROJECT} --skipInitProject
-
-cd ${KFAPP}
-${KUBEFLOW_SRC}/scripts/kfctl.sh generate platform
-${KUBEFLOW_SRC}/scripts/kfctl.sh apply platform
-${KUBEFLOW_SRC}/scripts/kfctl.sh generate k8s
-${KUBEFLOW_SRC}/scripts/kfctl.sh apply k8s
-
-gcloud container clusters get-credentials ${TEST_CLUSTER}
-
+# Install Argo
 source "${DIR}/install-argo.sh"
 
+# Build Images
 echo "submitting argo workflow to build docker images for commit ${PULL_PULL_SHA}..."
 ARGO_WORKFLOW=`argo submit ${DIR}/build_image.yaml \
 -p image-build-context-gcs-uri="$remote_code_archive_uri" \
@@ -151,19 +87,11 @@ ARGO_WORKFLOW=`argo submit ${DIR}/build_image.yaml \
 -o name
 `
 echo "build docker images workflow submitted successfully"
-
 source "${DIR}/check-argo-status.sh"
-
 echo "build docker images workflow completed"
 
-## Update pipeline component with the newly built image
-pushd ks_app
-ks param set pipeline apiImage ${GCR_IMAGE_BASE_DIR}/api
-ks param set pipeline persistenceAgentImage ${GCR_IMAGE_BASE_DIR}/persistenceagent
-ks param set pipeline scheduledWorkflowImage ${GCR_IMAGE_BASE_DIR}/scheduledworkflow
-ks param set pipeline uiImage ${GCR_IMAGE_BASE_DIR}/frontend
-ks apply default -c pipeline
-popd
+# Deploy the pipeline
+source ${DIR}/deploy-pipeline.sh --gcr_image_base_dir ${GCR_IMAGE_BASE_DIR}
 
 echo "submitting argo workflow to run tests for commit ${PULL_PULL_SHA}..."
 ARGO_WORKFLOW=`argo submit ${DIR}/${WORKFLOW_FILE} \
@@ -177,7 +105,5 @@ ARGO_WORKFLOW=`argo submit ${DIR}/${WORKFLOW_FILE} \
 `
 echo "test workflow submitted successfully"
-
 source "${DIR}/check-argo-status.sh"
-
 echo "test workflow completed"
diff --git a/test/sample_test.yaml b/test/sample_test.yaml
new file mode 100644
index 00000000000..3b286ae9025
--- /dev/null
+++ b/test/sample_test.yaml
@@ -0,0 +1,235 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# TODO: change the yaml file name to component_integration_test after committing to test-infra
+apiVersion: argoproj.io/v1alpha1
+kind: Workflow
+metadata:
+  generateName: sample-test-
+spec:
+  entrypoint: sample-test
+  volumes:
+  - name: gcp-credentials
+    secret:
+      secretName: user-gcp-sa
+  arguments:
+    parameters:
+    - name: image-build-context-gcs-uri
+    - name: commit-sha
+    - name: component-image-prefix
+    - name: target-image-prefix
+    - name: test-results-gcs-dir
+    - name: sample-tests-image-suffix
+      value: sample-tests
+    - name: namespace
+      value: kubeflow
+  templates:
+  - name: sample-test
+    inputs:
+      parameters:
+      - name: commit-sha
+      - name: component-image-prefix
+      - name: target-image-prefix
+      - name: test-results-gcs-dir
+      - name: sample-tests-image-suffix
+      - name: namespace
+    steps:
+    - - name: build-sample-tests-image
+        template: build-image-by-dockerfile
+        arguments:
+          parameters:
+          - name: docker-path
+            value: .
+          - name: docker-file
+            value: test/sample-test/Dockerfile
+          - name: image-name
+            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.sample-tests-image-suffix}}"
+    - - name: run-tfx-tests
+        template: run-sample-tests
+        arguments:
+          parameters:
+          - name: test-results-gcs-dir
+            value: "{{inputs.parameters.test-results-gcs-dir}}"
+          - name: target-image-prefix
+            value: "{{inputs.parameters.target-image-prefix}}"
+          - name: dataflow-tft-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataflow-tft:{{inputs.parameters.commit-sha}}"
+          - name: dataflow-predict-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataflow-tf-predict:{{inputs.parameters.commit-sha}}"
+          - name: dataflow-tfma-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataflow-tfma:{{inputs.parameters.commit-sha}}"
+          - name: dataflow-tfdv-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataflow-tfdv:{{inputs.parameters.commit-sha}}"
+          - name: dataproc-create-cluster-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-create-cluster:{{inputs.parameters.commit-sha}}"
+          - name: dataproc-delete-cluster-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-delete-cluster:{{inputs.parameters.commit-sha}}"
+          - name: dataproc-analyze-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-analyze:{{inputs.parameters.commit-sha}}"
+          - name: dataproc-transform-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-transform:{{inputs.parameters.commit-sha}}"
+          - name: dataproc-train-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-train:{{inputs.parameters.commit-sha}}"
+          - name: dataproc-predict-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-predict:{{inputs.parameters.commit-sha}}"
+          - name: kubeflow-dnntrainer-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-kubeflow-tf-trainer:{{inputs.parameters.commit-sha}}"
+          - name: kubeflow-deployer-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-kubeflow-deployer:{{inputs.parameters.commit-sha}}"
+          - name: local-confusionmatrix-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-local-confusion-matrix:{{inputs.parameters.commit-sha}}"
+          - name: local-roc-image
+            value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-local-roc:{{inputs.parameters.commit-sha}}"
+          - name: sample-tests-image
"{{inputs.parameters.target-image-prefix}}{{inputs.parameters.sample-tests-image-suffix}}" + - name: namespace + value: "{{inputs.parameters.namespace}}" + - name: test-name + value: "tfx" + - name: run-xgboost-tests + template: run-sample-tests + arguments: + parameters: + - name: test-results-gcs-dir + value: "{{inputs.parameters.test-results-gcs-dir}}" + - name: target-image-prefix + value: "{{inputs.parameters.target-image-prefix}}" + - name: dataflow-tft-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataflow-tft:{{inputs.parameters.commit-sha}}" + - name: dataflow-predict-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataflow-tf-predict:{{inputs.parameters.commit-sha}}" + - name: dataflow-tfma-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataflow-tfma:{{inputs.parameters.commit-sha}}" + - name: dataflow-tfdv-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataflow-tfdv:{{inputs.parameters.commit-sha}}" + - name: dataproc-create-cluster-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-create-cluster:{{inputs.parameters.commit-sha}}" + - name: dataproc-delete-cluster-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-delete-cluster:{{inputs.parameters.commit-sha}}" + - name: dataproc-analyze-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-analyze:{{inputs.parameters.commit-sha}}" + - name: dataproc-transform-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-transform:{{inputs.parameters.commit-sha}}" + - name: dataproc-train-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-train:{{inputs.parameters.commit-sha}}" + - name: dataproc-predict-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-dataproc-predict:{{inputs.parameters.commit-sha}}" + - name: kubeflow-dnntrainer-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-kubeflow-tf-trainer:{{inputs.parameters.commit-sha}}" + - name: kubeflow-deployer-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-kubeflow-deployer:{{inputs.parameters.commit-sha}}" + - name: local-confusionmatrix-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-local-confusion-matrix:{{inputs.parameters.commit-sha}}" + - name: local-roc-image + value: "{{inputs.parameters.component-image-prefix}}ml-pipeline-local-roc:{{inputs.parameters.commit-sha}}" + - name: sample-tests-image + value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.sample-tests-image-suffix}}" + - name: namespace + value: "{{inputs.parameters.namespace}}" + - name: test-name + value: "xgboost" + + # Build and push image + - name: build-image-by-dockerfile + inputs: + parameters: + # GCS URI prefix pointing to a .tar.gz archive of Docker build context + - name: image-build-context-gcs-uri + value: "{{workflow.parameters.image-build-context-gcs-uri}}" + # The relative code path to the Dockerfile + - name: docker-path + # Name of the Docker file to use. 
"Dockerfile" by default + - name: docker-file + value: Dockerfile + - name: image-name + outputs: + parameters: + - name: strict-image-name + valueFrom: + path: /outputs/strict-image-name/file + container: + image: gcr.io/ml-pipeline-test/image-builder:v20181128-0.1.3-rc.1-109-ga5a14dc-e3b0c4 + imagePullPolicy: 'Always' + args: [ + "--image-build-context-gcs-uri", "{{inputs.parameters.image-build-context-gcs-uri}}", + "--docker_path", "{{inputs.parameters.docker-path}}", + "--docker_file", "{{inputs.parameters.docker-file}}", + "--image_name", "{{inputs.parameters.image-name}}", + ] + env: + - name: DOCKER_HOST + value: 127.0.0.1 + - name: GOOGLE_APPLICATION_CREDENTIALS + value: /secret/gcp-credentials/user-gcp-sa.json + volumeMounts: + - name: gcp-credentials + mountPath: /secret/gcp-credentials + sidecars: + - name: dind + image: docker:17.10-dind + securityContext: + privileged: true + mirrorVolumeMounts: true + + - name: run-sample-tests + inputs: + parameters: + - name: test-results-gcs-dir + - name: target-image-prefix + - name: dataflow-tft-image + - name: dataflow-predict-image + - name: dataflow-tfma-image + - name: dataflow-tfdv-image + - name: dataproc-create-cluster-image + - name: dataproc-delete-cluster-image + - name: dataproc-analyze-image + - name: dataproc-transform-image + - name: dataproc-train-image + - name: dataproc-predict-image + - name: kubeflow-dnntrainer-image + - name: kubeflow-deployer-image + - name: local-confusionmatrix-image + - name: local-roc-image + - name: sample-tests-image + - name: namespace + - name: test-name + container: + image: "{{inputs.parameters.sample-tests-image}}" + args: [ + "--results-gcs-dir", "{{inputs.parameters.test-results-gcs-dir}}", + "--target-image-prefix", "{{inputs.parameters.target-image-prefix}}", + "--dataflow-tft-image","{{inputs.parameters.dataflow-tft-image}}", + "--dataflow-predict-image","{{inputs.parameters.dataflow-predict-image}}", + "--dataflow-tfma-image","{{inputs.parameters.dataflow-tfma-image}}", + "--dataflow-tfdv-image","{{inputs.parameters.dataflow-tfdv-image}}", + "--dataproc-create-cluster-image","{{inputs.parameters.dataproc-create-cluster-image}}", + "--dataproc-delete-cluster-image","{{inputs.parameters.dataproc-delete-cluster-image}}", + "--dataproc-analyze-image","{{inputs.parameters.dataproc-analyze-image}}", + "--dataproc-transform-image","{{inputs.parameters.dataproc-transform-image}}", + "--dataproc-train-image","{{inputs.parameters.dataproc-train-image}}", + "--dataproc-predict-image","{{inputs.parameters.dataproc-predict-image}}", + "--kubeflow-dnntrainer-image","{{inputs.parameters.kubeflow-dnntrainer-image}}", + "--kubeflow-deployer-image","{{inputs.parameters.kubeflow-deployer-image}}", + "--local-confusionmatrix-image", "{{inputs.parameters.local-confusionmatrix-image}}", + "--local-roc-image", "{{inputs.parameters.local-roc-image}}", + "--namespace", "{{inputs.parameters.namespace}}", + "--test-name", "{{inputs.parameters.test-name}}", + ] + env: + - name: GOOGLE_APPLICATION_CREDENTIALS + value: /secret/gcp-credentials/user-gcp-sa.json + volumeMounts: + - name: gcp-credentials + mountPath: /secret/gcp-credentials + diff --git a/test/sample_test_components.yaml b/test/sample_test_components.yaml deleted file mode 100644 index 7182cf489c5..00000000000 --- a/test/sample_test_components.yaml +++ /dev/null @@ -1,548 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-apiVersion: argoproj.io/v1alpha1
-kind: Workflow
-metadata:
-  generateName: sample-test-
-spec:
-  entrypoint: sample-test
-  volumes:
-  - name: gcp-credentials
-    secret:
-      secretName: user-gcp-sa
-  arguments:
-    parameters:
-    - name: image-build-context-gcs-uri
-    - name: target-image-prefix
-    - name: test-results-gcs-dir
-    - name: dataflow-tft-image-suffix
-      value: dataflow-tft
-    - name: dataflow-predict-image-suffix
-      value: dataflow-predict
-    - name: dataflow-tfma-image-suffix
-      value: dataflow-tfma
-    - name: dataflow-tfdv-image-suffix
-      value: dataflow-tfdv
-    - name: dataproc-create-cluster-image-suffix
-      value: dataproc-create-cluster
-    - name: dataproc-delete-cluster-image-suffix
-      value: dataproc-delete-cluster
-    - name: dataproc-analyze-image-suffix
-      value: dataproc-analyze
-    - name: dataproc-transform-image-suffix
-      value: dataproc-transform
-    - name: dataproc-train-image-suffix
-      value: dataproc-train
-    - name: dataproc-predict-image-suffix
-      value: dataproc-predict
-    - name: kubeflow-dnntrainer-image-suffix
-      value: kubeflow-dnntrainer
-    - name: kubeflow-deployer-image-suffix
-      value: kubeflow-deployer
-    - name: local-confusionmatrix-image-suffix
-      value: local-confusionmatrix
-    - name: local-roc-image-suffix
-      value: local-roc
-    - name: sample-tests-image-suffix
-      value: sample-tests
-    - name: namespace
-      value: kubeflow
-  templates:
-  - name: sample-test
-    inputs:
-      parameters:
-      - name: target-image-prefix
-      - name: test-results-gcs-dir
-      - name: dataflow-tft-image-suffix
-      - name: dataflow-predict-image-suffix
-      - name: dataflow-tfma-image-suffix
-      - name: dataflow-tfdv-image-suffix
-      - name: dataproc-create-cluster-image-suffix
-      - name: dataproc-delete-cluster-image-suffix
-      - name: dataproc-analyze-image-suffix
-      - name: dataproc-transform-image-suffix
-      - name: dataproc-train-image-suffix
-      - name: dataproc-predict-image-suffix
-      - name: kubeflow-dnntrainer-image-suffix
-      - name: kubeflow-deployer-image-suffix
-      - name: local-confusionmatrix-image-suffix
-      - name: local-roc-image-suffix
-      - name: sample-tests-image-suffix
-      - name: namespace
-    steps:
-    - - name: build-sample-tests-image
-        template: build-image-by-dockerfile
-        arguments:
-          parameters:
-          - name: docker-path
-            value: .
-          - name: docker-file
-            value: test/sample-test/Dockerfile
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.sample-tests-image-suffix}}"
-      - name: build-dataflow-tft-image
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tft-image-suffix}}"
-          - name: build-script
-            value: components/dataflow/tft/build_image.sh
-      - name: build-dataflow-predict-image
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-predict-image-suffix}}"
-          - name: build-script
-            value: components/dataflow/predict/build_image.sh
-      - name: build-dataflow-tfma-image
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tfma-image-suffix}}"
-          - name: build-script
-            value: components/dataflow/tfma/build_image.sh
-      - name: build-dataflow-tfdv-image
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tfdv-image-suffix}}"
-          - name: build-script
-            value: components/dataflow/tfdv/build_image.sh
-      - name: build-dataproc-create-cluster-image
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-create-cluster-image-suffix}}"
-          - name: build-script
-            value: components/dataproc/create_cluster/build_image.sh
-      - name: build-dataproc-delete-cluster-image
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-delete-cluster-image-suffix}}"
-          - name: build-script
-            value: components/dataproc/delete_cluster/build_image.sh
-      - name: build-dataproc-analyze-image
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-analyze-image-suffix}}"
-          - name: build-script
-            value: components/dataproc/analyze/build_image.sh
-      - name: build-dataproc-transform-image
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-transform-image-suffix}}"
-          - name: build-script
-            value: components/dataproc/transform/build_image.sh
-      - name: build-dataproc-train-image
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-train-image-suffix}}"
-          - name: build-script
-            value: components/dataproc/train/build_image.sh
-      - name: build-dataproc-predict-image
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-predict-image-suffix}}"
-          - name: build-script
-            value: components/dataproc/predict/build_image.sh
-      - name: build-kubeflow-dnntrainer-image
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.kubeflow-dnntrainer-image-suffix}}"
-          - name: build-script
-            value: components/kubeflow/dnntrainer/build_image.sh
-      - name: build-kubeflow-deployer-image
-        template: build-image-by-dockerfile
-        arguments:
-          parameters:
-          - name: docker-path
-            value: components/kubeflow/deployer
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.kubeflow-deployer-image-suffix}}"
-      - name: build-local-confusion-matrix
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.local-confusionmatrix-image-suffix}}"
-          - name: build-script
-            value: components/local/confusion_matrix/build_image.sh
-      - name: build-local-roc
-        template: build-image-by-script
-        arguments:
-          parameters:
-          - name: image-name
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.local-roc-image-suffix}}"
-          - name: build-script
-            value: components/local/roc/build_image.sh
-
-    - - name: run-tf-training-tests
-        template: run-sample-tests
-        arguments:
-          parameters:
-          - name: test-results-gcs-dir
-            value: "{{inputs.parameters.test-results-gcs-dir}}"
-          - name: target-image-prefix
-            value: "{{inputs.parameters.target-image-prefix}}"
-          - name: dataflow-tft-image
"{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tft-image-suffix}}" - - name: dataflow-predict-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-predict-image-suffix}}" - - name: dataflow-tfma-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tfma-image-suffix}}" - - name: dataflow-tfdv-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tfdv-image-suffix}}" - - name: dataproc-create-cluster-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-create-cluster-image-suffix}}" - - name: dataproc-delete-cluster-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-delete-cluster-image-suffix}}" - - name: dataproc-analyze-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-analyze-image-suffix}}" - - name: dataproc-transform-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-transform-image-suffix}}" - - name: dataproc-train-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-train-image-suffix}}" - - name: dataproc-predict-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-predict-image-suffix}}" - - name: kubeflow-dnntrainer-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.kubeflow-dnntrainer-image-suffix}}" - - name: kubeflow-deployer-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.kubeflow-deployer-image-suffix}}" - - name: local-confusionmatrix-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.local-confusionmatrix-image-suffix}}" - - name: local-roc-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.local-roc-image-suffix}}" - - name: sample-tests-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.sample-tests-image-suffix}}" - - name: namespace - value: "{{inputs.parameters.namespace}}" - - name: test-name - value: "tfx" - - name: run-xgboost-tests - template: run-sample-tests - arguments: - parameters: - - name: test-results-gcs-dir - value: "{{inputs.parameters.test-results-gcs-dir}}" - - name: target-image-prefix - value: "{{inputs.parameters.target-image-prefix}}" - - name: dataflow-tft-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tft-image-suffix}}" - - name: dataflow-predict-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-predict-image-suffix}}" - - name: dataflow-tfma-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tfma-image-suffix}}" - - name: dataflow-tfdv-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tfdv-image-suffix}}" - - name: dataproc-create-cluster-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-create-cluster-image-suffix}}" - - name: dataproc-delete-cluster-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-delete-cluster-image-suffix}}" - - name: dataproc-analyze-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-analyze-image-suffix}}" - - name: dataproc-transform-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-transform-image-suffix}}" - - name: dataproc-train-image - value: 
"{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-train-image-suffix}}" - - name: dataproc-predict-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-predict-image-suffix}}" - - name: kubeflow-dnntrainer-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.kubeflow-dnntrainer-image-suffix}}" - - name: kubeflow-deployer-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.kubeflow-deployer-image-suffix}}" - - name: local-confusionmatrix-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.local-confusionmatrix-image-suffix}}" - - name: local-roc-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.local-roc-image-suffix}}" - - name: sample-tests-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.sample-tests-image-suffix}}" - - name: namespace - value: "{{inputs.parameters.namespace}}" - - name: test-name - value: "xgboost" - - name: run-notebook-tfx-tests - template: run-sample-tests - arguments: - parameters: - - name: test-results-gcs-dir - value: "{{inputs.parameters.test-results-gcs-dir}}" - - name: target-image-prefix - value: "{{inputs.parameters.target-image-prefix}}" - - name: dataflow-tft-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tft-image-suffix}}" - - name: dataflow-predict-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-predict-image-suffix}}" - - name: dataflow-tfma-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tfma-image-suffix}}" - - name: dataflow-tfdv-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tfdv-image-suffix}}" - - name: dataproc-create-cluster-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-create-cluster-image-suffix}}" - - name: dataproc-delete-cluster-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-delete-cluster-image-suffix}}" - - name: dataproc-analyze-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-analyze-image-suffix}}" - - name: dataproc-transform-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-transform-image-suffix}}" - - name: dataproc-train-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-train-image-suffix}}" - - name: dataproc-predict-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-predict-image-suffix}}" - - name: kubeflow-dnntrainer-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.kubeflow-dnntrainer-image-suffix}}" - - name: kubeflow-deployer-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.kubeflow-deployer-image-suffix}}" - - name: local-confusionmatrix-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.local-confusionmatrix-image-suffix}}" - - name: local-roc-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.local-roc-image-suffix}}" - - name: sample-tests-image - value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.sample-tests-image-suffix}}" - - name: namespace - value: "{{inputs.parameters.namespace}}" - - name: test-name - value: "notebook-tfx" - - name: run-notebook-lightweight-tests - template: run-sample-tests - arguments: - parameters: - - name: 
-          - name: test-results-gcs-dir
-            value: "{{inputs.parameters.test-results-gcs-dir}}"
-          - name: target-image-prefix
-            value: "{{inputs.parameters.target-image-prefix}}"
-          - name: dataflow-tft-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tft-image-suffix}}"
-          - name: dataflow-predict-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-predict-image-suffix}}"
-          - name: dataflow-tfma-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tfma-image-suffix}}"
-          - name: dataflow-tfdv-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataflow-tfdv-image-suffix}}"
-          - name: dataproc-create-cluster-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-create-cluster-image-suffix}}"
-          - name: dataproc-delete-cluster-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-delete-cluster-image-suffix}}"
-          - name: dataproc-analyze-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-analyze-image-suffix}}"
-          - name: dataproc-transform-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-transform-image-suffix}}"
-          - name: dataproc-train-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-train-image-suffix}}"
-          - name: dataproc-predict-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.dataproc-predict-image-suffix}}"
-          - name: kubeflow-dnntrainer-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.kubeflow-dnntrainer-image-suffix}}"
-          - name: kubeflow-deployer-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.kubeflow-deployer-image-suffix}}"
-          - name: local-confusionmatrix-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.local-confusionmatrix-image-suffix}}"
-          - name: local-roc-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.local-roc-image-suffix}}"
-          - name: sample-tests-image
-            value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.sample-tests-image-suffix}}"
-          - name: namespace
-            value: "{{inputs.parameters.namespace}}"
-          - name: test-name
-            value: "notebook-lightweight"
-
-
-  # Build and push image
-  - name: build-image-by-dockerfile
-    inputs:
-      parameters:
-      # GCS URI prefix pointing to a .tar.gz archive of Docker build context
-      - name: image-build-context-gcs-uri
-        value: "{{workflow.parameters.image-build-context-gcs-uri}}"
-      # The relative code path to the Dockerfile
-      - name: docker-path
-      # Name of the Docker file to use. "Dockerfile" by default
"Dockerfile" by default - - name: docker-file - value: Dockerfile - - name: image-name - outputs: - parameters: - - name: strict-image-name - valueFrom: - path: /outputs/strict-image-name/file - container: - image: gcr.io/ml-pipeline-test/image-builder:v20181128-0.1.3-rc.1-109-ga5a14dc-e3b0c4 - imagePullPolicy: 'Always' - args: [ - "--image-build-context-gcs-uri", "{{inputs.parameters.image-build-context-gcs-uri}}", - "--docker_path", "{{inputs.parameters.docker-path}}", - "--docker_file", "{{inputs.parameters.docker-file}}", - "--image_name", "{{inputs.parameters.image-name}}", - ] - env: - - name: DOCKER_HOST - value: 127.0.0.1 - - name: GOOGLE_APPLICATION_CREDENTIALS - value: /secret/gcp-credentials/user-gcp-sa.json - volumeMounts: - - name: gcp-credentials - mountPath: /secret/gcp-credentials - sidecars: - - name: dind - image: docker:17.10-dind - securityContext: - privileged: true - mirrorVolumeMounts: true - - - name: build-image-by-script - inputs: - parameters: - # GCS URI prefix pointing to a .tar.gz archive of Docker build context - - name: image-build-context-gcs-uri - value: "{{workflow.parameters.image-build-context-gcs-uri}}" - # the path to the build script - - name: build-script - - name: image-name - outputs: - parameters: - - name: strict-image-name - valueFrom: - path: /outputs/strict-image-name/file - container: - image: gcr.io/ml-pipeline-test/image-builder:v20181128-0.1.3-rc.1-109-ga5a14dc-e3b0c4 - imagePullPolicy: 'Always' - args: [ - "--image-build-context-gcs-uri", "{{inputs.parameters.image-build-context-gcs-uri}}", - "--build_script", "{{inputs.parameters.build-script}}", - "--image_name", "{{inputs.parameters.image-name}}", - ] - env: - - name: DOCKER_HOST - value: 127.0.0.1 - - name: GOOGLE_APPLICATION_CREDENTIALS - value: /secret/gcp-credentials/user-gcp-sa.json - volumeMounts: - - name: gcp-credentials - mountPath: /secret/gcp-credentials - sidecars: - - name: dind - image: docker:17.10-dind - securityContext: - privileged: true - mirrorVolumeMounts: true - - - name: run-sample-tests - inputs: - parameters: - - name: test-results-gcs-dir - - name: target-image-prefix - - name: dataflow-tft-image - - name: dataflow-predict-image - - name: dataflow-tfma-image - - name: dataflow-tfdv-image - - name: dataproc-create-cluster-image - - name: dataproc-delete-cluster-image - - name: dataproc-analyze-image - - name: dataproc-transform-image - - name: dataproc-train-image - - name: dataproc-predict-image - - name: kubeflow-dnntrainer-image - - name: kubeflow-deployer-image - - name: local-confusionmatrix-image - - name: local-roc-image - - name: sample-tests-image - - name: namespace - - name: test-name - container: - image: "{{inputs.parameters.sample-tests-image}}" - args: [ - "--results-gcs-dir", "{{inputs.parameters.test-results-gcs-dir}}", - "--target-image-prefix", "{{inputs.parameters.target-image-prefix}}", - "--dataflow-tft-image","{{inputs.parameters.dataflow-tft-image}}", - "--dataflow-predict-image","{{inputs.parameters.dataflow-predict-image}}", - "--dataflow-tfma-image","{{inputs.parameters.dataflow-tfma-image}}", - "--dataflow-tfdv-image","{{inputs.parameters.dataflow-tfdv-image}}", - "--dataproc-create-cluster-image","{{inputs.parameters.dataproc-create-cluster-image}}", - "--dataproc-delete-cluster-image","{{inputs.parameters.dataproc-delete-cluster-image}}", - "--dataproc-analyze-image","{{inputs.parameters.dataproc-analyze-image}}", - "--dataproc-transform-image","{{inputs.parameters.dataproc-transform-image}}", - 
"--dataproc-train-image","{{inputs.parameters.dataproc-train-image}}", - "--dataproc-predict-image","{{inputs.parameters.dataproc-predict-image}}", - "--kubeflow-dnntrainer-image","{{inputs.parameters.kubeflow-dnntrainer-image}}", - "--kubeflow-deployer-image","{{inputs.parameters.kubeflow-deployer-image}}", - "--local-confusionmatrix-image", "{{inputs.parameters.local-confusionmatrix-image}}", - "--local-roc-image", "{{inputs.parameters.local-roc-image}}", - "--namespace", "{{inputs.parameters.namespace}}", - "--test-name", "{{inputs.parameters.test-name}}", - ] - env: - - name: GOOGLE_APPLICATION_CREDENTIALS - value: /secret/gcp-credentials/user-gcp-sa.json - volumeMounts: - - name: gcp-credentials - mountPath: /secret/gcp-credentials - diff --git a/test/test-prep.sh b/test/test-prep.sh new file mode 100644 index 00000000000..b8ea15d8a50 --- /dev/null +++ b/test/test-prep.sh @@ -0,0 +1,34 @@ +#!/bin/bash +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -x + +# activating the service account +gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" +gcloud config set compute/zone us-east1-b +gcloud config set core/project ${PROJECT} + +#Uploading the source code to GCS: +local_code_archive_file=$(mktemp) +date_string=$(TZ=PST8PDT date +%Y-%m-%d_%H-%M-%S_%Z) +code_archive_prefix="gs://${TEST_RESULT_BUCKET}/${PULL_BASE_SHA}/source_code" +remote_code_archive_uri="${code_archive_prefix}_${PULL_BASE_SHA}_${date_string}.tar.gz" + +tar -czf "$local_code_archive_file" . +gsutil cp "$local_code_archive_file" "$remote_code_archive_uri" + +TEST_CLUSTER_PREFIX=${WORKFLOW_FILE%.*} +TEST_CLUSTER=$(echo $TEST_CLUSTER_PREFIX | cut -d _ -f 1)-${PULL_BASE_SHA:0:7}-${RANDOM}