From 548baddfd0a72cc4829572e9b7a2489e390e0255 Mon Sep 17 00:00:00 2001
From: Matias Quaranta
Date: Mon, 14 Jun 2021 13:29:26 -0700
Subject: [PATCH] Cosmos DB: Spark CI for Databricks pipeline (#21214)

* move original stage

* add new file

* renamed

* Rename

* path

* moving to root

* version

* parameters

* removing condition
---
 sdk/cosmos/ci.spark.databricks.yml | 13 +++++++------
 sdk/cosmos/ci.spark.yml            | 15 +++++++++++++++
 sdk/cosmos/tests.yml               |  8 --------
 3 files changed, 22 insertions(+), 14 deletions(-)
 create mode 100644 sdk/cosmos/ci.spark.yml

diff --git a/sdk/cosmos/ci.spark.databricks.yml b/sdk/cosmos/ci.spark.databricks.yml
index abb64763c5187..cce0939a5290e 100644
--- a/sdk/cosmos/ci.spark.databricks.yml
+++ b/sdk/cosmos/ci.spark.databricks.yml
@@ -7,7 +7,8 @@ parameters:
     type: string
   - name: DatabricksToken
     type: string
-
+  - name: SparkVersion
+    type: string
 
 stages:
   - stage:
@@ -26,7 +27,7 @@ stages:
         inputs:
           mavenPOMFile: pom.xml
           goals: 'package'
-          options: '$(DefaultOptions) -Ppackage-assembly -DskipTests -Dgpg.skip -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true -Dspotbugs.skip=true -Drevapi.skip=true -pl com.azure:azure-cosmos,com.azure.cosmos.spark:azure-cosmos-spark_3-1_2-12'
+          options: '$(DefaultOptions) -Ppackage-assembly -DskipTests -Dgpg.skip -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true -Dspotbugs.skip=true -Drevapi.skip=true -pl com.azure:azure-cosmos,com.azure.cosmos.spark:${{ parameters.SparkVersion }}'
           javaHomeOption: 'JDKVersion'
           jdkVersionOption: $(JavaBuildVersion)
           jdkArchitectureOption: 'x64'
@@ -58,10 +59,10 @@ stages:
       - task: Bash@3
         displayName: Importing Jars
         inputs:
-          filePath: $(build.sourcesdirectory)/sdk/cosmos/azure-cosmos-spark_3-1_2-12/test-databricks/databricks-jar-install.sh
-          arguments: 'oltp-ci-2workers-ds3v2 $(build.sourcesdirectory)/sdk/cosmos/azure-cosmos-spark_3-1_2-12/target'
+          filePath: $(build.sourcesdirectory)/sdk/cosmos/${{ parameters.SparkVersion }}/test-databricks/databricks-jar-install.sh
+          arguments: 'oltp-ci-2workers-ds3v2 $(build.sourcesdirectory)/sdk/cosmos/${{ parameters.SparkVersion }}/target'
       - task: Bash@3
         displayName: Importing and executing notebooks
         inputs:
-          filePath: $(build.sourcesdirectory)/sdk/cosmos/azure-cosmos-spark_3-1_2-12/test-databricks/databricks-notebooks-install.sh
-          arguments: oltp-ci-2workers-ds3v2 $(build.sourcesdirectory)/sdk/cosmos/azure-cosmos-spark_3-1_2-12/test-databricks/notebooks ${{ parameters.CosmosEndpoint }} ${{ parameters.CosmosKey }}
+          filePath: $(build.sourcesdirectory)/sdk/cosmos/${{ parameters.SparkVersion }}/test-databricks/databricks-notebooks-install.sh
+          arguments: oltp-ci-2workers-ds3v2 $(build.sourcesdirectory)/sdk/cosmos/${{ parameters.SparkVersion }}/test-databricks/notebooks ${{ parameters.CosmosEndpoint }} ${{ parameters.CosmosKey }}
diff --git a/sdk/cosmos/ci.spark.yml b/sdk/cosmos/ci.spark.yml
new file mode 100644
index 0000000000000..568156e9e9e26
--- /dev/null
+++ b/sdk/cosmos/ci.spark.yml
@@ -0,0 +1,15 @@
+trigger: none
+
+variables:
+  - template: ../../eng/pipelines/templates/variables/globals.yml
+  - name: AdditionalArgs
+    value: ''
+
+stages:
+  - template: /sdk/cosmos/ci.spark.databricks.yml
+    parameters:
+      CosmosEndpoint: $(spark-databricks-cosmos-endpoint)
+      CosmosKey: $(spark-databricks-cosmos-key)
+      DatabricksEndpoint: $(spark-databricks-endpoint)
+      DatabricksToken: $(spark-databricks-token)
+      SparkVersion: 'azure-cosmos-spark_3-1_2-12'
diff --git a/sdk/cosmos/tests.yml b/sdk/cosmos/tests.yml
index 363e53f3ccc4c..4f365498890dd 100644
--- a/sdk/cosmos/tests.yml
+++ b/sdk/cosmos/tests.yml
@@ -62,11 +62,3 @@ stages:
         goals: clean verify
         options: '$(DefaultTestOptions) $(ProfileFlag)'
         mavenOptions: '$(DefaultTestMavenOptions)'
-
-  - ${{ if or(eq(variables['Build.Reason'], 'Manual'), eq(variables['Build.Reason'], 'PullRequest')) }}:
-    - template: /sdk/cosmos/ci.spark.databricks.yml
-      parameters:
-        CosmosEndpoint: $(spark-databricks-cosmos-endpoint)
-        CosmosKey: $(spark-databricks-cosmos-key)
-        DatabricksEndpoint: $(spark-databricks-endpoint)
-        DatabricksToken: $(spark-databricks-token)