Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
148 changes: 0 additions & 148 deletions azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,42 +11,14 @@ variables:
_SignType: real
_TeamName: DotNetSpark
MSBUILDSINGLELOADCONTEXT: 1
# forwardCompatibleRelease/backwardCompatibleRelease are the "oldest" releases that work with the current release
forwardCompatibleRelease: '0.9.0'
backwardCompatibleRelease: '0.9.0'
forwardCompatibleTestsToFilterOut: "(FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithArrayType)"
backwardCompatibleTestsToFilterOut: "(FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestDataFrameGroupedMapUdf)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestDataFrameVectorUdf)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.BroadcastTests.TestDestroy)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.BroadcastTests.TestMultipleBroadcast)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.BroadcastTests.TestUnpersist)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithArrayType)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithArrayOfArrayType)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithRowArrayType)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithSimpleArrayType)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithMapType)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithMapOfMapType)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithReturnAsMapType)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfSimpleTypesTests.TestUdfWithReturnAsTimestampType)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfSimpleTypesTests.TestUdfWithTimestampType)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.SparkSessionTests.TestCreateDataFrameWithTimestamp)"
ArtifactPath: '$(Build.ArtifactStagingDirectory)\Microsoft.Spark.Binaries'
CurrentDotnetWorkerDir: '$(ArtifactPath)\Microsoft.Spark.Worker\netcoreapp3.1\win-x64'
BackwardCompatibleDotnetWorkerDir: $(Build.BinariesDirectory)\Microsoft.Spark.Worker-$(backwardCompatibleRelease)

# Azure DevOps variables are transformed into environment variables, with these variables we
# avoid the first time experience and telemetry to speed up the build.
DOTNET_CLI_TELEMETRY_OPTOUT: 1
DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1

resources:
repositories:
- repository: forwardCompatibleRelease
type: github
endpoint: dotnet
name: dotnet/spark
ref: refs/tags/v$(forwardCompatibleRelease)

stages:
- stage: Build
displayName: Build Sources
Expand Down Expand Up @@ -210,123 +182,3 @@ stages:
- '2.4.7'
- '3.0.0'
- '3.0.1'

- stage: ForwardCompatibility
displayName: E2E Forward Compatibility Tests
dependsOn: Build
jobs:
- job: Run
pool: Hosted VS2017

variables:
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
_OfficialBuildIdArgs: /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
DOTNET_WORKER_DIR: $(CurrentDotnetWorkerDir)

steps:
- checkout: forwardCompatibleRelease
path: s\$(forwardCompatibleRelease)

- task: Maven@3
displayName: 'Maven build src for forward compatible release v$(forwardCompatibleRelease)'
inputs:
mavenPomFile: src/scala/pom.xml

- task: DownloadBuildArtifacts@0
displayName: Download Build Artifacts
inputs:
artifactName: Microsoft.Spark.Binaries
downloadPath: $(Build.ArtifactStagingDirectory)

- task: BatchScript@1
displayName: Download Spark Distros & Winutils.exe
inputs:
filename: script\download-spark-distros.cmd
arguments: $(Build.BinariesDirectory)

- template: azure-pipelines-e2e-tests-template.yml
parameters:
versions:
- '2.3.0'
- '2.3.1'
- '2.3.2'
- '2.3.3'
- '2.3.4'
- '2.4.0'
- '2.4.1'
- '2.4.3'
- '2.4.4'
- '2.4.5'
testOptions: '--filter $(forwardCompatibleTestsToFilterOut)'

# Forward compatibility is tested only up to Spark 2.4.5 since it is the latest Spark version
# tested for "forwardCompatibleRelease". This can be updated when "forwardCompatibleRelease" is updated.

- stage: BackwardCompatibility
displayName: E2E Backward Compatibility Tests
dependsOn: Build
jobs:
- job: Run
pool: Hosted VS2017

variables:
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
_OfficialBuildIdArgs: /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
DOTNET_WORKER_DIR: $(BackwardCompatibleDotnetWorkerDir)

steps:
- task: DownloadBuildArtifacts@0
displayName: Download Build Artifacts
inputs:
artifactName: Microsoft.Spark.Binaries
downloadPath: $(Build.ArtifactStagingDirectory)

- task: CopyFiles@2
displayName: Copy jars
inputs:
sourceFolder: $(ArtifactPath)/Jars
contents: '**/*.jar'
targetFolder: $(Build.SourcesDirectory)/src/scala

- task: BatchScript@1
displayName: Download Spark Distros & Winutils.exe
inputs:
filename: script\download-spark-distros.cmd
arguments: $(Build.BinariesDirectory)

- task: BatchScript@1
displayName: Download backward compatible worker v$(backwardCompatibleRelease)
inputs:
filename: script\download-worker-release.cmd
arguments: '$(Build.BinariesDirectory) $(backwardCompatibleRelease)'

- template: azure-pipelines-e2e-tests-template.yml
parameters:
versions:
- '2.3.0'
- '2.3.1'
- '2.3.2'
- '2.3.3'
- '2.3.4'
- '2.4.0'
- '2.4.1'
- '2.4.3'
- '2.4.4'
- '2.4.5'
- '2.4.6'
- '2.4.7'
testOptions: '--filter $(backwardCompatibleTestsToFilterOut)'

# Spark 3.0.* uses Arrow 0.15.1, which contains a new Arrow spec. This breaks backward
# compatibility when using Microsoft.Spark.Worker with incompatible versions of Arrow.
# Skip Arrow tests until the backward compatibility Worker version is updated.
- template: azure-pipelines-e2e-tests-template.yml
parameters:
versions:
- '3.0.0'
- '3.0.1'
testOptions: "--filter $(backwardCompatibleTestsToFilterOut)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestGroupedMapUdf)&\
(FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestVectorUdf)"