Run E2E tests on Linux in build pipeline and add Backward/Forward E2E tests (#737)

This commit is contained in:
Steve Suh 2020-11-03 11:54:28 -08:00 committed by GitHub
Parent d252590bb7
Commit 27e67ac447
No key matching this signature was found
GPG key ID: 4AEE18F83AFDEB23
6 changed files: 307 additions and 102 deletions

View file

@@ -1,25 +1,28 @@
 parameters:
-- name: 'versions'
+- name: tests
   type: object
   default: {}
-- name: 'testOptions'
+- name: backwardCompatibleRelease
+  type: string
+  default: ''
+- name: forwardCompatibleRelease
   type: string
   default: ''
 stages:
-- ${{ each version in parameters.versions }}:
-  - stage: E2E_Tests_${{ replace(version, '.', '_') }}
-    displayName: E2E tests for Spark ${{ version }}
+- ${{ each test in parameters.tests }}:
+  - stage: E2E_Tests_${{ replace(test.version, '.', '_') }}
+    displayName: E2E tests for Spark ${{ test.version }}
     dependsOn: Build
     jobs:
-    - job: Run
-      pool: Hosted VS2017
-      variables:
-        ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
-          _OfficialBuildIdArgs: /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
-        HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
-        DOTNET_WORKER_DIR: $(CurrentDotnetWorkerDir)
-      steps:
-      - task: DownloadBuildArtifacts@0
+    - ${{ each option in test.jobOptions }}:
+      - job: Run_${{ replace(option.pool, ' ', '_') }}
+        pool: ${{ option.pool }}
+        variables:
+          ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+            _OfficialBuildIdArgs: /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
+        steps:
+        - task: DownloadBuildArtifacts@0
@@ -28,30 +31,115 @@ stages:
             artifactName: Microsoft.Spark.Binaries
             downloadPath: $(Build.ArtifactStagingDirectory)
+        - pwsh: |
+            $framework = "netcoreapp3.1"
+            if ($env:AGENT_OS -eq 'Windows_NT') {
+              $runtimeIdentifier = "win-x64"
+            } else {
+              $runtimeIdentifier = "linux-x64"
+            }
+            $pathSeparator = [IO.Path]::DirectorySeparatorChar
+            $artifactPath = "$(Build.ArtifactStagingDirectory)${pathSeparator}Microsoft.Spark.Binaries"
+            echo "##vso[task.setvariable variable=PATH_SEPARATOR]$pathSeparator"
+            echo "##vso[task.setvariable variable=ArtifactPath]$artifactPath"
+            $backwardCompatibleRelease = "${{ parameters.backwardCompatibleRelease }}"
+            echo "##vso[task.setvariable variable=BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR]$(Build.BinariesDirectory)${pathSeparator}Microsoft.Spark.Worker-${backwardCompatibleRelease}"
+            echo "##vso[task.setvariable variable=BACKWARD_COMPATIBLE_WORKER_URL]https://github.com/dotnet/spark/releases/download/v${backwardCompatibleRelease}/Microsoft.Spark.Worker.${framework}.${runtimeIdentifier}-${backwardCompatibleRelease}.zip"
+            $dotnetWorkerDir = "${artifactPath}${pathSeparator}Microsoft.Spark.Worker${pathSeparator}${framework}${pathSeparator}${runtimeIdentifier}"
+            echo "##vso[task.setvariable variable=CURRENT_DOTNET_WORKER_DIR]$dotnetWorkerDir"
+            if ($env:AGENT_OS -eq 'Linux') {
+              chmod +x "${dotnetWorkerDir}${pathSeparator}Microsoft.Spark.Worker"
+            }
+          displayName: 'Setup Variables and Permissions'
+        - checkout: self
+          path: s$(PATH_SEPARATOR)dotnet-spark
-      - task: CopyFiles@2
-        displayName: Copy jars
-        inputs:
-          sourceFolder: $(ArtifactPath)/Jars
-          contents: '**/*.jar'
-          targetFolder: $(Build.SourcesDirectory)/src/scala
+        - task: CopyFiles@2
+          displayName: Copy jars
+          inputs:
+            sourceFolder: $(ArtifactPath)$(PATH_SEPARATOR)Jars
+            contents: '**$(PATH_SEPARATOR)*.jar'
+            targetFolder: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark$(PATH_SEPARATOR)src$(PATH_SEPARATOR)scala
-      - task: BatchScript@1
-        displayName: Download Winutils.exe
-        inputs:
-          filename: script\download-hadoop-utils.cmd
-          arguments: $(Build.BinariesDirectory)
+        - task: PowerShell@2
+          condition: eq( variables['Agent.OS'], 'Windows_NT' )
+          displayName: Download Winutils.exe
+          inputs:
+            workingDirectory: $(Build.BinariesDirectory)
+            pwsh: true
+            targetType: inline
+            script: |
+              echo "Download Hadoop utils for Windows."
+              curl -k -L -o hadoop.zip https://github.com/steveloughran/winutils/releases/download/tag_2017-08-29-hadoop-2.8.1-native/hadoop-2.8.1.zip
+              unzip hadoop.zip
+              New-Item -ItemType Directory -Force -Path hadoop\bin
+              cp hadoop-2.8.1\winutils.exe hadoop\bin
-      - task: BatchScript@1
-        displayName: 'Download Spark Distro ${{ version }}'
-        inputs:
-          filename: script\download-spark-distros.cmd
-          arguments: $(Build.BinariesDirectory) ${{ version }}
+        - pwsh: |
+            echo "Downloading Spark ${{ test.version }}"
+            curl -k -L -o spark-${{ test.version }}.tgz https://archive.apache.org/dist/spark/spark-${{ test.version }}/spark-${{ test.version }}-bin-hadoop2.7.tgz
+            tar xzvf spark-${{ test.version }}.tgz
+          displayName: 'Download Spark Distro ${{ test.version }}'
+          workingDirectory: $(Build.BinariesDirectory)
-      - task: DotNetCoreCLI@2
-        displayName: 'E2E tests'
-        inputs:
-          command: test
-          projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
-          arguments: '--configuration $(buildConfiguration) ${{ parameters.testOptions }}'
-        env:
-          SPARK_HOME: $(Build.BinariesDirectory)\spark-${{ version }}-bin-hadoop2.7
+        - task: DotNetCoreCLI@2
+          displayName: 'E2E tests'
+          inputs:
+            command: test
+            projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
+            arguments: '--configuration $(buildConfiguration) ${{ option.testOptions }}'
+            workingDirectory: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark
+          env:
+            HADOOP_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
+            SPARK_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
+            DOTNET_WORKER_DIR: $(CURRENT_DOTNET_WORKER_DIR)
+        - pwsh: |
+            echo "Downloading ${env:BACKWARD_COMPATIBLE_WORKER_URL}"
+            curl -k -L -o Microsoft.Spark.Worker-${{ parameters.backwardCompatibleRelease }}.zip ${env:BACKWARD_COMPATIBLE_WORKER_URL}
+            unzip Microsoft.Spark.Worker-${{ parameters.backwardCompatibleRelease }}.zip -d $([System.IO.Directory]::GetParent($env:BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR).FullName)
+            if ($env:AGENT_OS -eq 'Linux') {
+              chmod +x "${env:BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR}${env:PATH_SEPARATOR}Microsoft.Spark.Worker"
+            }
+          displayName: 'Setup Backward Compatible Microsoft Spark Worker ${{ parameters.backwardCompatibleRelease }}'
+          workingDirectory: $(Build.BinariesDirectory)
+          env:
+            SPARK_VERSION: ${{ test.version }}
+        - task: DotNetCoreCLI@2
+          displayName: 'E2E Backward Compatibility Tests'
+          inputs:
+            command: test
+            projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
+            arguments: '--configuration $(buildConfiguration) ${{ option.backwardCompatibleTestOptions }}'
+            workingDirectory: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark
+          env:
+            HADOOP_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
+            SPARK_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
+            DOTNET_WORKER_DIR: $(BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR)
+        - checkout: forwardCompatibleRelease
+          path: s$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}
+        - task: Maven@3
+          displayName: 'Maven build src for forward compatible release v${{ parameters.forwardCompatibleRelease }}'
+          inputs:
+            mavenPomFile: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}$(PATH_SEPARATOR)src$(PATH_SEPARATOR)scala$(PATH_SEPARATOR)pom.xml
+        - task: DotNetCoreCLI@2
+          displayName: 'E2E Forward Compatibility Tests'
+          inputs:
+            command: test
+            projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
+            arguments: '--configuration $(buildConfiguration) ${{ option.forwardCompatibleTestOptions }}'
+            workingDirectory: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}
+          env:
+            HADOOP_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
+            SPARK_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
+            DOTNET_WORKER_DIR: $(CURRENT_DOTNET_WORKER_DIR)
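
A note on the 'Setup Variables and Permissions' step above: Azure DevOps scans a step's stdout for ##vso[task.setvariable ...] logging commands and turns them into pipeline variables for subsequent steps, which is how PATH_SEPARATOR, ArtifactPath, and the worker directories flow into the later tasks. A minimal C# sketch of the same mechanism (the host program below is hypothetical; only the ##vso syntax is Azure DevOps behavior):

using System;
using System.IO;

// Hypothetical host program: logging commands are plain stdout lines, so any
// process a pipeline step runs can set variables the same way the pwsh step's
// `echo` calls do.
class SetPipelineVariables
{
    static void Main()
    {
        char separator = Path.DirectorySeparatorChar;

        // Later steps in the same job see this as $(PATH_SEPARATOR) and as
        // the environment variable PATH_SEPARATOR.
        Console.WriteLine(
            $"##vso[task.setvariable variable=PATH_SEPARATOR]{separator}");
    }
}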

View file

@@ -12,13 +12,45 @@ variables:
   _TeamName: DotNetSpark
   MSBUILDSINGLELOADCONTEXT: 1
   ArtifactPath: '$(Build.ArtifactStagingDirectory)\Microsoft.Spark.Binaries'
+  CurrentDotnetWorkerDir: '$(ArtifactPath)\Microsoft.Spark.Worker\netcoreapp3.1\win-x64'
+  backwardCompatibleRelease: '1.0.0'
+  forwardCompatibleRelease: '1.0.0'
+  backwardCompatibleTestOptions_Windows_2_3: ""
+  forwardCompatibleTestOptions_Windows_2_3: ""
+  backwardCompatibleTestOptions_Linux_2_3: ""
+  forwardCompatibleTestOptions_Linux_2_3: ""
+  backwardCompatibleTestOptions_Windows_2_4: ""
+  forwardCompatibleTestOptions_Windows_2_4: ""
+  backwardCompatibleTestOptions_Linux_2_4: ""
+  # Filter HyperspaceTests not due to functionality changes, but to incompatible tests running on Linux.
+  # Please see https://github.com/dotnet/spark/pull/737 for the fix.
+  forwardCompatibleTestOptions_Linux_2_4: "--filter \
+    (FullyQualifiedName!=Microsoft.Spark.Extensions.Hyperspace.E2ETest.HyperspaceTests.TestExplainAPI)&\
+    (FullyQualifiedName!=Microsoft.Spark.Extensions.Hyperspace.E2ETest.HyperspaceTests.TestIndexCreateAndDelete)&\
+    (FullyQualifiedName!=Microsoft.Spark.Extensions.Hyperspace.E2ETest.HyperspaceTests.TestSignatures)"
+  backwardCompatibleTestOptions_Windows_3: "--filter \
+    (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestDataFrameGroupedMapUdf)&\
+    (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestGroupedMapUdf)"
+  forwardCompatibleTestOptions_Windows_3: ""
+  backwardCompatibleTestOptions_Linux_3: $(backwardCompatibleTestOptions_Windows_3)
+  forwardCompatibleTestOptions_Linux_3: $(forwardCompatibleTestOptions_Linux_2_4)
   # Azure DevOps variables are transformed into environment variables, with these variables we
   # avoid the first time experience and telemetry to speed up the build.
   DOTNET_CLI_TELEMETRY_OPTOUT: 1
   DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1
+resources:
+  repositories:
+  - repository: forwardCompatibleRelease
+    type: github
+    endpoint: dotnet
+    name: dotnet/spark
+    ref: refs/tags/v$(forwardCompatibleRelease)
 stages:
 - stage: Build
   displayName: Build Sources
@@ -173,18 +205,143 @@ stages:
   - template: azure-pipelines-e2e-tests-template.yml
     parameters:
-      versions:
-      - '2.3.0'
-      - '2.3.1'
-      - '2.3.2'
-      - '2.3.3'
-      - '2.3.4'
-      - '2.4.0'
-      - '2.4.1'
-      - '2.4.3'
-      - '2.4.4'
-      - '2.4.5'
-      - '2.4.6'
-      - '2.4.7'
-      - '3.0.0'
-      - '3.0.1'
+      backwardCompatibleRelease: $(backwardCompatibleRelease)
+      forwardCompatibleRelease: $(forwardCompatibleRelease)
+      tests:
+      - version: '2.3.0'
+        jobOptions:
+        # 'Hosted Ubuntu 1604' test is disabled due to https://github.com/dotnet/spark/issues/753
+        - pool: 'Hosted VS2017'
+          testOptions: ''
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
+      - version: '2.3.1'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
+      - version: '2.3.2'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
+      - version: '2.3.3'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
+      - version: '2.3.4'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
+      - version: '2.4.0'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+      - version: '2.4.1'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+      - version: '2.4.3'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+      - version: '2.4.4'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+      - version: '2.4.5'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+      - version: '2.4.6'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+      - version: '2.4.7'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+      - version: '3.0.0'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3)
+      - version: '3.0.1'
+        jobOptions:
+        - pool: 'Hosted VS2017'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3)
+        - pool: 'Hosted Ubuntu 1604'
+          testOptions: ""
+          backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3)
+          forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3)
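
A note on the *TestOptions variables above: they are passed verbatim to dotnet test, whose --filter expressions match each test's FullyQualifiedName (Namespace.ClassName.MethodName); != excludes a single test and & joins conditions. A minimal sketch of how a test maps onto those names (illustration only, body elided):

using Xunit;

// For `dotnet test --filter`, this test's FullyQualifiedName is
// Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestGroupedMapUdf, so it is
// excluded by the expression
// (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestGroupedMapUdf).
namespace Microsoft.Spark.E2ETest.IpcTests
{
    public class DataFrameTests
    {
        [Fact]
        public void TestGroupedMapUdf()
        {
            // Body elided; only the fully qualified name matters to the filter.
        }
    }
}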

View file

@@ -1,14 +0,0 @@
-@echo off
-setlocal
-set OutputDir=%1
-cd %OutputDir%
-echo "Download Hadoop utils for Windows."
-curl -k -L -o hadoop.zip https://github.com/steveloughran/winutils/releases/download/tag_2017-08-29-hadoop-2.8.1-native/hadoop-2.8.1.zip
-unzip hadoop.zip
-mkdir -p hadoop\bin
-cp hadoop-2.8.1\winutils.exe hadoop\bin
-endlocal

View file

@@ -1,14 +0,0 @@
-@echo off
-setlocal
-set OutputDir=%1
-set SparkVersion=%2
-cd %OutputDir%
-echo "Downloading Spark distros."
-curl -k -L -o spark-%SparkVersion%.tgz https://archive.apache.org/dist/spark/spark-%SparkVersion%/spark-%SparkVersion%-bin-hadoop2.7.tgz && tar xzvf spark-%SparkVersion%.tgz
-endlocal

View file

@@ -1,13 +0,0 @@
-@echo off
-setlocal
-set OutputDir=%1
-set OldestCompatibleWorkerVersion=%2
-cd %OutputDir%
-echo "Download oldest backwards compatible worker release - %OldestCompatibleWorkerVersion%"
-curl -k -L -o Microsoft.Spark.Worker.zip https://github.com/dotnet/spark/releases/download/v%OldestCompatibleWorkerVersion%/Microsoft.Spark.Worker.netcoreapp3.1.win-x64-%OldestCompatibleWorkerVersion%.zip
-unzip Microsoft.Spark.Worker.zip
-endlocal

View file

@@ -3,6 +3,7 @@
 // See the LICENSE file in the project root for more information.
 using System;
+using Microsoft.Spark.E2ETest;
 using Microsoft.Spark.E2ETest.Utils;
 using Microsoft.Spark.Extensions.Hyperspace.Index;
 using Microsoft.Spark.Sql;
@@ -36,7 +37,7 @@ namespace Microsoft.Spark.Extensions.Hyperspace.E2ETest
             _sampleDataFrame = _spark.Read()
                 .Option("header", true)
                 .Option("delimiter", ";")
-                .Csv("Resources\\people.csv");
+                .Csv($"{TestEnvironment.ResourceDirectory}people.csv");
             _sampleIndexName = "sample_dataframe";
             _sampleIndexConfig = new IndexConfig(_sampleIndexName, new[] { "job" }, new[] { "name" });
             _hyperspace.CreateIndex(_sampleDataFrame, _sampleIndexConfig);