Mirror of https://github.com/dotnet/spark.git
Run E2E tests on Linux in build pipeline and add Backward/Forward E2E tests (#737)
This commit is contained in:
Parent: d252590bb7
Commit: 27e67ac447
azure-pipelines-e2e-tests-template.yml
@@ -1,57 +1,145 @@
 parameters:
-- name: 'versions'
+- name: tests
   type: object
   default: {}
-- name: 'testOptions'
+- name: backwardCompatibleRelease
+  type: string
+  default: ''
+- name: forwardCompatibleRelease
   type: string
   default: ''
 
 stages:
-- ${{ each version in parameters.versions }}:
-  - stage: E2E_Tests_${{ replace(version, '.', '_') }}
-    displayName: E2E tests for Spark ${{ version }}
+- ${{ each test in parameters.tests }}:
+  - stage: E2E_Tests_${{ replace(test.version, '.', '_') }}
+    displayName: E2E tests for Spark ${{ test.version }}
     dependsOn: Build
     jobs:
-    - job: Run
-      pool: Hosted VS2017
-
-      variables:
-        ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
-          _OfficialBuildIdArgs: /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
-        HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
-        DOTNET_WORKER_DIR: $(CurrentDotnetWorkerDir)
-
-      steps:
-      - task: DownloadBuildArtifacts@0
-        displayName: Download Build Artifacts
-        inputs:
-          artifactName: Microsoft.Spark.Binaries
-          downloadPath: $(Build.ArtifactStagingDirectory)
-
-      - task: CopyFiles@2
-        displayName: Copy jars
-        inputs:
-          sourceFolder: $(ArtifactPath)/Jars
-          contents: '**/*.jar'
-          targetFolder: $(Build.SourcesDirectory)/src/scala
-
-      - task: BatchScript@1
-        displayName: Download Winutils.exe
-        inputs:
-          filename: script\download-hadoop-utils.cmd
-          arguments: $(Build.BinariesDirectory)
-
-      - task: BatchScript@1
-        displayName: 'Download Spark Distro ${{ version }}'
-        inputs:
-          filename: script\download-spark-distros.cmd
-          arguments: $(Build.BinariesDirectory) ${{ version }}
-
-      - task: DotNetCoreCLI@2
-        displayName: 'E2E tests'
-        inputs:
-          command: test
-          projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
-          arguments: '--configuration $(buildConfiguration) ${{ parameters.testOptions }}'
-        env:
-          SPARK_HOME: $(Build.BinariesDirectory)\spark-${{ version }}-bin-hadoop2.7
+    - ${{ each option in test.jobOptions }}:
+      - job: Run_${{ replace(option.pool, ' ', '_') }}
+        pool: ${{ option.pool }}
+
+        variables:
+          ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+            _OfficialBuildIdArgs: /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
+
+        steps:
+        - task: DownloadBuildArtifacts@0
+          displayName: Download Build Artifacts
+          inputs:
+            artifactName: Microsoft.Spark.Binaries
+            downloadPath: $(Build.ArtifactStagingDirectory)
+
+        - pwsh: |
+            $framework = "netcoreapp3.1"
+
+            if ($env:AGENT_OS -eq 'Windows_NT') {
+              $runtimeIdentifier = "win-x64"
+            } else {
+              $runtimeIdentifier = "linux-x64"
+            }
+
+            $pathSeparator = [IO.Path]::DirectorySeparatorChar
+            $artifactPath = "$(Build.ArtifactStagingDirectory)${pathSeparator}Microsoft.Spark.Binaries"
+            echo "##vso[task.setvariable variable=PATH_SEPARATOR]$pathSeparator"
+            echo "##vso[task.setvariable variable=ArtifactPath]$artifactPath"
+
+            $backwardCompatibleRelease = "${{ parameters.backwardCompatibleRelease }}"
+            echo "##vso[task.setvariable variable=BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR]$(Build.BinariesDirectory)${pathSeparator}Microsoft.Spark.Worker-${backwardCompatibleRelease}"
+            echo "##vso[task.setvariable variable=BACKWARD_COMPATIBLE_WORKER_URL]https://github.com/dotnet/spark/releases/download/v${backwardCompatibleRelease}/Microsoft.Spark.Worker.${framework}.${runtimeIdentifier}-${backwardCompatibleRelease}.zip"
+
+            $dotnetWorkerDir = "${artifactPath}${pathSeparator}Microsoft.Spark.Worker${pathSeparator}${framework}${pathSeparator}${runtimeIdentifier}"
+            echo "##vso[task.setvariable variable=CURRENT_DOTNET_WORKER_DIR]$dotnetWorkerDir"
+            if ($env:AGENT_OS -eq 'Linux') {
+              chmod +x "${dotnetWorkerDir}${pathSeparator}Microsoft.Spark.Worker"
+            }
+          displayName: 'Setup Variables and Permissions'
+
+        - checkout: self
+          path: s$(PATH_SEPARATOR)dotnet-spark
+
+        - task: CopyFiles@2
+          displayName: Copy jars
+          inputs:
+            sourceFolder: $(ArtifactPath)$(PATH_SEPARATOR)Jars
+            contents: '**$(PATH_SEPARATOR)*.jar'
+            targetFolder: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark$(PATH_SEPARATOR)src$(PATH_SEPARATOR)scala
+
+        - task: PowerShell@2
+          condition: eq( variables['Agent.OS'], 'Windows_NT' )
+          displayName: Download Winutils.exe
+          inputs:
+            workingDirectory: $(Build.BinariesDirectory)
+            pwsh: true
+            targetType: inline
+            script: |
+              echo "Download Hadoop utils for Windows."
+              curl -k -L -o hadoop.zip https://github.com/steveloughran/winutils/releases/download/tag_2017-08-29-hadoop-2.8.1-native/hadoop-2.8.1.zip
+              unzip hadoop.zip
+              New-Item -ItemType Directory -Force -Path hadoop\bin
+              cp hadoop-2.8.1\winutils.exe hadoop\bin
+
+        - pwsh: |
+            echo "Downloading Spark ${{ test.version }}"
+            curl -k -L -o spark-${{ test.version }}.tgz https://archive.apache.org/dist/spark/spark-${{ test.version }}/spark-${{ test.version }}-bin-hadoop2.7.tgz
+            tar xzvf spark-${{ test.version }}.tgz
+          displayName: 'Download Spark Distro ${{ test.version }}'
+          workingDirectory: $(Build.BinariesDirectory)
+
+        - task: DotNetCoreCLI@2
+          displayName: 'E2E tests'
+          inputs:
+            command: test
+            projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
+            arguments: '--configuration $(buildConfiguration) ${{ option.testOptions }}'
+            workingDirectory: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark
+          env:
+            HADOOP_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
+            SPARK_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
+            DOTNET_WORKER_DIR: $(CURRENT_DOTNET_WORKER_DIR)
+
+        - pwsh: |
+            echo "Downloading ${env:BACKWARD_COMPATIBLE_WORKER_URL}"
+            curl -k -L -o Microsoft.Spark.Worker-${{ parameters.backwardCompatibleRelease }}.zip ${env:BACKWARD_COMPATIBLE_WORKER_URL}
+            unzip Microsoft.Spark.Worker-${{ parameters.backwardCompatibleRelease }}.zip -d $([System.IO.Directory]::GetParent($env:BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR).FullName)
+
+            if ($env:AGENT_OS -eq 'Linux') {
+              chmod +x "${env:BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR}${env:PATH_SEPARATOR}Microsoft.Spark.Worker"
+            }
+          displayName: 'Setup Backward Compatible Microsoft Spark Worker ${{ parameters.backwardCompatibleRelease }}'
+          workingDirectory: $(Build.BinariesDirectory)
+          env:
+            SPARK_VERSION: ${{ test.version }}
+
+        - task: DotNetCoreCLI@2
+          displayName: 'E2E Backward Compatibility Tests'
+          inputs:
+            command: test
+            projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
+            arguments: '--configuration $(buildConfiguration) ${{ option.backwardCompatibleTestOptions }}'
+            workingDirectory: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark
+          env:
+            HADOOP_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
+            SPARK_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
+            DOTNET_WORKER_DIR: $(BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR)
+
+        - checkout: forwardCompatibleRelease
+          path: s$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}
+
+        - task: Maven@3
+          displayName: 'Maven build src for forward compatible release v${{ parameters.forwardCompatibleRelease }}'
+          inputs:
+            mavenPomFile: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}$(PATH_SEPARATOR)src$(PATH_SEPARATOR)scala$(PATH_SEPARATOR)pom.xml
+
+        - task: DotNetCoreCLI@2
+          displayName: 'E2E Forward Compatibility Tests'
+          inputs:
+            command: test
+            projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
+            arguments: '--configuration $(buildConfiguration) ${{ option.forwardCompatibleTestOptions }}'
+            workingDirectory: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}
+          env:
+            HADOOP_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
+            SPARK_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
+            DOTNET_WORKER_DIR: $(CURRENT_DOTNET_WORKER_DIR)
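Note: the new "Setup Variables and Permissions" step hands values to later
steps through Azure Pipelines logging commands: a "##vso[task.setvariable ...]"
line written to stdout becomes a pipeline variable that subsequent steps read
as $(NAME) in YAML or $env:NAME in pwsh. A minimal sketch of the same pattern;
the variable name RUNTIME_ID is illustrative, not taken from the pipeline:

    # Branch on the agent OS the same way the template does, then publish the
    # result for later steps via a task.setvariable logging command.
    if ($env:AGENT_OS -eq 'Windows_NT') {
        $runtimeIdentifier = 'win-x64'
    } else {
        $runtimeIdentifier = 'linux-x64'
    }
    # Azure Pipelines parses this stdout line and defines RUNTIME_ID; a later
    # step can read it as $(RUNTIME_ID) in YAML or $env:RUNTIME_ID in pwsh.
    Write-Output "##vso[task.setvariable variable=RUNTIME_ID]$runtimeIdentifier"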
azure-pipelines.yml
@@ -12,13 +12,45 @@ variables:
   _TeamName: DotNetSpark
   MSBUILDSINGLELOADCONTEXT: 1
   ArtifactPath: '$(Build.ArtifactStagingDirectory)\Microsoft.Spark.Binaries'
-  CurrentDotnetWorkerDir: '$(ArtifactPath)\Microsoft.Spark.Worker\netcoreapp3.1\win-x64'
+
+  backwardCompatibleRelease: '1.0.0'
+  forwardCompatibleRelease: '1.0.0'
+
+  backwardCompatibleTestOptions_Windows_2_3: ""
+  forwardCompatibleTestOptions_Windows_2_3: ""
+  backwardCompatibleTestOptions_Linux_2_3: ""
+  forwardCompatibleTestOptions_Linux_2_3: ""
+
+  backwardCompatibleTestOptions_Windows_2_4: ""
+  forwardCompatibleTestOptions_Windows_2_4: ""
+  backwardCompatibleTestOptions_Linux_2_4: ""
+  # Filter HyperspaceTests not due to functionality changes, but to incompatible tests running on Linux.
+  # Please see https://github.com/dotnet/spark/pull/737 for the fix.
+  forwardCompatibleTestOptions_Linux_2_4: "--filter \
+    (FullyQualifiedName!=Microsoft.Spark.Extensions.Hyperspace.E2ETest.HyperspaceTests.TestExplainAPI)&\
+    (FullyQualifiedName!=Microsoft.Spark.Extensions.Hyperspace.E2ETest.HyperspaceTests.TestIndexCreateAndDelete)&\
+    (FullyQualifiedName!=Microsoft.Spark.Extensions.Hyperspace.E2ETest.HyperspaceTests.TestSignatures)"
+
+  backwardCompatibleTestOptions_Windows_3: "--filter \
+    (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestDataFrameGroupedMapUdf)&\
+    (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestGroupedMapUdf)"
+  forwardCompatibleTestOptions_Windows_3: ""
+  backwardCompatibleTestOptions_Linux_3: $(backwardCompatibleTestOptions_Windows_3)
+  forwardCompatibleTestOptions_Linux_3: $(forwardCompatibleTestOptions_Linux_2_4)
 
   # Azure DevOps variables are transformed into environment variables, with these variables we
   # avoid the first time experience and telemetry to speed up the build.
   DOTNET_CLI_TELEMETRY_OPTOUT: 1
   DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1
 
+resources:
+  repositories:
+  - repository: forwardCompatibleRelease
+    type: github
+    endpoint: dotnet
+    name: dotnet/spark
+    ref: refs/tags/v$(forwardCompatibleRelease)
+
 stages:
 - stage: Build
   displayName: Build Sources
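Note: the *TestOptions_* variables above are passed through the template into
the "dotnet test" invocations, so each multi-line string collapses into a
single --filter expression in which "&" joins conditions that must all hold.
A rough local equivalent of backwardCompatibleTestOptions_Windows_3 (the
project path here is assumed for illustration):

    # Run the E2E suite while excluding the two GroupedMap UDF tests,
    # mirroring backwardCompatibleTestOptions_Windows_3.
    dotnet test src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj `
        --configuration Release `
        --filter "(FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestDataFrameGroupedMapUdf)&(FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestGroupedMapUdf)"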
@@ -103,7 +135,7 @@ stages:
         inputs:
           artifactName: Microsoft.Spark.Binaries
           downloadPath: $(Build.ArtifactStagingDirectory)
 
       - task: MicroBuildSigningPlugin@2
         displayName: Install MicroBuild plugin
         inputs:
@@ -173,18 +205,143 @@ stages:
 
 - template: azure-pipelines-e2e-tests-template.yml
   parameters:
-    versions:
-    - '2.3.0'
-    - '2.3.1'
-    - '2.3.2'
-    - '2.3.3'
-    - '2.3.4'
-    - '2.4.0'
-    - '2.4.1'
-    - '2.4.3'
-    - '2.4.4'
-    - '2.4.5'
-    - '2.4.6'
-    - '2.4.7'
-    - '3.0.0'
-    - '3.0.1'
+    backwardCompatibleRelease: $(backwardCompatibleRelease)
+    forwardCompatibleRelease: $(forwardCompatibleRelease)
+    tests:
+    - version: '2.3.0'
+      jobOptions:
+      # 'Hosted Ubuntu 1604' test is disabled due to https://github.com/dotnet/spark/issues/753
+      - pool: 'Hosted VS2017'
+        testOptions: ''
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
+    - version: '2.3.1'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
+    - version: '2.3.2'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
+    - version: '2.3.3'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
+    - version: '2.3.4'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
+    - version: '2.4.0'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+    - version: '2.4.1'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+    - version: '2.4.3'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+    - version: '2.4.4'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+    - version: '2.4.5'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+    - version: '2.4.6'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+    - version: '2.4.7'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_4)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
+    - version: '3.0.0'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3)
+    - version: '3.0.1'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3)
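Note: each "version" entry expands to one stage and each "jobOptions" entry to
one job, so this matrix yields 14 stages and 27 jobs (two pools per version,
except 2.3.0, whose Ubuntu job is disabled). The job ids come from
replace(option.pool, ' ', '_'), which behaves like this pwsh sketch:

    # 'Hosted Ubuntu 1604' -> Run_Hosted_Ubuntu_1604, matching the
    # "- job: Run_${{ replace(option.pool, ' ', '_') }}" line in the template.
    foreach ($pool in 'Hosted VS2017', 'Hosted Ubuntu 1604') {
        'Run_' + ($pool -replace ' ', '_')
    }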
script\download-hadoop-utils.cmd (deleted)
@@ -1,14 +0,0 @@
-@echo off
-
-setlocal
-
-set OutputDir=%1
-cd %OutputDir%
-
-echo "Download Hadoop utils for Windows."
-curl -k -L -o hadoop.zip https://github.com/steveloughran/winutils/releases/download/tag_2017-08-29-hadoop-2.8.1-native/hadoop-2.8.1.zip
-unzip hadoop.zip
-mkdir -p hadoop\bin
-cp hadoop-2.8.1\winutils.exe hadoop\bin
-
-endlocal
script\download-spark-distros.cmd (deleted)
@@ -1,14 +0,0 @@
-@echo off
-
-setlocal
-
-set OutputDir=%1
-set SparkVersion=%2
-
-cd %OutputDir%
-
-echo "Downloading Spark distros."
-
-curl -k -L -o spark-%SparkVersion%.tgz https://archive.apache.org/dist/spark/spark-%SparkVersion%/spark-%SparkVersion%-bin-hadoop2.7.tgz && tar xzvf spark-%SparkVersion%.tgz
-
-endlocal
script\download-worker-release.cmd (deleted)
@@ -1,13 +0,0 @@
-@echo off
-
-setlocal
-
-set OutputDir=%1
-set OldestCompatibleWorkerVersion=%2
-cd %OutputDir%
-
-echo "Download oldest backwards compatible worker release - %OldestCompatibleWorkerVersion%"
-curl -k -L -o Microsoft.Spark.Worker.zip https://github.com/dotnet/spark/releases/download/v%OldestCompatibleWorkerVersion%/Microsoft.Spark.Worker.netcoreapp3.1.win-x64-%OldestCompatibleWorkerVersion%.zip
-unzip Microsoft.Spark.Worker.zip
-
-endlocal
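Note: the three deleted .cmd scripts ran under the Windows-only BatchScript
task, which is what kept the E2E stages pinned to Windows agents; their work
now happens in the template's inline pwsh steps, which run unchanged on both
pools. A condensed sketch of the cross-platform shape that replaced
download-spark-distros.cmd; the parameter names stand in for %1 and %2 and are
illustrative:

    param(
        [string]$OutputDir,     # was %1 in the old script
        [string]$SparkVersion   # was %2 in the old script
    )
    Set-Location $OutputDir
    # Under pwsh, curl and tar resolve to the native tools on both Windows and
    # Linux hosted agents, so no OS-specific branch is needed for this step.
    curl -k -L -o "spark-$SparkVersion.tgz" "https://archive.apache.org/dist/spark/spark-$SparkVersion/spark-$SparkVersion-bin-hadoop2.7.tgz"
    tar xzvf "spark-$SparkVersion.tgz"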
Microsoft.Spark.Extensions.Hyperspace.E2ETest/HyperspaceTests.cs
@@ -3,6 +3,7 @@
 // See the LICENSE file in the project root for more information.
 
 using System;
+using Microsoft.Spark.E2ETest;
 using Microsoft.Spark.E2ETest.Utils;
 using Microsoft.Spark.Extensions.Hyperspace.Index;
 using Microsoft.Spark.Sql;
@@ -36,7 +37,7 @@ namespace Microsoft.Spark.Extensions.Hyperspace.E2ETest
             _sampleDataFrame = _spark.Read()
                 .Option("header", true)
                 .Option("delimiter", ";")
-                .Csv("Resources\\people.csv");
+                .Csv($"{TestEnvironment.ResourceDirectory}people.csv");
             _sampleIndexName = "sample_dataframe";
             _sampleIndexConfig = new IndexConfig(_sampleIndexName, new[] { "job" }, new[] { "name" });
             _hyperspace.CreateIndex(_sampleDataFrame, _sampleIndexConfig);
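Note: the HyperspaceTests change replaces the hard-coded Windows path
"Resources\people.csv" with TestEnvironment.ResourceDirectory, which is what
lets this suite run on the new Ubuntu jobs; a backslash is not a directory
separator on Linux. The difference is easy to confirm from pwsh on each OS:

    # Prints '\' on Windows and '/' on Linux, so a literal "Resources\people.csv"
    # only resolves correctly on Windows agents.
    [IO.Path]::DirectorySeparatorChar
    Join-Path 'Resources' 'people.csv'   # builds the path with the correct separator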