forked from dotnet/spark
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathazure-pipelines-e2e-tests-template.yml
139 lines (120 loc) · 6.81 KB
/
azure-pipelines-e2e-tests-template.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
# Azure Pipelines template: runs the .NET for Apache Spark E2E test suite.
# For every entry in `tests`, one stage is generated (per Spark version) that:
#   1. downloads the build artifacts produced by the `Build` stage,
#   2. computes OS-specific paths/URLs for the Microsoft.Spark.Worker,
#   3. fetches Hadoop winutils (Windows only) and the matching Spark distro,
#   4. runs the E2E tests on each pool listed in the entry's `jobOptions`.
#
# Parameters:
#   tests                     - object: list of { version, jobOptions } entries.
#   backwardCompatibleRelease - string: released worker version used by the
#                               currently commented-out backward-compat steps.
#   forwardCompatibleRelease  - string: released source tag used by the
#                               currently commented-out forward-compat steps.
parameters:
- name: tests
  type: object
  default: {}
- name: backwardCompatibleRelease
  type: string
  default: ''
- name: forwardCompatibleRelease
  type: string
  default: ''

stages:
- ${{ each test in parameters.tests }}:
  - stage: E2E_Tests_${{ replace(test.version, '.', '_') }}
    displayName: E2E tests for Spark ${{ test.version }}
    dependsOn: Build
    jobs:
    - ${{ each option in test.jobOptions }}:
      - job: Run_${{ replace(option.pool, ' ', '_') }}
        pool: ${{ option.pool }}
        steps:
        - task: DownloadBuildArtifacts@0
          displayName: Download Build Artifacts
          inputs:
            artifactName: Microsoft.Spark.Binaries
            downloadPath: $(Build.ArtifactStagingDirectory)

        # Export OS-dependent paths/URLs as pipeline variables via the
        # `##vso[task.setvariable]` logging command, and make the worker
        # binary executable on Linux agents.
        # NOTE(review): download hosts are `github.com` (a GitHub mirror) —
        # confirm these should not be `github.com`.
        - pwsh: |
            $framework = "netcoreapp3.1"
            if ($env:AGENT_OS -eq 'Windows_NT') {
              $runtimeIdentifier = "win-x64"
            } else {
              $runtimeIdentifier = "linux-x64"
            }
            $pathSeparator = [IO.Path]::DirectorySeparatorChar
            $artifactPath = "$(Build.ArtifactStagingDirectory)${pathSeparator}Microsoft.Spark.Binaries"
            echo "##vso[task.setvariable variable=PATH_SEPARATOR]$pathSeparator"
            echo "##vso[task.setvariable variable=ArtifactPath]$artifactPath"
            $backwardCompatibleRelease = "${{ parameters.backwardCompatibleRelease }}"
            echo "##vso[task.setvariable variable=BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR]$(Build.BinariesDirectory)${pathSeparator}Microsoft.Spark.Worker-${backwardCompatibleRelease}"
            echo "##vso[task.setvariable variable=BACKWARD_COMPATIBLE_WORKER_URL]https://github.com/dotnet/spark/releases/download/v${backwardCompatibleRelease}/Microsoft.Spark.Worker.${framework}.${runtimeIdentifier}-${backwardCompatibleRelease}.zip"
            $dotnetWorkerDir = "${artifactPath}${pathSeparator}Microsoft.Spark.Worker${pathSeparator}${framework}${pathSeparator}${runtimeIdentifier}"
            echo "##vso[task.setvariable variable=CURRENT_DOTNET_WORKER_DIR]$dotnetWorkerDir"
            if ($env:AGENT_OS -eq 'Linux') {
              chmod +x "${dotnetWorkerDir}${pathSeparator}Microsoft.Spark.Worker"
            }
          displayName: 'Setup Variables and Permissions'

        - checkout: self
          path: s$(PATH_SEPARATOR)dotnet-spark

        - task: CopyFiles@2
          displayName: Copy jars
          inputs:
            sourceFolder: $(ArtifactPath)$(PATH_SEPARATOR)Jars
            contents: '**$(PATH_SEPARATOR)*.jar'
            targetFolder: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark$(PATH_SEPARATOR)src$(PATH_SEPARATOR)scala

        # Windows agents need winutils.exe for Hadoop file-system calls.
        - task: PowerShell@2
          condition: eq( variables['Agent.OS'], 'Windows_NT' )
          displayName: Download Winutils.exe
          inputs:
            workingDirectory: $(Build.BinariesDirectory)
            pwsh: true
            targetType: inline
            script: |
              echo "Download Hadoop utils for Windows."
              curl -k -L -o hadoop.zip https://github.com/steveloughran/winutils/releases/download/tag_2017-08-29-hadoop-2.8.1-native/hadoop-2.8.1.zip
              unzip hadoop.zip
              New-Item -ItemType Directory -Force -Path hadoop\bin
              cp hadoop-2.8.1\winutils.exe hadoop\bin

        - pwsh: |
            echo "Downloading Spark ${{ test.version }}"
            curl -k -L -o spark-${{ test.version }}.tgz https://archive.apache.org/dist/spark/spark-${{ test.version }}/spark-${{ test.version }}-bin-hadoop2.7.tgz
            tar xzvf spark-${{ test.version }}.tgz
          displayName: 'Download Spark Distro ${{ test.version }}'
          workingDirectory: $(Build.BinariesDirectory)

        - task: DotNetCoreCLI@2
          displayName: 'E2E tests'
          inputs:
            command: test
            projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
            arguments: '--configuration $(buildConfiguration) ${{ option.testOptions }}'
            workingDirectory: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark
          env:
            HADOOP_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
            SPARK_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
            DOTNET_WORKER_DIR: $(CURRENT_DOTNET_WORKER_DIR)

        # Backward-compatibility tests — disabled; kept for reference.
        # - pwsh: |
        #     echo "Downloading ${env:BACKWARD_COMPATIBLE_WORKER_URL}"
        #     curl -k -L -o Microsoft.Spark.Worker-${{ parameters.backwardCompatibleRelease }}.zip ${env:BACKWARD_COMPATIBLE_WORKER_URL}
        #     unzip Microsoft.Spark.Worker-${{ parameters.backwardCompatibleRelease }}.zip -d $([System.IO.Directory]::GetParent($env:BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR).FullName)
        #     if ($env:AGENT_OS -eq 'Linux') {
        #       chmod +x "${env:BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR}${env:PATH_SEPARATOR}Microsoft.Spark.Worker"
        #     }
        #   displayName: 'Setup Backward Compatible Microsoft Spark Worker ${{ parameters.backwardCompatibleRelease }}'
        #   workingDirectory: $(Build.BinariesDirectory)
        # - task: DotNetCoreCLI@2
        #   displayName: 'E2E Backward Compatibility Tests'
        #   inputs:
        #     command: test
        #     projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
        #     arguments: '--configuration $(buildConfiguration) ${{ option.backwardCompatibleTestOptions }}'
        #     workingDirectory: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark
        #   env:
        #     HADOOP_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
        #     SPARK_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
        #     DOTNET_WORKER_DIR: $(BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR)

        # Check out the forward-compatible release source; requires a
        # `forwardCompatibleRelease` repository resource in the consuming pipeline.
        - checkout: forwardCompatibleRelease
          path: s$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}

        # Forward-compatibility tests — disabled; kept for reference.
        # - task: Maven@3
        #   displayName: 'Maven build src for forward compatible release v${{ parameters.forwardCompatibleRelease }}'
        #   inputs:
        #     mavenPomFile: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}$(PATH_SEPARATOR)src$(PATH_SEPARATOR)scala$(PATH_SEPARATOR)pom.xml
        # - task: DotNetCoreCLI@2
        #   displayName: 'E2E Forward Compatibility Tests'
        #   inputs:
        #     command: test
        #     projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
        #     arguments: '--configuration $(buildConfiguration) ${{ option.forwardCompatibleTestOptions }}'
        #     workingDirectory: $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}
        #   env:
        #     HADOOP_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
        #     SPARK_HOME: $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
        #     DOTNET_WORKER_DIR: $(CURRENT_DOTNET_WORKER_DIR)