forked from ROCm/hipBLAS
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Jenkinsfile
610 lines (524 loc) · 23.1 KB
/
Jenkinsfile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
#!/usr/bin/env groovy
// Generated from snippet generator 'properties; set job properties'
// Job-wide settings: keep only the last 10 builds, never run two builds of
// this job concurrently, and allow any other job ('*') to copy artifacts
// archived by this job (via the copy-artifact plugin).
properties([buildDiscarder(logRotator(
artifactDaysToKeepStr: '',
artifactNumToKeepStr: '',
daysToKeepStr: '',
numToKeepStr: '10')),
disableConcurrentBuilds(),
// parameters([booleanParam( name: 'push_image_to_docker_hub', defaultValue: false, description: 'Push hipblas image to rocm docker-hub' )]),
[$class: 'CopyArtifactPermissionProperty', projectNames: '*']
])
////////////////////////////////////////////////////////////////////////
// -- AUXILIARY HELPER FUNCTIONS
// import hudson.FilePath;
import java.nio.file.Path;
////////////////////////////////////////////////////////////////////////
// Return build number of upstream job
@NonCPS
int get_upstream_build_num( )
{
    // Build number of the upstream job that triggered this run;
    // 0 when the build was started manually / by SCM polling.
    def cause = currentBuild.rawBuild.getCause( hudson.model.Cause$UpstreamCause )
    return ( cause == null ) ? 0 : cause.getUpstreamBuild( )
}
////////////////////////////////////////////////////////////////////////
// Return project name of upstream job
@NonCPS
String get_upstream_build_project( )
{
    // Full name of the upstream job that triggered this run;
    // null when the build was started manually / by SCM polling.
    def cause = currentBuild.rawBuild.getCause( hudson.model.Cause$UpstreamCause )
    return ( cause == null ) ? null : cause.getUpstreamProject( )
}
////////////////////////////////////////////////////////////////////////
// Calculate the relative path between two sub-directories from a common root
@NonCPS
String g_relativize( String root_string, String rel_source, String rel_build )
{
    // Resolve both sub-directories against the common root, then express the
    // source directory relative to the build directory.
    Path root = new File( root_string ).toPath( )
    return root.resolve( rel_build ).relativize( root.resolve( rel_source ) ).toString( )
}
////////////////////////////////////////////////////////////////////////
// Construct the relative path of the build directory
void build_directory_rel( project_paths paths, compiler_data hcc_args )
{
    // Construct the workspace-relative build directory for the project and
    // store it in paths.project_build_prefix.
    // NOTE: hcc_args is currently unused — release and debug no longer get
    // separate build directories — but the parameter is kept so existing
    // call sites remain valid.  (Dead commented-out release/debug variant
    // removed.)
    paths.project_build_prefix = paths.build_prefix + '/' + paths.project_name;
}
////////////////////////////////////////////////////////////////////////
// Lots of images are created above; no apparent way to delete images:tags with docker global variable
def docker_clean_images( String org, String image_name )
{
    // Grep the local image list for org/image; grep exits 0 when at least
    // one matching image exists.
    int found = sh( script: "docker images | grep \"${org}/${image_name}\"", returnStatus: true )
    if( found != 0 )
    {
        // Nothing matched — nothing to remove.
        return
    }
    // Map each matching image to name:tag and remove it.
    sh "docker images | grep \"${org}/${image_name}\" | awk '{print \$1 \":\" \$2}' | xargs docker rmi"
}
////////////////////////////////////////////////////////////////////////
// -- BUILD RELATED FUNCTIONS
////////////////////////////////////////////////////////////////////////
// Checkout source code, source dependencies and update version numbers
// Returns a relative path to the directory where the source exists in the workspace
// Checkout the project source and stamp the version in CMakeLists.txt with
// the Jenkins build id.  Returns the workspace-relative source directory
// (also stored in paths.project_src_prefix).
// FIX: the documented contract promised a returned path but the function was
// declared void; it now returns the path (callers that ignore it are
// unaffected).
String checkout_and_version( project_paths paths )
{
    paths.project_src_prefix = paths.src_prefix + '/' + paths.project_name
    dir( paths.project_src_prefix )
    {
        // Clean checkout of the project into the per-project source dir.
        checkout([
            $class: 'GitSCM',
            branches: scm.branches,
            doGenerateSubmoduleConfigurations: scm.doGenerateSubmoduleConfigurations,
            extensions: scm.extensions + [[$class: 'CleanCheckout']],
            userRemoteConfigs: scm.userRemoteConfigs
        ])
        if( fileExists( 'CMakeLists.txt' ) )
        {
            // Replace the 4th component of any x.y.z.w version string in
            // CMakeLists.txt with the Jenkins build id.
            def cmake_version_file = readFile( 'CMakeLists.txt' ).trim()
            cmake_version_file = cmake_version_file.replaceAll(/(\d+\.)(\d+\.)(\d+\.)\d+/, "\$1\$2\$3${env.BUILD_ID}")
            writeFile( file: 'CMakeLists.txt', text: cmake_version_file )
        }
    }
    return paths.project_src_prefix
}
////////////////////////////////////////////////////////////////////////
// This creates the docker image that we use to build the project in
// The docker images contains all dependencies, including OS platform, to build
def docker_build_image( docker_data docker_args, project_paths paths )
{
// Fixed local tag for the build-environment image; rebuilt (with docker
// layer caching) on every run.
String build_image_name = "build-hipblas-hip-artifactory"
def build_image = null
dir( paths.project_src_prefix )
{
// Pass the host user's uid into the image so files created inside the
// container are owned by the jenkins user on the host.
def user_uid = sh( script: 'id -u', returnStdout: true ).trim()
// Docker 17.05 introduced the ability to use ARG values in FROM statements
// Docker inspect failing on FROM statements with ARG https://issues.jenkins-ci.org/browse/JENKINS-44836
// build_image = docker.build( "${paths.project_name}/${build_image_name}:latest", "--pull -f docker/${build_docker_file} --build-arg user_uid=${user_uid} --build-arg base_image=${from_image} ." )
// JENKINS-44836 workaround by using a bash script instead of docker.build()
sh "docker build -t ${paths.project_name}/${build_image_name}:latest -f docker/${docker_args.build_docker_file} ${docker_args.docker_build_args} --build-arg user_uid=${user_uid} --build-arg base_image=${docker_args.from_image} ."
// Wrap the freshly-built tag in a Jenkins docker image handle for callers.
build_image = docker.image( "${paths.project_name}/${build_image_name}:latest" )
}
return build_image
}
////////////////////////////////////////////////////////////////////////
// This encapsulates the cmake configure, build and package commands
// Leverages docker containers to encapsulate the build in a fixed environment
Boolean docker_build_inside_image( def build_image, compiler_data compiler_args, docker_data docker_args, project_paths paths )
{
// Construct a relative path from build directory to src directory; used to invoke cmake
// NOTE(review): rel_path_to_src is computed but never referenced below —
// presumably a leftover from an earlier direct cmake invocation (the build
// now runs paths.build_command); confirm before removing.
String rel_path_to_src = g_relativize( pwd( ), paths.project_src_prefix, paths.project_build_prefix )
// Debug builds of the test client carry a '-d' suffix on the executable name.
String build_type_postfix = null
if( compiler_args.build_config.equalsIgnoreCase( 'release' ) )
{
build_type_postfix = ""
}
else
{
build_type_postfix = "-d"
}
// Ubuntu-based targets consume rocBLAS .deb packages from the upstream
// rocBLAS develop job; the fedora target consumes .rpm packages.
if( paths.project_name.equalsIgnoreCase( 'hipblas-ubuntu' ) || paths.project_name.equalsIgnoreCase( 'hipblas-hcc-ctu' ) )
{
String rocblas_archive_path='hcc-rocm-ubuntu';
// This invokes 'copy artifact plugin' to copy latest archive from rocblas project
step([$class: 'CopyArtifact', filter: "Release/${rocblas_archive_path}/*.deb",
fingerprintArtifacts: true, projectName: 'ROCmSoftwarePlatform/rocBLAS/develop', flatten: true,
selector: [$class: 'StatusBuildSelector', stable: false],
target: "${paths.project_build_prefix}" ])
}
else if( paths.project_name.equalsIgnoreCase( 'hipblas-fedora' ) )
{
String rocblas_archive_path='hcc-rocm-fedora';
// This invokes 'copy artifact plugin' to copy latest archive from rocblas project
step([$class: 'CopyArtifact', filter: "Release/${rocblas_archive_path}/*.rpm",
fingerprintArtifacts: true, projectName: 'ROCmSoftwarePlatform/rocBLAS/develop', flatten: true,
selector: [$class: 'StatusBuildSelector', stable: false],
target: "${paths.project_build_prefix}" ])
}
// Everything below runs inside the build container.
build_image.inside( docker_args.docker_run_args )
{
withEnv(["CXX=${compiler_args.compiler_path}", 'CLICOLOR_FORCE=1'])
{
// Build library & clients
sh """#!/usr/bin/env bash
set -x
cd ${paths.project_build_prefix}
${paths.build_command}
"""
}
stage( "Test ${compiler_args.compiler_name} ${compiler_args.build_config}" )
{
// Cap the maximum amount of testing to be a few hours; assume failure if the time limit is hit
timeout(time: 1, unit: 'HOURS')
{
// NOTE(review): the staging path is hard-coded to build/release even when
// build_config is not Release — confirm intended if a Debug pipeline is added.
sh """#!/usr/bin/env bash
set -x
cd ${paths.project_build_prefix}/build/release/clients/staging
./hipblas-test${build_type_postfix} --gtest_output=xml --gtest_color=yes
"""
// Publish the gtest XML results to Jenkins.
junit "${paths.project_build_prefix}/build/release/clients/staging/*.xml"
}
String docker_context = "${compiler_args.build_config}/${compiler_args.compiler_name}"
// Packaging (deb/rpm) is only performed for hcc-based compilers.
if( compiler_args.compiler_name.toLowerCase( ).startsWith( 'hcc-' ) )
{
sh """#!/usr/bin/env bash
set -x
cd ${paths.project_build_prefix}/build/release
make package
"""
if( paths.project_name.equalsIgnoreCase( 'hipblas-ubuntu' ) )
{
// Stage the generated .deb into docker_context and archive it.
sh """#!/usr/bin/env bash
set -x
rm -rf ${docker_context} && mkdir -p ${docker_context}
mv ${paths.project_build_prefix}/build/release/*.deb ${docker_context}
dpkg -c ${docker_context}/*.deb
"""
archiveArtifacts artifacts: "${docker_context}/hipblas-*.deb", fingerprint: true
}
else if( paths.project_name.equalsIgnoreCase( 'hipblas-fedora' ) )
{
// Stage the generated .rpm (plus the copied rocblas rpm) and archive it.
sh """#!/usr/bin/env bash
set -x
rm -rf ${docker_context} && mkdir -p ${docker_context}
mv ${paths.project_build_prefix}/build/release/*.rpm ${docker_context}
# Temp rocblas mv because repo.radeon.com does not have rpms for rocblas
mv ${paths.project_build_prefix}/*.rpm ${docker_context}
rpm -qlp ${docker_context}/*.rpm
"""
archiveArtifacts artifacts: "${docker_context}/hipblas-*.rpm", fingerprint: true
}
}
}
}
// Failures surface as exceptions thrown by the steps above, not as a false
// return value.
return true
}
////////////////////////////////////////////////////////////////////////
// This builds a fresh docker image FROM a clean base image, with no build dependencies included
// Uploads the new docker image to internal artifactory
// String docker_upload_artifactory( String hcc_ver, String artifactory_org, String from_image, String hipblas_src_rel, String build_dir_rel )
String docker_upload_artifactory( compiler_data compiler_args, docker_data docker_args, project_paths hipblas_paths, String job_name )
{
def hipblas_install_image = null
// Tag for the install-test image; returned so the caller can clean it up.
String image_name = "hipblas-hip-${compiler_args.compiler_name}-ubuntu-16.04"
// Directory where the build stage left the generated package(s).
String docker_context = "${compiler_args.build_config}/${compiler_args.compiler_name}"
stage( "Install ${compiler_args.compiler_name} ${compiler_args.build_config}" )
{
// We copy the docker files into the bin directory where the .deb lives so that it's a clean build every time
sh """#!/usr/bin/env bash
set -x
mkdir -p ${docker_context}
cp -r ${hipblas_paths.project_src_prefix}/docker/* ${docker_context}
"""
// Docker 17.05 introduced the ability to use ARG values in FROM statements
// Docker inspect failing on FROM statements with ARG https://issues.jenkins-ci.org/browse/JENKINS-44836
// hipblas_install_image = docker.build( "${job_name}/${image_name}:${env.BUILD_NUMBER}", "--pull -f ${build_dir_rel}/dockerfile-hipblas-ubuntu-16.04 --build-arg base_image=${from_image} ${build_dir_rel}" )
// JENKINS-44836 workaround by using a bash script instead of docker.build()
sh """docker build -t ${job_name}/${image_name} --pull -f ${docker_context}/${docker_args.install_docker_file} \
--build-arg base_image=${docker_args.from_image} ${docker_context}"""
// Building this image exercises installing the freshly-built package.
hipblas_install_image = docker.image( "${job_name}/${image_name}" )
// NOTE: Don't push to artifactory yet, just test install package
// The connection to artifactory can fail sometimes, but this should not be treated as a build fail
// try
// {
// // Don't push pull requests to artifactory, these tend to accumulate over time
// if( env.BRANCH_NAME.toLowerCase( ).startsWith( 'pr-' ) )
// {
// println 'Pull Request (PR-xxx) detected; NOT pushing to artifactory'
// }
// else
// {
// docker.withRegistry('http://compute-artifactory:5001', 'artifactory-cred' )
// {
// hipblas_install_image.push( "${env.BUILD_NUMBER}" )
// hipblas_install_image.push( 'latest' )
// }
// }
// }
// catch( err )
// {
// currentBuild.result = 'SUCCESS'
// }
}
return image_name
}
////////////////////////////////////////////////////////////////////////
// Uploads the new docker image to the public docker-hub
// Retag the locally-built install image into the remote org and push it to
// the public docker-hub.  A failed push is logged but never fails the build.
def docker_upload_dockerhub( String local_org, String image_name, String remote_org )
{
    stage( 'docker-hub' )
    {
        // Do not treat failures to push to docker-hub as a build fail
        try
        {
            sh """#!/usr/bin/env bash
set -x
echo inside sh
docker tag ${local_org}/${image_name} ${remote_org}/${image_name}
"""
            // FIX: declare with 'def' so the handle stays local instead of
            // leaking into the script binding.
            def docker_hub_image = docker.image( "${remote_org}/${image_name}" )
            docker.withRegistry('https://registry.hub.docker.com', 'docker-hub-cred' )
            {
                docker_hub_image.push( "${env.BUILD_NUMBER}" )
                docker_hub_image.push( 'latest' )
            }
        }
        catch( err )
        {
            // FIX: log the failure instead of swallowing it silently, then
            // keep the build green as before.
            echo "docker-hub push failed: ${err}"
            currentBuild.result = 'SUCCESS'
        }
    }
}
////////////////////////////////////////////////////////////////////////
// hip_integration_testing
// This function sets up compilation and testing of HiP on a compiler downloaded from an upstream build
// Integration testing is centered around docker and constructing clean test environments every time
// NOTES: I have implemented integration testing 3 different ways, and I've come to the conclusion nothing is perfect
// 1. I've tried having HCC push the test compiler to artifactory, and having HiP download the test docker image from artifactory
// a. The act of uploading and downloading images from artifactory takes minutes
// b. There is no good way of deleting images from a repository. You have to use an arcane CURL command and I don't know how
// to keep the password secret. These test integration images are meant to be ephemeral.
// 2. I tried 'docker save' to export a docker image into a tarball, and transferring the image through 'copy artifacts plugin'
// a. The HCC docker image uncompressed is over 1GB
// b. Compressing the docker image takes even longer than uploading the image to artifactory
// 3. Download the HCC .deb and dockerfile through 'copy artifacts plugin'. Create a new HCC image on the fly
// a. There is inefficiency in building a new ubuntu image and installing HCC twice (once in HCC build, once here)
// b. This solution doesn't scale when we start testing downstream libraries
// I've implemented solution #3 above, probably transitioning to #2 down the line (probably without compression)
String hip_integration_testing( String inside_args, String job, String build_config )
{
// NOTE(review): this helper appears stale and is not invoked anywhere in
// this file.  The calls below no longer match the current helper signatures
// (checkout_and_version / build_directory_rel / docker_build_image /
// docker_build_inside_image now take project_paths/compiler_data/docker_data
// objects, not strings), and the function declares a String return type but
// never returns a value.  Confirm and update before re-enabling.
// Attempt to make unique docker image names for each build, to support concurrent builds
// Mangle docker org name with upstream build info
String testing_org_name = 'hip-test-' + get_upstream_build_project( ).replaceAll('/','-') + '-' + get_upstream_build_num( )
// Tag image name with this build number
String hip_test_image_name = "hip:${env.BUILD_NUMBER}"
def hipblas_integration_image = null
dir( 'integration-testing' )
{
// Start from an empty directory so stale artifacts cannot leak in.
deleteDir( )
// This invokes 'copy artifact plugin' to copy archived files from upstream build
step([$class: 'CopyArtifact', filter: 'archive/**/*.deb, docker/dockerfile-*',
fingerprintArtifacts: true, projectName: get_upstream_build_project( ), flatten: true,
selector: [$class: 'TriggeredBuildSelector', allowUpstreamDependencies: false, fallbackToLastSuccessful: false, upstreamFilterStrategy: 'UseGlobalSetting'],
target: '.' ])
// Build a fresh HIP test image from the copied dockerfile + .deb.
docker.build( "${testing_org_name}/${hip_test_image_name}", "-f dockerfile-hip-ubuntu-16.04 ." )
}
// Checkout source code, dependencies and version files
String hipblas_src_rel = checkout_and_version( job )
// Construct a binary directory path based on build config
String hipblas_bin_rel = build_directory_rel( build_config );
// Build hipblas inside of the build environment
hipblas_integration_image = docker_build_image( job, testing_org_name, '', hipblas_src_rel, "${testing_org_name}/${hip_test_image_name}" )
docker_build_inside_image( hipblas_integration_image, inside_args, job, '', build_config, hipblas_src_rel, hipblas_bin_rel )
docker_clean_images( testing_org_name, '*' )
}
// Docker related variables gathered together to reduce parameter bloat on function calls
class docker_data implements Serializable
{
// Base image passed as the dockerfile's base_image build-arg
String from_image
// Dockerfile (under docker/) used to create the build-environment image
String build_docker_file
// Dockerfile used to create the install-test image
String install_docker_file
// Arguments passed to 'docker run' (devices, runtime, group membership)
String docker_run_args
// Extra arguments passed to 'docker build' (e.g. ' --pull')
String docker_build_args
}
// Compiler related variables gathered together to reduce parameter bloat on function calls
class compiler_data implements Serializable
{
// Label used in stage names and image tags, e.g. 'hcc-rocm-ubuntu'
String compiler_name
// CMake-style build configuration, e.g. 'Release'
String build_config
// Compiler exported through the CXX environment variable in the container
String compiler_path
}
// Paths variables bundled together to reduce parameter bloat on function calls
class project_paths implements Serializable
{
// Project variant name, e.g. 'hipblas-ubuntu'
String project_name
// Workspace-relative prefix under which sources are checked out
String src_prefix
// src_prefix + '/' + project_name; set by checkout_and_version()
String project_src_prefix
// Workspace-relative prefix under which builds are performed
String build_prefix
// build_prefix + '/' + project_name; set by build_directory_rel()
String project_build_prefix
// Shell command executed inside the container to build the project
String build_command
}
////////////////////////////////////////////////////////////////////////
// -- MAIN
// Following this line is the start of MAIN of this Jenkinsfile
// This defines a common build pipeline used by most targets
def build_pipeline( compiler_data compiler_args, docker_data docker_args, project_paths hipblas_paths, def docker_inside_closure )
{
// Keep ANSI color output from the build tools readable in the Jenkins log.
ansiColor( 'vga' )
{
// NOTE: build_succeeded does not appear to be local to each function invocation. I couldn't use it where each
// node had a different success value.
def build_succeeded = false;
stage( "Build ${compiler_args.compiler_name} ${compiler_args.build_config}" )
{
// Checkout source code, dependencies and version files
checkout_and_version( hipblas_paths )
// Construct a binary directory path based on build config
build_directory_rel( hipblas_paths, compiler_args );
// Create/reuse a docker image that represents the hipblas build environment
def hipblas_build_image = docker_build_image( docker_args, hipblas_paths )
// Print system information for the log
hipblas_build_image.inside( docker_args.docker_run_args, docker_inside_closure )
// Build hipblas inside of the build environment
build_succeeded = docker_build_inside_image( hipblas_build_image, compiler_args, docker_args, hipblas_paths )
}
// After a successful build, upload a docker image of the results
String job_name = env.JOB_NAME.toLowerCase( )
String hipblas_image_name = docker_upload_artifactory( compiler_args, docker_args, hipblas_paths, job_name )
// if( params.push_image_to_docker_hub )
// {
// docker_upload_dockerhub( job_name, hipblas_image_name, 'rocm' )
// docker_clean_images( 'rocm', hipblas_image_name )
// }
// Remove the locally-tagged install image to keep the node's disk clean.
docker_clean_images( job_name, hipblas_image_name )
}
}
// Disabling hcc-ctu builds as we now build hipblas with native host compilers
//hcc_ctu:
// Platform builds run in parallel.  The hcc_ctu and nvcc branches are wrapped
// in try/catch so a failure there only marks the overall build UNSTABLE
// instead of failing the run; rocm_ubuntu failures fail the build.
parallel hcc_ctu:
{
    try
    {
        node( 'docker && gfx900' )
        {
            // Build-environment image and docker invocation arguments.
            def docker_args = new docker_data(
                from_image:'compute-artifactory:5001/rocm-developer-tools/hip/master/hip-hcc-ctu-ubuntu-16.04:latest',
                build_docker_file:'dockerfile-build-ubuntu',
                install_docker_file:'dockerfile-install-ubuntu',
                docker_run_args:'--device=/dev/kfd --device=/dev/dri --group-add=video',
                docker_build_args:' --pull' )
            // Compiler used inside the container (exported via CXX).
            def compiler_args = new compiler_data(
                compiler_name:'hcc-ctu',
                build_config:'Release',
                compiler_path:'/opt/rocm/bin/hcc' )
            // Source/build layout plus the in-container build command.
            def hipblas_paths = new project_paths(
                project_name:'hipblas-hcc-ctu',
                src_prefix:'src',
                build_prefix:'src',
                build_command: 'dpkg -x rocblas-*.deb ~/install_dir; ./install.sh -c -p ~/install_dir/opt/rocm' )
            // Closure run inside the build image to log tool versions.
            def print_version_closure = {
                sh """
set -x
/opt/rocm/bin/hcc --version
"""
            }
            build_pipeline( compiler_args, docker_args, hipblas_paths, print_version_closure )
        }
    }
    catch( err )
    {
        // FIX: log the failure before downgrading the result; the original
        // swallowed the exception silently.
        echo "hcc_ctu branch failed: ${err}"
        currentBuild.result = 'UNSTABLE'
    }
},
rocm_ubuntu:
{
    node( 'docker && rocm && gfx900' )
    {
        def hcc_docker_args = new docker_data(
            from_image:'rocm/dev-ubuntu-16.04:1.7.1',
            build_docker_file:'dockerfile-build-ubuntu',
            install_docker_file:'dockerfile-install-ubuntu',
            docker_run_args:'--device=/dev/kfd --device=/dev/dri --group-add=video',
            docker_build_args:' --pull' )
        def hcc_compiler_args = new compiler_data(
            compiler_name:'hcc-rocm-ubuntu',
            build_config:'Release',
            compiler_path:'g++' )
        def hipblas_paths = new project_paths(
            project_name:'hipblas-ubuntu',
            src_prefix:'src',
            build_prefix:'src',
            build_command: 'dpkg -x rocblas-*.deb ~/install_dir; ./install.sh -c -p ~/install_dir/opt/rocm' )
        def print_version_closure = {
            sh """
set -x
/opt/rocm/bin/rocm_agent_enumerator -t ALL
/opt/rocm/bin/hcc --version
"""
        }
        build_pipeline( hcc_compiler_args, hcc_docker_args, hipblas_paths, print_version_closure )
    }
},
// rocm_fedora branch intentionally disabled (kept for reference):
// rocm_fedora:
// {
// node( 'docker && rocm' )
// {
// def hcc_docker_args = new docker_data(
// from_image:'rocm/dev-fedora-24:latest',
// build_docker_file:'dockerfile-build-fedora',
// install_docker_file:'dockerfile-install-fedora',
// docker_run_args:'--device=/dev/kfd',
// docker_build_args:' --pull' )
// def hcc_compiler_args = new compiler_data(
// compiler_name:'hcc-rocm-fedora',
// build_config:'Release',
// compiler_path:'g++' )
// def hipblas_paths = new project_paths(
// project_name:'hipblas-fedora',
// src_prefix:'src',
// build_prefix:'src',
// build_command: 'sudo rpm -iv rocblas-*.rpm; ./install.sh -c' )
// // build_command: 'sudo dnf install -y rocblas-*.rpm; ./install.sh -c' )
// def print_version_closure = {
// sh """
// set -x
// /opt/rocm/bin/rocm_agent_enumerator -t ALL
// /opt/rocm/bin/hcc --version
// """
// }
// build_pipeline( hcc_compiler_args, hcc_docker_args, hipblas_paths, print_version_closure )
// }
// },
nvcc:
{
    try
    {
        node( 'docker && cuda' )
        {
            def hcc_docker_args = new docker_data(
                from_image:'nvidia/cuda:9.0-devel',
                build_docker_file:'dockerfile-build-nvidia-cuda',
                install_docker_file:'dockerfile-install-nvidia-cuda',
                docker_run_args:'--runtime=nvidia',
                docker_build_args:' --pull' )
            def hcc_compiler_args = new compiler_data(
                compiler_name:'nvcc-9.0',
                build_config:'Release',
                compiler_path:'g++' )
            def hipblas_paths = new project_paths(
                project_name:'hipblas-cuda',
                src_prefix:'src',
                build_prefix:'src',
                build_command: './install.sh -c' )
            // build_command: 'sudo apt-get install -y cuda-cublas-dev-9-*; ./install.sh -c' )
            def print_version_closure = {
                sh """
set -x
nvidia-smi
nvcc --version
"""
            }
            build_pipeline( hcc_compiler_args, hcc_docker_args, hipblas_paths, print_version_closure )
        }
    }
    catch( err )
    {
        // FIX: log the failure before downgrading the result; the original
        // swallowed the exception silently.
        echo "nvcc branch failed: ${err}"
        currentBuild.result = 'UNSTABLE'
    }
}