forked from OKDP/jupyterlab-docker
-
Notifications
You must be signed in to change notification settings - Fork 0
85 lines (80 loc) · 2.46 KB
/
build-test-spark.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
---
# Reusable workflow: builds, tests, and pushes the Jupyter Spark images.
# Builds pyspark-notebook first, then all-spark-notebook on top of it,
# delegating each image to the shared docker-build-test-push-latest.yml
# workflow.
name: Build, test, and push jupyter Spark images
on:
  workflow_call:
    inputs:
      python_version:
        description: Python version
        required: true
        type: string
      spark_download_url:
        description: Spark dist download url
        required: true
        type: string
      spark_version:
        description: Spark version
        required: true
        type: string
      java_version:
        description: Java version
        required: true
        type: string
      scala_version:
        description: Scala version
        required: true
        type: string
      hadoop_version:
        description: Hadoop version
        required: true
        type: string
      python_dev_tag:
        description: Tag to use for latest base images (foundation, minimal, etc)
        required: true
        type: string
      spark_dev_tag:
        description: Tag to use for latest pyspark images (pyspark, all-spark, etc)
        required: true
        type: string
      registry:
        # Was a copy-paste of a tag-list description; this input is the
        # target container registry the images are pushed to.
        description: The container registry to push the built images to
        required: true
        type: string
      runs-on:
        description: GitHub Actions Runner image
        required: true
        type: string
    secrets:
      registry_username:
        required: true
      registry_token:
        required: true
# NOTE(review): workflow-level `defaults` do not propagate into jobs that
# invoke reusable workflows via `uses:` — the called workflow must set its
# own working directory. Kept as-is in case plain `run:` jobs are added.
defaults:
  run:
    working-directory: ./docker-stacks
jobs:
  # Build pyspark-notebook on top of the scipy-notebook base image.
  pyspark:
    uses: ./.github/workflows/docker-build-test-push-latest.yml
    with:
      parent-image: scipy-notebook:${{ inputs.python_dev_tag }}
      image: pyspark-notebook:${{ inputs.spark_dev_tag }}
      registry: ${{ inputs.registry }}
      runs-on: ${{ inputs.runs-on }}
      # Multi-line plain scalar: YAML folds these lines into a single
      # space-separated string of key=value pairs.
      # NOTE(review): verify the called workflow splits build-args on
      # whitespace (docker/build-push-action expects newline-separated).
      build-args:
        spark_download_url=${{ inputs.spark_download_url }}
        spark_version=${{ inputs.spark_version }}
        openjdk_version=${{ inputs.java_version }}
        scala_version=${{ inputs.scala_version }}
        hadoop_version=${{ inputs.hadoop_version }}
    secrets:
      registry_username: ${{ secrets.registry_username }}
      registry_token: ${{ secrets.registry_token }}
  # Build all-spark-notebook on top of the pyspark image produced above.
  all-spark:
    uses: ./.github/workflows/docker-build-test-push-latest.yml
    with:
      parent-image: pyspark-notebook:${{ inputs.spark_dev_tag }}
      image: all-spark-notebook:${{ inputs.spark_dev_tag }}
      registry: ${{ inputs.registry }}
      runs-on: ${{ inputs.runs-on }}
    secrets:
      registry_username: ${{ secrets.registry_username }}
      registry_token: ${{ secrets.registry_token }}
    needs: [pyspark]