ci: Test dockerfile before push
https://docs.docker.com/build/ci/github-actions/test-before-push/

- Modify the Docker workflow so that building and pushing the `ubi-no_model` and `no_model` docker images are handled in separate steps.
- Build only for the `linux/amd64` platform during the build stage.
- Test the Docker image directly from the build output instead of from the pushed image.
- Push for both the `linux/amd64` and `linux/arm64` platforms during the push stage. The resulting build, test, then push pattern is sketched below.

Signed-off-by: 陳鈞 <jim60105@gmail.com>
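
For orientation, here is a minimal sketch of the test-before-push pattern from the linked Docker documentation. It is not the exact workflow from this repository: the job id, image name, and the placeholder test command are illustrative assumptions, and the registry login step is omitted; the real steps appear in the diff below.

jobs:
  build-test-push:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      # QEMU and Buildx are required for the multi-platform push at the end.
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Build only linux/amd64 and load the result into the local Docker
      # daemon so it can be started with `docker run` in the next step.
      - name: Build
        uses: docker/build-push-action@v4
        id: build
        with:
          context: .
          load: true
          platforms: linux/amd64
          tags: example/app:test

      # Smoke-test the image that was just built; nothing has been pushed yet.
      - name: Test
        run: docker run --rm ${{ steps.build.outputs.imageid }}

      # Rebuild (the amd64 layers are typically served from the build cache)
      # and push the multi-platform image. A docker/login-action step would
      # normally come before this.
      - name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: .
          push: true
          platforms: linux/amd64, linux/arm64
          tags: example/app:latest

The two jobs in this commit apply the same pattern, with the added detail that the test step runs the image against a sample `en.webm` file and checks the generated `en.srt`.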
jim60105 committed Jan 8, 2024
1 parent bbf5df2 commit 56a9fb2
Showing 1 changed file with 32 additions and 13 deletions.
45 changes: 32 additions & 13 deletions .github/workflows/docker_publish.yml
@@ -32,22 +32,20 @@ jobs:
           token: ${{ secrets.CR_PAT }}
           tag: ubi-no_model
 
-      - name: Build and push:ubi-no_model
+      - name: Build:ubi-no_model
         uses: docker/build-push-action@v4
         id: build
         with:
           context: .
           file: ./ubi.Dockerfile
-          push: true
+          load: true
           target: no_model
           tags: ${{ steps.setup.outputs.tags }}
           labels: ${{ steps.setup.outputs.labels }}
-          platforms: linux/amd64, linux/arm64
+          platforms: linux/amd64
 
       - name: Test ubi-no_model docker image
         run: |
-          docker run --group-add 0 -v ".:/app" ghcr.io/jim60105/whisperx:ubi-no_model@${{ steps.build.outputs.digest }} -- --model base --language en --device cpu --compute_type int8 --output_format srt .github/workflows/test/en.webm;
+          docker run --group-add 0 -v ".:/app" ${{ steps.build.outputs.imageid }} -- --model base --language en --device cpu --compute_type int8 --output_format srt .github/workflows/test/en.webm;
           if [ ! -f en.srt ]; then
             echo "The en.srt file does not exist"
             exit 1
@@ -60,12 +58,23 @@ jobs:
           fi
           echo "Test passed."
+      - name: Build and push:ubi-no_model
+        uses: docker/build-push-action@v4
+        with:
+          context: .
+          file: ./ubi.Dockerfile
+          push: true
+          target: no_model
+          tags: ${{ steps.setup.outputs.tags }}
+          labels: ${{ steps.setup.outputs.labels }}
+          platforms: linux/amd64, linux/arm64
 
   # Run the no_model build first ensure that the code at least builds
   docker-no_model:
     # The type of runner that the job will run on
     runs-on: ubuntu-latest
     outputs:
-      digest: ${{ steps.build.outputs.digest }}
+      digest: ${{ steps.publish.outputs.digest }}
     # Steps represent a sequence of tasks that will be executed as part of the job
     steps:
       - name: Checkout
@@ -79,24 +88,22 @@ jobs:
         with:
           token: ${{ secrets.CR_PAT }}
 
-      - name: Build and push:no_model
+      - name: Build:no_model
         uses: docker/build-push-action@v4
         id: build
         with:
          context: .
          file: ./Dockerfile
-          push: true
+          load: true
          target: no_model
          tags: ${{ steps.setup.outputs.tags }}
          labels: ${{ steps.setup.outputs.labels }}
-          platforms: linux/amd64, linux/arm64
+          platforms: linux/amd64
          cache-from: type=gha
          cache-to: type=gha,mode=max
 
       - name: Test no_model docker image
         run: |
-          docker run --group-add 0 -v ".:/app" ghcr.io/jim60105/whisperx:no_model@${{ steps.build.outputs.digest }} -- --model base --language en --device cpu --compute_type int8 --output_format srt .github/workflows/test/en.webm;
+          docker run --group-add 0 -v ".:/app" ${{ steps.build.outputs.imageid }} -- --model base --language en --device cpu --compute_type int8 --output_format srt .github/workflows/test/en.webm;
           if [ ! -f en.srt ]; then
             echo "The en.srt file does not exist"
             exit 1
@@ -109,12 +116,24 @@ jobs:
           fi
           echo "Test passed."
+      - name: Build and push:no_model
+        uses: docker/build-push-action@v4
+        id: publish
+        with:
+          context: .
+          file: ./Dockerfile
+          push: true
+          target: no_model
+          tags: ${{ steps.setup.outputs.tags }}
+          labels: ${{ steps.setup.outputs.labels }}
+          platforms: linux/amd64, linux/arm64
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
 
   # Download whisper model cache
   docker-cache:
     # The type of runner that the job will run on
     runs-on: ubuntu-latest
     outputs:
       digest: ${{ steps.build.outputs.digest }}
     strategy:
       fail-fast: true
       matrix:
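
Because building and pushing are now separate steps, the job-level `digest` output of `docker-no_model` points at the `publish` step, so downstream jobs receive the digest of the image that was actually pushed. A hypothetical consumer, not part of this diff and shown only to illustrate how the job-level output is read:

  # Illustrative job id and step; it would sit under the same top-level
  # `jobs:` key as docker-no_model.
  verify-digest:
    needs: docker-no_model
    runs-on: ubuntu-latest
    steps:
      - name: Show the pushed no_model digest
        run: echo "no_model digest is ${{ needs.docker-no_model.outputs.digest }}"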
