name: publish-pages

on:
  # Runs on pushes targeting the default branch
  push:
    branches:
      - main
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
  # run every 6 hours
  schedule:
    - cron: "0 */6 * * *"

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: pages
  cancel-in-progress: false
jobs:
  # download the redis dump from s3
  # create the json files for the redis data
  # upload the json files to github artifacts
  publish_pages:
    runs-on: ubuntu-latest
    timeout-minutes: 10
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    steps:
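      # workaround: make tar run as root so the cache step can save/restore the
      # root-owned files the Redis container creates under ./.redis (see link below)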
      # https://github.com/actions/toolkit/issues/946#issuecomment-1590016041
      - name: root suid tar
        run: sudo chown root:root /bin/tar && sudo chmod u+s /bin/tar
      - uses: actions/checkout@v3
      - name: Setup Pages
        uses: actions/configure-pages@v3
      - name: Use Node.js 22.12.0
        uses: actions/setup-node@v3
        with:
          node-version: 22.12.0
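      # the hour-stamped cache key below means the S3 download runs at most once
      # per hour; later runs in the same hour reuse the cached dump.rdb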
      - name: get current hour for cache busting
        id: cache-hour
        run: echo "hour=$(date +'%Y-%m-%d-%H')" >> "$GITHUB_OUTPUT"
      - name: Cache Redis Dump
        id: cache-redis
        uses: actions/cache@v3
        env:
          cache-name: cache-redis
        with:
          path: ./.redis/dump.rdb
          key: cache-redis-${{ steps.cache-hour.outputs.hour }}
      # download redis db dump from s3
      - name: Download the Redis Dump
        if: steps.cache-redis.outputs.cache-hit != 'true'
        uses: keithweaver/aws-s3-github-action@v1.0.0
        with:
          command: cp
          source: s3://${{ vars.BUILD_S3_BUCKET }}/checkpoint/dump.rdb
          destination: ./.redis/dump.rdb
          aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws_region: ap-southeast-2
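      # redis is expected to load the dump.rdb above via the volume configured in
      # docker-compose.github.yaml, so the crawler scripts read a pre-populated store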
      # start redis & check if it is running
      - name: Start Redis
        working-directory: ./crawler
        run: docker compose -f "docker-compose.github.yaml" up -d redis
      - working-directory: ./crawler
        run: docker ps -a
      - working-directory: ./crawler
        run: docker compose -f "docker-compose.github.yaml" logs redis
      # Install + Cache Crawler Dependencies
      - name: Cache Node Modules | Crawler
        id: cache-crawler-yarn
        uses: actions/cache@v3
        env:
          cache-name: cache-crawler-yarn
        with:
          path: ./crawler/node_modules/
          key: cache-crawler-yarn-${{ hashFiles('crawler/yarn.lock') }}
      - name: Install Node Modules | Crawler
        if: steps.cache-crawler-yarn.outputs.cache-hit != 'true'
        run: yarn --frozen-lockfile
        working-directory: ./crawler
      # Run the crawler health and output scripts
      - name: Run Health Script
        run: yarn health
        working-directory: ./crawler
      - name: Run Output Script
        run: yarn output
        working-directory: ./crawler
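      # the JSON files generated above land in ./frontend/public/data;
      # copy them into the pages site so they ship with the static build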
      - name: copy the json files to github pages
        working-directory: ./
        run: cp -r ./frontend/public/data/ ./pages/public/
      # build pages frontend
      - name: Cache Node Modules | Pages
        id: cache-pages-yarn
        uses: actions/cache@v3
        env:
          cache-name: cache-pages-yarn
        with:
          path: ./pages/node_modules/
          key: cache-pages-yarn-${{ hashFiles('pages/yarn.lock') }}
      - name: Install Dependencies
        if: steps.cache-pages-yarn.outputs.cache-hit != 'true'
        run: yarn --frozen-lockfile
        working-directory: ./pages
      - name: Build the Frontend for Pages
        run: yarn build
        working-directory: ./pages
      # upload pages
      - name: upload github pages artifacts
        uses: actions/upload-pages-artifact@v1
        with:
          path: ./pages/dist/
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v2
      # stop redis
      - name: Stop Redis
        working-directory: ./crawler
        if: always()
        run: docker compose -f "docker-compose.github.yaml" down