name: deploy-aws-prod

on:
  push:
    branches:
      - main

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

  # run every 6 hours (minute 0 of hours 0, 6, 12, and 18 UTC)
  schedule:
    - cron: "0 */6 * * *"
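# Only one production deploy runs at a time; with cancel-in-progress set to
# false, a newly triggered run queues behind the active one instead of
# cancelling it.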
concurrency:
  group: production
  cancel-in-progress: false
jobs:
  aws_deploy_prod:
    runs-on: ubuntu-latest

    environment:
      name: production
      url: https://lemmyverse.net

    steps:
      # https://github.com/actions/toolkit/issues/946#issuecomment-1590016041
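      # Workaround for actions/cache permission errors (see issue above):
      # cache extraction runs tar as the unprivileged runner user, which
      # cannot recreate root-owned files, so tar is made setuid-root before
      # any cache step executes.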
      - name: root suid tar
        run: sudo chown root:root /bin/tar && sudo chmod u+s /bin/tar

      - uses: actions/checkout@v3

      - name: Use Node.js 22.12.0
        uses: actions/setup-node@v3
        with:
          node-version: 22.12.0

      # download latest redis database
      - name: get current hour for cache busting
        id: cache-hour
        run: echo "hour=$(date +'%Y-%m-%d-%H')" >> $GITHUB_OUTPUT
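      # the hour-stamped key below rolls over every hour, so a cached dump is
      # reused within the hour and re-downloaded from S3 after that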
      - name: Cache Redis Dump
        id: cache-redis
        uses: actions/cache@v3
        env:
          cache-name: cache-redis
        with:
          path: ./.redis/dump.rdb
          key: cache-redis-${{ steps.cache-hour.outputs.hour }}

      # download redis db dump from s3
      - name: Download the Redis Dump
        if: steps.cache-redis.outputs.cache-hit != 'true'
        uses: keithweaver/aws-s3-github-action@v1.0.0
        with:
          command: cp
          source: s3://${{ vars.BUILD_S3_BUCKET }}/checkpoint/dump.rdb
          destination: ./.redis/dump.rdb
          aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws_region: ap-southeast-2
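      # ./.redis/dump.rdb is assumed to be bind-mounted into the redis
      # container by docker-compose.github.yaml so it boots with this data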
      # start redis & check if it is running
      - name: Start Redis
        working-directory: ./crawler
        run: docker compose -f "docker-compose.github.yaml" up -d redis

      - working-directory: ./crawler
        run: docker ps -a

      - working-directory: ./crawler
        run: docker compose -f "docker-compose.github.yaml" logs redis
      # Install + Cache Crawler Dependencies
      - name: Cache Node Modules | Crawler
        id: cache-crawler-yarn
        uses: actions/cache@v3
        env:
          cache-name: cache-crawler-yarn
        with:
          path: ./crawler/node_modules/
          key: cache-crawler-yarn-${{ hashFiles('crawler/yarn.lock') }}
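      # node_modules is cached directly, so on an exact lockfile hit the
      # install step below is skipped entirely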
      - name: Install Node Modules | Crawler
        if: steps.cache-crawler-yarn.outputs.cache-hit != 'true'
        run: yarn --frozen-lockfile
        working-directory: ./crawler

      # Run Crawler Health + Output Scripts
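      # (both are assumed to be scripts in crawler/package.json; output
      # presumably generates the data files the frontend build consumes)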
      - name: Run Health Script
        run: yarn health
        working-directory: ./crawler

      - name: Run Output Script
        run: yarn output
        working-directory: ./crawler

      # Install + Cache Frontend Dependencies
      - name: Cache Node Modules | Frontend
        id: cache-frontend-yarn
        uses: actions/cache@v3
        env:
          cache-name: cache-frontend-yarn
        with:
          path: ./frontend/node_modules/
          key: cache-frontend-yarn-${{ hashFiles('frontend/yarn.lock') }}

      - name: Install Node Modules | Frontend
        if: steps.cache-frontend-yarn.outputs.cache-hit != 'true'
        run: yarn --frozen-lockfile
        working-directory: ./frontend

      - name: Build the Frontend
        run: yarn build
        working-directory: ./frontend

      - name: Create CDK Config JSON
        id: create-json
        uses: jsdaniell/create-json@v1.2.2
        with:
          dir: ./cdk
          name: "config.json"
          json: ${{ vars.CONFIG_JSON }}
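      # config.json is materialized from the CONFIG_JSON repository variable;
      # the CDK app presumably reads its deployment settings from this file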
      # Install + Cache CDK Dependencies
      - name: Cache Node Modules | CDK
        id: cache-cdk-yarn
        uses: actions/cache@v3
        env:
          cache-name: cache-cdk-yarn
        with:
          path: ./cdk/node_modules/
          key: cache-cdk-yarn-${{ hashFiles('cdk/yarn.lock') }}

      - name: Install Node Modules | CDK
        if: steps.cache-cdk-yarn.outputs.cache-hit != 'true'
        run: yarn --frozen-lockfile
        working-directory: ./cdk
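      # the yarn scripts below are assumed to wrap the matching cdk commands:
      # bootstrap provisions the CDK staging resources, synth renders the
      # CloudFormation templates, diff prints pending changes, and deploy:ci
      # applies them non-interactively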
      - name: CDK Bootstrap
        run: yarn bootstrap
        working-directory: ./cdk
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

      - name: CDK Synth
        run: yarn synth
        working-directory: ./cdk
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

      - name: CDK Diff
        run: yarn diff
        working-directory: ./cdk
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

      - name: CDK Deploy
        run: yarn deploy:ci
        working-directory: ./cdk
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}