# Workflow: Automate crawler (copied from GitHub Actions run #6)
name: Algolia Crawler

on:
  # push:
  #   branches: [current, automate-crawler]
  pull_request:
    types: ['opened', 'edited', 'reopened', 'synchronize']

jobs:
  algolia_recrawl:
    # Alternative triggers kept for reference:
    # if: ${{ github.event.label.name == 'trigger-crawl' }}
    # if: github.event.pull_request.merged == true
    name: Trigger Algolia Crawl
    runs-on: ubuntu-latest
    steps:
      # Checkout repo
      - name: Checkout Repo
        uses: actions/checkout@v2

      # Print the label payload for debugging which event fired the run
      - name: Dump GitHub context
        env:
          EVENT_CONTEXT: ${{ toJson(github.event.label) }}
        run: |
          echo "$EVENT_CONTEXT"

      # Wait for deploy URL to be available from Vercel
      # - name: Get deployment URL
      #   id: deployment
      #   uses: dorshinar/get-deployment-url@master
      #   timeout-minutes: 15
      #   with:
      #     token: ${{ github.token }}
      #     # Check for deploy URL every 20 seconds
      #     retryInterval: '20000'

      # Once deploy URL is found, trigger Algolia crawl
      # - name: Run Algolia Crawler
      #   uses: algolia/algoliasearch-crawler-github-actions@v1
      #   id: crawler_push
      #   with:
      #     crawler-user-id: ${{ secrets.CRAWLER_USER_ID }}
      #     crawler-api-key: ${{ secrets.CRAWLER_API_KEY }}
      #     algolia-app-id: ${{ secrets.ALGOLIA_APP_ID }}
      #     algolia-api-key: ${{ secrets.ALGOLIA_API_KEY }}
      #     site-url: 'https://docs.getdbt.com'
      #     crawler-name: ${{ secrets.CRAWLER_NAME }}