-
Notifications
You must be signed in to change notification settings - Fork 1
124 lines (110 loc) · 4.02 KB
/
zap_export.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
---
# Weekly export of ZAP entities from the CRM to DigitalOcean spaces
# (edm-recipes / edm-private / edm-publishing). Runs every Monday at
# 00:00 UTC and can also be triggered manually.
name: ZAP - Weekly Export from CRM

on:
  schedule:
    # Mondays at midnight UTC — quote cron strings so the `*` tokens are
    # never subject to plain-scalar parsing quirks.
    - cron: "0 0 * * 1"
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-22.04
    defaults:
      run:
        shell: bash
        working-directory: products/zap-opendata
    container:
      image: nycplanning/build-base:latest
    env:
      # Schema name is suffixed with trigger/ref so scheduled and manual
      # runs (and branches) write to distinct schemas.
      EDM_DATA_ZAP_SCHEMA: weekly_export_${{ github.event_name }}_${{ github.ref_name }}
      RECIPES_BUCKET: edm-recipes
    strategy:
      fail-fast: false
      # Serialize entity exports — the CRM/API side is not safe to hit
      # with parallel extracts.
      max-parallel: 1
      matrix:
        entity:
          - dcp_projectactions
          - dcp_projectmilestones
          - dcp_projectactionbbls
          - dcp_communityboarddispositions
          - dcp_dcpprojectteams
        open:
          - false
        # Only projects/projectbbls are published to the open (visible)
        # destinations — flagged via `open: true`.
        include:
          - entity: "dcp_projects"
            open: true
          - entity: "dcp_projectbbls"
            open: true
    steps:
      - uses: actions/checkout@v4

      - name: Load Secrets
        uses: 1password/load-secrets-action@v1
        with:
          export-env: true
        env:
          OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
          AWS_S3_ENDPOINT: "op://Data Engineering/DO_keys/AWS_S3_ENDPOINT"
          AWS_SECRET_ACCESS_KEY: "op://Data Engineering/DO_keys/AWS_SECRET_ACCESS_KEY"
          AWS_ACCESS_KEY_ID: "op://Data Engineering/DO_keys/AWS_ACCESS_KEY_ID"
          ZAP_DOMAIN: "op://Data Engineering/ZAP_CRM/DOMAIN"
          TENANT_ID: "op://Data Engineering/ZAP_CRM/TENANT_ID"
          CLIENT_ID: "op://Data Engineering/ZAP_CRM/CLIENT_ID"
          SECRET: "op://Data Engineering/ZAP_CRM/SECRET"
          BUILD_ENGINE_SERVER: "op://Data Engineering/EDM_DATA/server_url"

      - name: Run Container Setup
        # Setup script lives at the repo root, not under products/zap-opendata.
        working-directory: ./
        run: ./bash/docker_container_setup.sh

      - name: Set Version info
        id: version
        run: |
          DATE=$(date +%Y%m%d)
          echo "version=$DATE" >> "$GITHUB_OUTPUT"

      # NOTE disabling all use of BigQuery
      # - name: Set up Cloud SDK
      #   uses: google-github-actions/setup-gcloud@v0
      #   with:
      #     project_id: ${{ secrets.GCP_PROJECT_ID_DATA_ENGINEERING }}
      #     service_account_key: ${{ secrets.GCP_GCS_BQ_SA_KEY }}
      #     export_default_credentials: true

      - name: Get ${{ matrix.entity }}
        run: python3 -m src.runner ${{ matrix.entity }} $EDM_DATA_ZAP_SCHEMA

      # NOTE disabling all use of BigQuery
      # - name: Archive to BigQuery
      #   env:
      #     VERSION: ${{ steps.version.outputs.version }}
      #   run: ./zap.sh upload_bq ${{ matrix.entity }} $VERSION

      # NOTE disabling all use of BigQuery
      # - name: Archive recoded data to BigQuery
      #   env:
      #     VERSION: ${{ steps.version.outputs.version }}
      #   if: ${{ matrix.open }}
      #   run: |
      #     ./zap.sh upload_recoded_bq ${{ matrix.entity }} $VERSION

      - name: Library Archive
        run: |
          library archive --name ${{ matrix.entity }} --path templates/${{ matrix.entity }}.yml \
            --latest --s3

      - name: Export to edm-private and edm-publishing
        env:
          VERSION: ${{ steps.version.outputs.version }}
        run: |
          echo "exporting ${{ matrix.entity }} with version $VERSION"
          ./zap.sh upload_crm_do ${{ matrix.entity }} $VERSION
          # Open datasets additionally go to the internal and public
          # (visible) destinations.
          if ${{ matrix.open }};
          then
            ./zap.sh upload_internal_do ${{ matrix.entity }} $VERSION
            ./zap.sh upload_visible_do ${{ matrix.entity }} $VERSION
          fi

  # Open a tracking issue when a scheduled run fails (manual runs are
  # excluded — the operator already sees the failure).
  create_issue_on_failure:
    needs: build
    runs-on: ubuntu-22.04
    if: ${{ failure() && (github.event_name == 'schedule') }}
    steps:
      - uses: actions/checkout@v4
        with:
          # Only the issue template is needed — keep the checkout minimal.
          sparse-checkout: .github
      - name: Create issue on failure
        uses: JasonEtco/create-an-issue@v2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          ACTION: ${{ github.workflow }}
          BUILD_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
        with:
          filename: .github/ISSUE_TEMPLATE/scheduled_action_failure.md