Skip to content

Commit

Permalink
ci: Skip DB ops during install completely on cache hit
Browse files Browse the repository at this point in the history
Follow up to #3488
  • Loading branch information
BYK committed Dec 31, 2024
1 parent d5b49a4 commit a89fd0d
Show file tree
Hide file tree
Showing 3 changed files with 45 additions and 29 deletions.
14 changes: 9 additions & 5 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -94,8 +94,11 @@ jobs:
path: |
/var/lib/docker/volumes/sentry-postgres/_data
/var/lib/docker/volumes/sentry-clickhouse/_data
/var/lib/docker/volumes/sentry-kafka/_data
- name: Install ${{ env.LATEST_TAG }}
env:
SKIP_DB_MIGRATIONS: ${{ steps.restore_cache.outputs.cache-hit == 'true' && '1' || '0' }}
run: ./install.sh

- name: Prepare Docker Volume Caching
Expand All @@ -112,6 +115,7 @@ jobs:
path: |
/var/lib/docker/volumes/sentry-postgres/_data
/var/lib/docker/volumes/sentry-clickhouse/_data
/var/lib/docker/volumes/sentry-kafka/_data
- name: Checkout current ref
uses: actions/checkout@v4
Expand Down Expand Up @@ -192,13 +196,12 @@ jobs:
path: |
/var/lib/docker/volumes/sentry-postgres/_data
/var/lib/docker/volumes/sentry-clickhouse/_data
/var/lib/docker/volumes/sentry-kafka/_data
- name: Install self-hosted
uses: nick-fields/retry@v3
with:
timeout_minutes: 10
max_attempts: 3
command: ./install.sh
env:
SKIP_DB_MIGRATIONS: ${{ steps.restore_cache.outputs.cache-hit == 'true' && '1' || '0' }}
run: ./install.sh

- name: Prepare Docker Volume Caching
run: |
Expand All @@ -214,6 +217,7 @@ jobs:
path: |
/var/lib/docker/volumes/sentry-postgres/_data
/var/lib/docker/volumes/sentry-clickhouse/_data
/var/lib/docker/volumes/sentry-kafka/_data
- name: Integration Test
run: |
Expand Down
9 changes: 9 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,8 @@ x-sentry-defaults: &sentry_defaults
<<: *depends_on-default
smtp:
<<: *depends_on-default
seaweed:
<<: *depends_on-default
snuba-api:
<<: *depends_on-default
symbolicator:
Expand Down Expand Up @@ -139,6 +141,7 @@ services:
kafka:
<<: *restart_policy
image: "confluentinc/cp-kafka:7.6.1"
user: root
environment:
# https://docs.confluent.io/platform/current/installation/docker/config-reference.html#cp-kakfa-example
KAFKA_PROCESS_ROLES: "broker,controller"
Expand Down Expand Up @@ -207,6 +210,11 @@ services:
interval: 10s
timeout: 10s
retries: 30
seaweed:
image: chrislusf/seaweedfs:3.80
command: ['server', '-s3']
volumes:
- "sentry-seaweed:/data"
snuba-api:
<<: *snuba_defaults
# Kafka consumer responsible for feeding events into Clickhouse
Expand Down Expand Up @@ -540,3 +548,4 @@ volumes:
sentry-kafka-log:
sentry-smtp-log:
sentry-clickhouse-log:
sentry-seaweed:
51 changes: 27 additions & 24 deletions install/set-up-and-migrate-database.sh
Original file line number Diff line number Diff line change
@@ -1,33 +1,36 @@
echo "${_group}Setting up / migrating database ..."

# Run the full DB setup/migration unless the caller explicitly opted out.
# NOTE(review): the CI workflow sets SKIP_DB_MIGRATIONS to '1' (volume-cache
# hit) or '0' (miss), so we must test the *value*, not mere presence:
# `[[ -n "${SKIP_DB_MIGRATIONS:-}" ]]` is true for '0' as well, which would
# skip (or, inverted, always run) migrations regardless of the cache state.
if [[ "${SKIP_DB_MIGRATIONS:-0}" != "1" ]]; then
  # Fixes https://github.com/getsentry/self-hosted/issues/2758, where a migration fails due to indexing issue
  $dc up --wait postgres

  # The constraint-drop workaround below assumes the Debian-based postgres
  # image; bail out early if we are running something else.
  os=$($dc exec postgres cat /etc/os-release | grep 'ID=debian')
  if [[ -z $os ]]; then
    echo "Postgres image debian check failed, exiting..."
    exit 1
  fi

  # Using django ORM to provide broader support for users with external databases
  $dcr web shell -c "
from django.db import connection

with connection.cursor() as cursor:
    cursor.execute('ALTER TABLE IF EXISTS sentry_groupedmessage DROP CONSTRAINT IF EXISTS sentry_groupedmessage_project_id_id_515aaa7e_uniq;')
    cursor.execute('DROP INDEX IF EXISTS sentry_groupedmessage_project_id_id_515aaa7e_uniq;')
"

  # In CI (or when explicitly requested) run the upgrade non-interactively and
  # tell the operator how to create a user later; otherwise let `upgrade`
  # prompt for user creation as usual.
  if [[ -n "${CI:-}" || "${SKIP_USER_CREATION:-0}" == 1 ]]; then
    $dcr web upgrade --noinput --create-kafka-topics
    echo ""
    echo "Did not prompt for user creation. Run the following command to create one"
    echo "yourself (recommended):"
    echo ""
    echo "  $dc_base run --rm web createuser"
    echo ""
  else
    $dcr web upgrade --create-kafka-topics
  fi
else
  echo "Skipped DB migrations due to SKIP_DB_MIGRATIONS=$SKIP_DB_MIGRATIONS"
fi

echo "${_endgroup}"

0 comments on commit a89fd0d

Please sign in to comment.