# linux-build-and-test.yaml
name: Regression
"on":
  schedule:
    # Run daily at 0:00 on the main branch.
    # Since we use the date as part of the cache key to ensure that no stale
    # cache entries hide build failures, we need to make sure a cache entry is
    # present before the workflows that depend on the cache are run.
    - cron: '0 0 * * *'
  push:
    branches:
      - main
      - prerelease_test
    paths-ignore:
      - '**.md'
      - 'LICENSE*'
      - NOTICE
  pull_request:
    paths-ignore:
      - '**.md'
      - 'LICENSE*'
      - NOTICE
jobs:
  matrixbuilder:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - name: Checkout source code
        uses: actions/checkout@v4
      - name: Build matrix
        id: set-matrix
        run: |
          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
            git fetch origin ${{ github.base_ref }}:base
            .github/gh_matrix_builder.py ${{ github.event_name }} base
          else
            .github/gh_matrix_builder.py ${{ github.event_name }}
          fi
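  # A minimal sketch of the matrix JSON that gh_matrix_builder.py is assumed to
  # write to $GITHUB_OUTPUT under the "matrix" key. The field names below are
  # the ones this workflow reads via matrix.*; the concrete values are purely
  # illustrative, not taken from the script:
  #   matrix={"include": [{"pg": "16", "os": "ubuntu-22.04", "name": "Debug",
  #                        "build_type": "Debug", "cc": "gcc", "cxx": "g++",
  #                        "clang": "clang-14", "snapshot": "", ...}]}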
  regress:
    # Change the JOB_NAME variable below when changing the name.
    name: PG${{ matrix.pg }}${{ matrix.snapshot }} ${{ matrix.name }} ${{ matrix.os }}
    needs: matrixbuilder
    runs-on: ${{ matrix.os }}
    env:
      PG_SRC_DIR: pgbuild
      PG_INSTALL_DIR: postgresql
      CLANG: ${{ matrix.clang }}
      CC: ${{ matrix.cc }}
      CXX: ${{ matrix.cxx }}
    strategy:
      matrix: ${{ fromJson(needs.matrixbuilder.outputs.matrix) }}
      fail-fast: false
    steps:
      - name: Install Linux Dependencies
        if: runner.os == 'Linux'
        run: |
          # Don't add ddebs here because the ddebs mirror is always 503 Service Unavailable.
          # If needed, install them before opening the core dump.
          sudo apt-get update
          sudo apt-get install flex bison lcov systemd-coredump gdb libipc-run-perl \
            libtest-most-perl pkgconf icu-devtools ${{ matrix.extra_packages }}
      - name: Install macOS Dependencies
        if: runner.os == 'macOS'
        run: |
          # Disable the automatic dependency upgrade executed by `brew install`:
          # https://docs.brew.sh/Manpage#install-options-formulacask-
          HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 brew install gawk
          # Install the Perl modules after the last Homebrew call, since Homebrew
          # can change the Perl version.
          sudo perl -MCPAN -e "CPAN::Shell->notest('install', 'IPC::Run')"
          sudo perl -MCPAN -e "CPAN::Shell->notest('install', 'Test::Most')"
      - name: Setup macOS coredump directory
        if: runner.os == 'macOS'
        run: sudo chmod 777 /cores
      - name: Checkout TimescaleDB
        uses: actions/checkout@v4
      # We rebuild Postgres daily so that a build breakage does not surface
      # long after the change that actually caused it.
      - name: Get date for build caching
        id: get-date
        run: |
          echo "date=$(date +"%d")" >> $GITHUB_OUTPUT
      # On macOS the path used depends on the runner version, which breaks the
      # cache when the runner version changes, so we add the runner image version
      # as a cache suffix.
      - name: Cache suffix
        if: runner.os == 'macOS'
        run: echo "CACHE_SUFFIX=-${ImageVersion}" >> $GITHUB_ENV
      # We cache the build directory instead of the install directory here,
      # because extension installation writes files into the install directory,
      # which would taint the cache.
      - name: Cache PostgreSQL ${{ matrix.pg }} ${{ matrix.build_type }}
        id: cache-postgresql
        if: matrix.snapshot != 'snapshot'
        uses: actions/cache@v4
        with:
          path: ~/${{ env.PG_SRC_DIR }}
          key: "${{ matrix.os }}-postgresql-${{ matrix.pg }}-${{ matrix.cc }}\
            -${{ steps.get-date.outputs.date }}-${{ hashFiles('.github/**') }}${{ env.CACHE_SUFFIX }}"
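      # Illustrative example of how the cache key above might resolve (all values
      # here are assumptions, not taken from a real run):
      #   ubuntu-22.04-postgresql-16-gcc-07-<hashFiles digest>-20240901.1
      # where "07" is the day of the month and the trailing "-20240901.1" is the
      # macOS runner image version suffix (empty on Linux).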
      - name: Build PostgreSQL ${{ matrix.pg }}${{ matrix.snapshot }}
        if: steps.cache-postgresql.outputs.cache-hit != 'true'
        run: |
          if [ "${{ matrix.snapshot }}" = "snapshot" ]; then
            wget -q -O postgresql.tar.bz2 \
              https://ftp.postgresql.org/pub/snapshot/${{ matrix.pg }}/postgresql-${{ matrix.pg }}-snapshot.tar.bz2
          else
            wget -q -O postgresql.tar.bz2 \
              https://ftp.postgresql.org/pub/source/v${{ matrix.pg }}/postgresql-${{ matrix.pg }}.tar.bz2
          fi
          mkdir -p ~/$PG_SRC_DIR
          tar --extract --file postgresql.tar.bz2 --directory ~/$PG_SRC_DIR --strip-components 1
          cd ~/$PG_SRC_DIR
          ./configure --prefix=$HOME/$PG_INSTALL_DIR --with-openssl \
            --without-readline --without-zlib --without-libxml ${{ matrix.pg_extra_args }}
          make -j $(nproc)
          for ext in ${{ matrix.pg_extensions }}; do
            make -j $(nproc) -C contrib/${ext}
          done
      - name: Install PostgreSQL ${{ matrix.pg }}
        run: |
          cd ~/$PG_SRC_DIR
          make install
          for ext in ${{ matrix.pg_extensions }}; do
            make -C contrib/${ext} install
          done
          echo "$HOME/$PG_INSTALL_DIR/bin" >> "${GITHUB_PATH}"
      - name: Upload config.log
        if: always() && steps.cache-postgresql.outputs.cache-hit != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: config.log for PostgreSQL ${{ matrix.os }} ${{ matrix.name }} ${{ matrix.pg }}
          path: ~/${{ env.PG_SRC_DIR }}/config.log
      - name: Test telemetry without OpenSSL
        if: github.event_name != 'pull_request' && runner.os == 'Linux' && matrix.build_type == 'Debug'
        run: |
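          # BUILD_DIR=nossl is assumed to make ./bootstrap generate its CMake build
          # tree in ./nossl (consistent with the make -C nossl calls below), so this
          # no-OpenSSL build does not disturb the regular ./build tree used later.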
          BUILD_DIR=nossl ./bootstrap -DCMAKE_BUILD_TYPE=Debug \
            -DPG_SOURCE_DIR=~/$PG_SRC_DIR -DPG_PATH=~/$PG_INSTALL_DIR \
            ${{ matrix.tsdb_build_args }} -DCODECOVERAGE=${{ matrix.coverage }} -DUSE_OPENSSL=OFF \
            -DTEST_PG_LOG_DIRECTORY="$(readlink -f .)"
          make -j $(nproc) -C nossl
          make -C nossl install
          make -C nossl regresscheck TESTS=telemetry
      - name: Build TimescaleDB
        run: |
          ./bootstrap -DCMAKE_BUILD_TYPE="${{ matrix.build_type }}" \
            -DPG_SOURCE_DIR=~/$PG_SRC_DIR -DPG_PATH=~/$PG_INSTALL_DIR \
            ${{ matrix.tsdb_build_args }} -DCODECOVERAGE=${{ matrix.coverage }} \
            -DTEST_PG_LOG_DIRECTORY="$(readlink -f .)"
          make -j $(nproc) -C build
          make -C build install
      - name: Check exported symbols
        run: ./build/scripts/export_prefix_check.sh
      - name: make installcheck
        id: installcheck
        run: |
          set -o pipefail
          make -k -C build installcheck IGNORES="${{ join(matrix.ignored_tests, ' ') }}" \
            SKIPS="${{ join(matrix.skipped_tests, ' ') }}" ${{ matrix.installcheck_args }} \
            | tee installcheck.log
      - name: pginstallcheck
        if: matrix.pginstallcheck
        run: make -C build pginstallcheck
      - name: coverage
        if: matrix.coverage
        run: make -j $(nproc) -k -C build coverage
      - name: Send coverage report to Codecov.io app
        if: matrix.coverage
        uses: codecov/codecov-action@v4
        with:
          file: ./build/codecov/timescaledb-codecov.info
      - name: Save LCOV coverage report
        if: matrix.coverage
        uses: actions/upload-artifact@v4
        with:
          name: LCOV coverage report ${{ matrix.os }} ${{ matrix.name }} ${{ matrix.pg }}
          path: ./build/codecov/codecov-report
      - name: Show regression diffs
        if: always()
        id: collectlogs
        run: |
          find . -name regression.diffs -exec cat {} + > regression.log
          if [[ "${{ runner.os }}" == "Linux" ]]; then
            # wait in case there are in-progress coredumps
            sleep 10
            if coredumpctl -q list >/dev/null; then echo "coredumps=true" >>$GITHUB_OUTPUT; fi
            # print OOM killer information
            sudo journalctl --system -q --facility=kern --grep "Killed process" || true
          elif [[ "${{ runner.os }}" == "macOS" ]]; then
            if [ $(find /cores -type f | wc -l) -gt 0 ]; then echo "coredumps=true" >>$GITHUB_OUTPUT; fi
          fi
          if [[ -s regression.log ]]; then echo "regression_diff=true" >>$GITHUB_OUTPUT; fi
          grep -e 'FAILED' -e 'failed (ignored)' -e 'not ok' installcheck.log || true
          cat regression.log
      - name: Save regression diffs
        if: always() && steps.collectlogs.outputs.regression_diff == 'true'
        uses: actions/upload-artifact@v4
        with:
          name: Regression diff ${{ matrix.os }} ${{ matrix.name }} ${{ matrix.pg }}
          path: |
            regression.log
            installcheck.log
      - name: Save PostgreSQL log
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: PostgreSQL log ${{ matrix.os }} ${{ matrix.name }} ${{ matrix.pg }}
          path: postmaster.*
      - name: Stack trace Linux
        if: always() && steps.collectlogs.outputs.coredumps == 'true' && runner.os == 'Linux'
        run: |
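          # In the gdb script below, each "'"'" sequence temporarily leaves the
          # surrounding double-quoted here-string so that literal double quotes
          # reach gdb; e.g. gdb ultimately sees:
          #   printf "query = '%s'\n\n", debug_query_string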
          sudo coredumpctl debug --debugger=gdb --debugger-arguments='' <<<"
          set verbose on
          set trace-commands on
          show debug-file-directory
          printf "'"'"query = '%s'\n\n"'"'", debug_query_string
          bt full
          # We try to find the ExceptionalCondition frame so that the failed
          # condition is printed and can be searched for in the logs.
          frame function ExceptionalCondition
          printf "'"'"condition = '%s'\n"'"'", conditionName
          # Hopefully we are now near the failed assertion; print where we are.
          up 1
          list
          info args
          info locals
          " 2>&1 | tee -a stacktrace.log
          ./scripts/bundle_coredumps.sh
      - name: Stack trace macOS
        if: always() && steps.collectlogs.outputs.coredumps == 'true' && runner.os == 'macOS'
        run: |
          ~/$PG_SRC_DIR/src/tools/ci/cores_backtrace.sh macos /cores
      - name: Coredumps
        if: always() && steps.collectlogs.outputs.coredumps == 'true'
        uses: actions/upload-artifact@v4
        with:
          name: Coredumps ${{ matrix.os }} ${{ matrix.name }} ${{ matrix.pg }}
          path: coredumps
      - name: Save stacktraces
        if: always() && steps.collectlogs.outputs.coredumps == 'true'
        uses: actions/upload-artifact@v4
        with:
          name: Stacktraces ${{ matrix.os }} ${{ matrix.name }} ${{ matrix.pg }}
          path: stacktrace.log
      - name: Save TAP test logs
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: TAP test logs ${{ matrix.os }} ${{ matrix.name }} ${{ matrix.pg }}
          path: |
            build/test/tmp_check/log
            build/tsl/test/tmp_check/log
      - name: Upload test results to the database
        # Don't upload the results of the flaky check, because the database schema
        # only supports running each test once per job.
        if: always() && (! contains(matrix.name, 'Flaky'))
        env:
          # GitHub Actions allows you neither to use the env context for the job
          # name nor to access the job name from the step context, so we have to
          # duplicate it here to work around this nonsense.
          JOB_NAME: PG${{ matrix.pg }}${{ matrix.snapshot }} ${{ matrix.name }} ${{ matrix.os }}
          CI_STATS_DB: ${{ secrets.CI_STATS_DB }}
          GITHUB_EVENT_NAME: ${{ github.event_name }}
          GITHUB_REF_NAME: ${{ github.ref_name }}
          GITHUB_REPOSITORY: ${{ github.repository }}
          GITHUB_RUN_ATTEMPT: ${{ github.run_attempt }}
          GITHUB_RUN_ID: ${{ github.run_id }}
          GITHUB_RUN_NUMBER: ${{ github.run_number }}
          JOB_STATUS: ${{ job.status }}
        run: |
          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
            GITHUB_PR_NUMBER="${{ github.event.number }}"
          else
            GITHUB_PR_NUMBER=0
          fi
          export GITHUB_PR_NUMBER
          scripts/upload_ci_stats.sh