Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore(github): free disk space for github workflows #1577

Merged
merged 11 commits into from
Aug 8, 2023
Merged
70 changes: 70 additions & 0 deletions .github/workflows/free_disk_space.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# Frees disk space on the GitHub workflow runner by removing large
# pre-installed packages and tool/SDK directories that this CI build does
# not need. Deliberately runs without `set -e`: every step is best-effort,
# and a missing path or package must not abort the remaining cleanup.

echo "=============================================================================="
echo "Freeing up disk space on Github workflows"
echo "=============================================================================="

# Snapshot disk usage before cleanup so the reclaimed space is visible in the log.
echo "Before freeing, the space of each disk:"
df -h

# Diagnostic: list installed packages sorted by Installed-Size (largest last).
echo "Listing 100 largest packages ..."
dpkg-query -Wf '${Installed-Size}\t${Package}\n' | sort -n | tail -n 100

echo "Removing large packages ..."
# NOTE(review): `-s` makes this a simulation — it only *lists* autoremove
# candidates and removes nothing. Confirm the dry run is intentional.
apt-get -s autoremove
apt-get remove -y openjdk-11-jre-headless

echo "After removing large packages, the space of each disk:"
df -h

# Diagnostic dump of directory sizes, used to decide what is worth deleting
# below. /__e, /__t and /__w are container-mode runner mounts (presumably
# externals, tool cache and workspace — verify against the runner docs);
# `du` errors on paths that do not exist on this runner are harmless noise.
echo "Listing directories ..."
mount
ls -lrt /
du -csh /*
du -csh /__e/*/*
du -csh /__t/*/*
du -csh /__w/*/*
ls -lrt /github
du -csh /github/*
du -csh /opt/*
du -csh /usr/local/*
du -csh /usr/local/lib/*
du -csh /usr/local/share/*
du -csh /usr/share/*
echo "AGENT_TOOLSDIRECTORY is $AGENT_TOOLSDIRECTORY"

# Remove toolchains and SDKs not used by this build: CodeQL, PyPy/Python/
# Ruby/Go/Node tool caches, GHC/ghcup, GraalVM, Android SDK, global node
# modules, boost, chromium, powershell and dotnet.
echo "Removing large directories ..."
rm -rf /__t/CodeQL
rm -rf /__t/PyPy
rm -rf /__t/Python
rm -rf /__t/Ruby
rm -rf /__t/go
rm -rf /__t/node
rm -rf /opt/ghc
rm -rf /usr/local/.ghcup
rm -rf /usr/local/graalvm
rm -rf /usr/local/lib/android
rm -rf /usr/local/lib/node_modules
rm -rf /usr/local/share/boost
rm -rf /usr/local/share/chromium
rm -rf /usr/local/share/powershell
rm -rf /usr/share/dotnet

# Snapshot disk usage again so the job log shows how much was reclaimed.
echo "After freeing, the space of each disk:"
df -h
67 changes: 61 additions & 6 deletions .github/workflows/lint_and_test_cpp.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,8 @@ on:
env:
# Update the options to reduce the consumption of the disk space
ONEBOX_OPTS: disk_min_available_space_ratio=5
TEST_OPTS: throttle_test_medium_value_kb=10,throttle_test_large_value_kb=25
TEST_OPTS: disk_min_available_space_ratio=5,throttle_test_medium_value_kb=10,throttle_test_large_value_kb=25

jobs:
cpp_clang_format_linter:
name: Lint
Expand All @@ -70,6 +70,9 @@ jobs:
image: apache/pegasus:thirdparties-bin-ubuntu2204-${{ github.base_ref }}
steps:
- uses: actions/checkout@v3
- name: Free Disk Space (Ubuntu)
run: |
.github/workflows/free_disk_space.sh
- uses: dorny/paths-filter@v2
id: changes
with:
Expand Down Expand Up @@ -120,6 +123,9 @@ jobs:
path: |
/github/home/.ccache
key: release_ccache
- name: Free Disk Space (Ubuntu)
run: |
.github/workflows/free_disk_space.sh
- uses: dorny/paths-filter@v2
id: changes
with:
Expand All @@ -132,25 +138,36 @@ jobs:
- name: Unpack prebuilt third-parties
if: steps.changes.outputs.thirdparty == 'false'
run: |
rm -f /root/thirdparties-src.zip
unzip /root/thirdparties-bin.zip -d ./thirdparty
rm -f /root/thirdparties-bin.zip
find ./thirdparty -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
rm -rf ./thirdparty/hadoop-bin/share/doc
rm -rf ./thirdparty/zookeeper-bin/docs
- name: Rebuild third-parties
if: steps.changes.outputs.thirdparty == 'true'
working-directory: thirdparty
# Build thirdparties and leave some necessary libraries and source
run: |
rm -f /root/thirdparties-src.zip
mkdir build
cmake -DCMAKE_BUILD_TYPE=Release -DROCKSDB_PORTABLE=ON -B build/
cmake --build build/ -j $(nproc)
rm -rf build/Build build/Download/[a-y]* build/Source/[a-g]* build/Source/[i-q]* build/Source/[s-z]*
find ./ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
../scripts/download_hadoop.sh hadoop-bin
../scripts/download_zk.sh zookeeper-bin
rm -rf hadoop-bin/share/doc
rm -rf zookeeper-bin/docs
- name: Compilation
run: |
ccache -p
ccache -z
./run.sh build --test --skip_thirdparty -j $(nproc) -t release
ccache -s
- name: Clear Build Files
run: |
find ./build/latest/src/ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
- name: Pack Server
run: |
./run.sh pack_server
Expand All @@ -164,7 +181,6 @@ jobs:
mv thirdparty/hadoop-bin ./
mv thirdparty/zookeeper-bin ./
rm -rf thirdparty
find ./build/latest/src/ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
tar -zcvhf release__builder.tar build/latest/output build/latest/bin build/latest/src/server/test/config.ini hadoop-bin zookeeper-bin
- name: Upload Artifact
uses: actions/upload-artifact@v3
Expand Down Expand Up @@ -252,6 +268,9 @@ jobs:
path: |
/github/home/.ccache
key: asan_ccache
- name: Free Disk Space (Ubuntu)
run: |
.github/workflows/free_disk_space.sh
- uses: dorny/paths-filter@v2
id: changes
with:
Expand All @@ -264,31 +283,41 @@ jobs:
- name: Unpack prebuilt third-parties
if: steps.changes.outputs.thirdparty == 'false'
run: |
rm -f /root/thirdparties-src.zip
unzip /root/thirdparties-bin.zip -d ./thirdparty
rm -f /root/thirdparties-bin.zip
find ./thirdparty -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
rm -rf ./thirdparty/hadoop-bin/share/doc
rm -rf ./thirdparty/zookeeper-bin/docs
- name: Rebuild third-parties
if: steps.changes.outputs.thirdparty == 'true'
working-directory: thirdparty
# Build thirdparties and leave some necessary libraries and source
run: |
rm -f /root/thirdparties-src.zip
mkdir build
cmake -DCMAKE_BUILD_TYPE=Release -DROCKSDB_PORTABLE=ON -B build/
cmake --build build/ -j $(nproc)
rm -rf build/Build build/Download/[a-y]* build/Source/[a-g]* build/Source/[i-q]* build/Source/[s-z]*
find ./ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
../scripts/download_hadoop.sh hadoop-bin
../scripts/download_zk.sh zookeeper-bin
rm -rf hadoop-bin/share/doc
rm -rf zookeeper-bin/docs
- name: Compilation
run: |
ccache -p
ccache -z
./run.sh build --test --sanitizer address --skip_thirdparty --disable_gperf -j $(nproc)
ccache -s
- name: Clear Build Files
run: |
find ./build/latest/src/ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
- name: Tar files
run: |
mv thirdparty/hadoop-bin ./
mv thirdparty/zookeeper-bin ./
rm -rf thirdparty
find ./build/latest/src/ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
tar -zcvhf release_address_builder.tar build/latest/output build/latest/bin build/latest/src/server/test/config.ini hadoop-bin zookeeper-bin
- name: Upload Artifact
uses: actions/upload-artifact@v3
Expand Down Expand Up @@ -393,31 +422,41 @@ jobs:
# - name: Unpack prebuilt third-parties
# if: steps.changes.outputs.thirdparty == 'false'
# run: |
# rm -f /root/thirdparties-src.zip
# unzip /root/thirdparties-bin.zip -d ./thirdparty
# rm -f /root/thirdparties-bin.zip
# find ./thirdparty -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
# rm -rf ./thirdparty/hadoop-bin/share/doc
# rm -rf ./thirdparty/zookeeper-bin/docs
# - name: Rebuild third-parties
# if: steps.changes.outputs.thirdparty == 'true'
# working-directory: thirdparty
# # Build thirdparties and leave some necessary libraries and source
# run: |
# rm -f /root/thirdparties-src.zip
# mkdir build
# cmake -DCMAKE_BUILD_TYPE=Release -DROCKSDB_PORTABLE=ON -B build/
# cmake --build build/ -j $(nproc)
# rm -rf build/Build build/Download/[a-y]* build/Source/[a-g]* build/Source/[i-q]* build/Source/[s-z]*
# find ./ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
# ../scripts/download_hadoop.sh hadoop-bin
# ../scripts/download_zk.sh zookeeper-bin
# rm -rf hadoop-bin/share/doc
# rm -rf zookeeper-bin/docs
# - name: Compilation
# run: |
# ccache -p
# ccache -z
# ./run.sh build --test --sanitizer undefined --skip_thirdparty --disable_gperf -j $(nproc)
# ccache -s
# - name: Clear Build Files
# run: |
# find ./build/latest/src/ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
# - name: Tar files
# run: |
# mv thirdparty/hadoop-bin ./
# mv thirdparty/zookeeper-bin ./
# rm -rf thirdparty
# find ./build/latest/src/ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
# tar -zcvhf release_undefined_builder.tar build/latest/output build/latest/bin build/latest/src/server/test/config.ini hadoop-bin zookeeper-bin
# - name: Upload Artifact
# uses: actions/upload-artifact@v3
Expand Down Expand Up @@ -470,6 +509,9 @@ jobs:
# options: --cap-add=SYS_PTRACE
# steps:
# - uses: actions/checkout@v3
# - name: Free Disk Space (Ubuntu)
# run: |
# .github/workflows/free_disk_space.sh
# - name: Unpack prebuilt third-parties
# run: |
# unzip /root/thirdparties-bin.zip -d ./thirdparty
Expand Down Expand Up @@ -503,6 +545,9 @@ jobs:
path: |
/github/home/.ccache
key: jemalloc_ccache
- name: Free Disk Space (Ubuntu)
run: |
.github/workflows/free_disk_space.sh
- uses: dorny/paths-filter@v2
id: changes
with:
Expand All @@ -515,25 +560,36 @@ jobs:
- name: Unpack prebuilt third-parties
if: steps.changes.outputs.thirdparty == 'false'
run: |
rm -f /root/thirdparties-src.zip
unzip /root/thirdparties-bin.zip -d ./thirdparty
rm -f /root/thirdparties-bin.zip
find ./thirdparty -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
rm -rf ./thirdparty/hadoop-bin/share/doc
rm -rf ./thirdparty/zookeeper-bin/docs
- name: Rebuild third-parties
if: steps.changes.outputs.thirdparty == 'true'
working-directory: thirdparty
# Build thirdparties and leave some necessary libraries and source
run: |
rm -f /root/thirdparties-src.zip
mkdir build
cmake -DCMAKE_BUILD_TYPE=Release -DROCKSDB_PORTABLE=ON -DUSE_JEMALLOC=ON -B build/
cmake --build build/ -j $(nproc)
rm -rf build/Build build/Download/[a-y]* build/Source/[a-g]* build/Source/[i-q]* build/Source/[s-z]*
find ./ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
../scripts/download_hadoop.sh hadoop-bin
../scripts/download_zk.sh zookeeper-bin
rm -rf hadoop-bin/share/doc
rm -rf zookeeper-bin/docs
- name: Compilation
run: |
ccache -p
ccache -z
./run.sh build --test --skip_thirdparty -j $(nproc) -t release --use_jemalloc
ccache -s
- name: Clear Build Files
run: |
find ./build/latest/src/ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
- name: Pack Server
run: ./run.sh pack_server -j
- name: Pack Tools
Expand All @@ -543,7 +599,6 @@ jobs:
mv thirdparty/hadoop-bin ./
mv thirdparty/zookeeper-bin ./
rm -rf thirdparty
find ./build/latest/src/ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
tar -zcvhf release_jemalloc_builder.tar build/latest/output build/latest/bin build/latest/src/server/test/config.ini hadoop-bin zookeeper-bin
- name: Upload Artifact
uses: actions/upload-artifact@v3
Expand Down
1 change: 1 addition & 0 deletions src/common/test/config-test.ini
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@ rpc_message_header_format = dsn
rpc_timeout_milliseconds = 5000

[replication]
disk_min_available_space_ratio = 10
cluster_name = master-cluster

[duplication-group]
Expand Down
21 changes: 20 additions & 1 deletion src/common/test/run.sh
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#!/bin/sh
#!/bin/bash
# The MIT License (MIT)
#
# Copyright (c) 2015 Microsoft Corporation
Expand Down Expand Up @@ -31,6 +31,25 @@ exit_if_fail() {
fi
}

if [ -n ${TEST_OPTS} ]; then
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm planning to remove the run.sh scripts for unit tests in the long term; this change may be a small blocker for that.
How about define the default value as

DSN_DEFINE_int32(replication,
                 disk_min_available_space_ratio,
#ifdef PEGASUS_UNIT_TEST
                 5,
#else
                 10,
#endif
                 "if disk available space ratio "

if [ ! -f ./config-test.ini ]; then
echo "./config-test.ini does not exists !"
exit 1
fi

OPTS=`echo ${TEST_OPTS} | xargs`
config_kvs=(${OPTS//,/ })
for config_kv in ${config_kvs[@]}; do
config_kv=`echo $config_kv | xargs`
kv=(${config_kv//=/ })
if [ ! ${#kv[*]} -eq 2 ]; then
echo "Invalid config kv !"
exit 1
fi
sed -i '/^\s*'"${kv[0]}"'/c '"${kv[0]}"' = '"${kv[1]}" ./config-test.ini
done
fi

./dsn_replication_common_test

exit_if_fail $? "run unit test failed"
1 change: 1 addition & 0 deletions src/meta/test/config-test.ini
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ only_move_primary = false
cold_backup_disabled = false

[replication]
disk_min_available_space_ratio = 10
cluster_name = master-cluster
duplication_enabled = true

Expand Down
21 changes: 20 additions & 1 deletion src/meta/test/run.sh
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#!/bin/sh
#!/bin/bash
# The MIT License (MIT)
#
# Copyright (c) 2015 Microsoft Corporation
Expand Down Expand Up @@ -28,6 +28,25 @@ if [ -z "${REPORT_DIR}" ]; then
REPORT_DIR="."
fi

# Apply config overrides passed via TEST_OPTS (comma-separated key=value
# pairs, e.g. "disk_min_available_space_ratio=5,foo=bar") to the local
# config-test.ini before the tests run.
#
# Fix: ${TEST_OPTS} must be quoted. The unquoted `[ -n ${TEST_OPTS} ]`
# collapses to the one-argument test `[ -n ]` when TEST_OPTS is unset or
# empty, which is always true — so the script wrongly demanded
# config-test.ini (and exited 1) even when no overrides were requested.
if [ -n "${TEST_OPTS}" ]; then
  if [ ! -f "./config-test.ini" ]; then
    echo "./config-test.ini does not exists"
    exit 1
  fi

  # Trim surrounding whitespace, then split "k1=v1,k2=v2" on commas.
  # (Deliberately unquoted expansions below: word-splitting does the parse.)
  OPTS=$(echo "${TEST_OPTS}" | xargs)
  config_kvs=(${OPTS//,/ })
  for config_kv in "${config_kvs[@]}"; do
    config_kv=$(echo "${config_kv}" | xargs)
    kv=(${config_kv//=/ })
    # Each entry must be exactly key=value; values containing '=' or spaces
    # are not supported by this simple splitter.
    if [ "${#kv[@]}" -ne 2 ]; then
      echo "Invalid config kv !"
      exit 1
    fi
    # Replace the whole line beginning with the key (ignoring leading
    # whitespace) with "key = value".
    sed -i '/^\s*'"${kv[0]}"'/c '"${kv[0]}"' = '"${kv[1]}" ./config-test.ini
  done
fi

./clear.sh
output_xml="${REPORT_DIR}/dsn.meta.test.1.xml"
GTEST_OUTPUT="xml:${output_xml}" ./dsn.meta.test
Expand Down
1 change: 1 addition & 0 deletions src/replica/backup/test/config-test.ini
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ partitioned = true
name = replica_long

[replication]
disk_min_available_space_ratio = 10
cluster_name = master-cluster

[duplication-group]
Expand Down
Loading