Skip to content

Commit

Permalink
Run build and tests directly on Ubuntu 20.04, 22.04 without docker (#…
Browse files Browse the repository at this point in the history
davidbolvansky authored Dec 17, 2022

Verified

This commit was created on GitHub.com and signed with GitHub’s verified signature. The key has expired.
1 parent 79bbe99 commit 9bfb5a1
Showing 12 changed files with 346 additions and 156 deletions.
22 changes: 14 additions & 8 deletions .github/workflows/ci-lint.yaml
Original file line number Diff line number Diff line change
@@ -8,7 +8,12 @@ on:

jobs:
p4c-lint:
runs-on: ubuntu-latest
strategy:
fail-fast: false
runs-on: ubuntu-20.04
env:
IMAGE_TYPE: test
CMAKE_ONLY: ON
steps:
- uses: actions/checkout@v3
with:
@@ -18,22 +23,23 @@ jobs:
- name: ccache
uses: hendrikmuhs/ccache-action@v1
with:
key: apply-linters
key: apply-linters-${{ runner.os }}
max-size: 1000M

# TODO: This check is disabled because git on the GitHub servers behaves differently. Unclear why.
# - name: Check submodule ref points.
# run: |
# ./tools/check-git-submodules.sh

- name: Build
- name: Build (Ubuntu 20.04)
run: |
docker build -t p4c --build-arg IMAGE_TYPE=test --build-arg CMAKE_ONLY=ON .
sudo cp -rf . /p4c/
(cd /p4c/ && sudo -E tools/ci-build.sh)
- name: Run cpplint.
run: |
docker run p4c make cpplint -C build
run: make cpplint -C build
working-directory: /p4c

- name: Run clang-format.
run: |
docker run p4c make clang-format -C build
run: make clang-format -C build
working-directory: /p4c
20 changes: 11 additions & 9 deletions .github/workflows/ci-p4tools.yml
Original file line number Diff line number Diff line change
@@ -16,9 +16,12 @@ jobs:
build-and-test-tools:
strategy:
fail-fast: false
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
env:
CTEST_PARALLEL_LEVEL: 4
IMAGE_TYPE: test
ENABLE_UNIFIED_COMPILATION: ON
ENABLE_TEST_TOOLS: ON
steps:
- uses: actions/checkout@v2
with:
@@ -30,13 +33,12 @@ jobs:
key: test-tools-${{ matrix.unified }}-${{ runner.os }}
max-size: 1000M

- name: Build (Ubuntu Linux)
- name: Build (Ubuntu 20.04)
run: |
docker build -t p4c --build-arg IMAGE_TYPE=test --build-arg ENABLE_UNIFIED_COMPILATION=ON --build-arg ENABLE_TEST_TOOLS=ON .
./tools/export_ccache.sh
sudo cp -rf . /p4c/
(cd /p4c/ && sudo -E tools/ci-build.sh)
sudo cp -rf /p4c/.ccache .
# run with sudo (...) --privileged
# this is needed to create network namespaces for the ebpf tests.
- name: Run tests (Ubuntu Linux)
run: |
sudo docker run --privileged -w /p4c/build -e $CTEST_PARALLEL_LEVEL p4c ctest -R testgen- -j2 --output-on-failure --schedule-random
- name: Run tests (Ubuntu 20.04)
run: sudo -E ctest -R testgen- --output-on-failure --schedule-random
working-directory: /p4c/build
2 changes: 1 addition & 1 deletion .github/workflows/ci-ptf-kernels-weekly.yml
Original file line number Diff line number Diff line change
@@ -59,7 +59,7 @@ jobs:
- name: ccache
uses: hendrikmuhs/ccache-action@v1
with:
key: test-${{ env.UNIFIED }}-${{ runner.os }}
key: test-ptf-kernels-${{ runner.os }}
max-size: 1000M

- name: Cache VM image
25 changes: 16 additions & 9 deletions .github/workflows/ci-ptf.yml
Original file line number Diff line number Diff line change
@@ -28,9 +28,15 @@ concurrency:

jobs:
ptf-linux:
strategy:
fail-fast: false
runs-on: ubuntu-20.04
env:
UNIFIED: ON
runs-on: ubuntu-latest
CTEST_PARALLEL_LEVEL: 4
IMAGE_TYPE: test
ENABLE_UNIFIED_COMPILATION: ON
MAKEFLAGS: -j8
INSTALL_PTF_EBPF_DEPENDENCIES: ON
steps:
- uses: actions/checkout@v2
with:
@@ -39,14 +45,15 @@ jobs:
- name: ccache
uses: hendrikmuhs/ccache-action@v1
with:
key: test-${{ env.UNIFIED }}-${{ runner.os }}
key: test-ptf-${{ runner.os }}
max-size: 1000M

- name: Build (Linux)
- name: Build (Ubuntu 20.04)
run: |
docker build --network host -t p4c --build-arg MAKEFLAGS=-j8 --build-arg IMAGE_TYPE=test --build-arg ENABLE_UNIFIED_COMPILATION=$UNIFIED --build-arg INSTALL_PTF_EBPF_DEPENDENCIES=ON .
./tools/export_ccache.sh
sudo cp -rf . /p4c/
(cd /p4c/ && sudo -E tools/ci-build.sh)
sudo cp -rf /p4c/.ccache .
- name: Run PTF tests for eBPF backend (Linux)
run: |
sudo docker run --privileged -v /sys/fs/bpf:/sys/fs/bpf -w /p4c/backends/ebpf/tests p4c ./test.sh
- name: Run PTF tests for eBPF backend (Ubuntu 20.04)
run: sudo -E ./test.sh
working-directory: /p4c/backends/ebpf/tests
15 changes: 9 additions & 6 deletions .github/workflows/ci-static-build-test.yml
Original file line number Diff line number Diff line change
@@ -17,9 +17,11 @@ jobs:
build-linux:
strategy:
fail-fast: false
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
env:
CTEST_PARALLEL_LEVEL: 4
IMAGE_TYPE: test
ENABLE_UNIFIED_COMPILATION: ON
BUILD_STATIC_RELEASE: ON
steps:
- uses: actions/checkout@v2
with:
@@ -28,10 +30,11 @@ jobs:
- name: ccache
uses: hendrikmuhs/ccache-action@v1
with:
key: static-${{ runner.os }}
key: test-static-${{ runner.os }}
max-size: 1000M

- name: Build (Linux)
- name: Build (Ubuntu 20.04)
run: |
docker build -t p4c --build-arg IMAGE_TYPE=test --build-arg ENABLE_UNIFIED_COMPILATION=ON --build-arg BUILD_STATIC_RELEASE=ON .
./tools/export_ccache.sh
sudo cp -rf . /p4c/
(cd /p4c/ && sudo -E tools/ci-build.sh)
sudo cp -rf /p4c/.ccache .
102 changes: 85 additions & 17 deletions .github/workflows/ci-test.yml
Original file line number Diff line number Diff line change
@@ -12,15 +12,48 @@ concurrency:
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}

jobs:
# Build with gcc and test p4c on Ubuntu 22.04.
test-ubuntu22:
strategy:
fail-fast: false
runs-on: ubuntu-22.04
env:
CTEST_PARALLEL_LEVEL: 4
IMAGE_TYPE: test
ENABLE_UNIFIED_COMPILATION: ON
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
fetch-depth: 0

- name: ccache
uses: hendrikmuhs/ccache-action@v1
with:
key: test-${{ runner.os }}-gcc
max-size: 1000M

- name: Build (Ubuntu 22.04, GCC)
run: |
sudo cp -rf . /p4c/
(cd /p4c/ && sudo -E tools/ci-build.sh)
sudo cp -rf /p4c/.ccache .
- name: Run tests (Ubuntu 22.04)
run: sudo -E ctest --output-on-failure --schedule-random
working-directory: /p4c/build

# Build with gcc and test p4c on Ubuntu 20.04.
test-linux:
test-ubuntu20:
strategy:
fail-fast: false
matrix:
unified: [ON, OFF]
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
env:
CTEST_PARALLEL_LEVEL: 4
IMAGE_TYPE: test
ENABLE_UNIFIED_COMPILATION: ${{ matrix.unified }}
steps:
- uses: actions/checkout@v2
with:
@@ -33,47 +66,82 @@ jobs:
key: test-${{ matrix.unified }}-${{ runner.os }}-gcc
max-size: 1000M

- name: Build (Ubuntu Linux, GCC)
- name: Build (Ubuntu 20.04, GCC)
run: |
docker build -t p4c --build-arg IMAGE_TYPE=test --build-arg ENABLE_UNIFIED_COMPILATION=${{ matrix.unified }} .
./tools/export_ccache.sh
sudo cp -rf . /p4c/
(cd /p4c/ && sudo -E tools/ci-build.sh)
sudo cp -rf /p4c/.ccache .
# run with sudo (...) --privileged
# this is needed to create network namespaces for the ebpf tests.
- name: Run tests (Ubuntu Linux)
run: |
sudo docker run --privileged -w /p4c/build -e $CTEST_PARALLEL_LEVEL p4c ctest --output-on-failure --schedule-random
- name: Run tests (Ubuntu 20.04)
run: sudo -E ctest --output-on-failure --schedule-random
working-directory: /p4c/build
if: matrix.unified == 'ON'

# Build with clang and test p4c on Ubuntu 20.04.
test-linux-clang-sanitizers:
test-ubuntu20-clang-sanitizers:
strategy:
fail-fast: false
runs-on: ubuntu-20.04
env:
CTEST_PARALLEL_LEVEL: 2
IMAGE_TYPE: test
ENABLE_UNIFIED_COMPILATION: ON
COMPILE_WITH_CLANG: ON
BUILD_AUTO_VAR_INIT_PATTERN: ON
ENABLE_SANITIZERS: ON
UBSAN_OPTIONS: print_stacktrace=1
ASAN_OPTIONS: print_stacktrace=1:detect_leaks=0
steps:
- uses: actions/checkout@v2
with:
submodules: recursive

- name: ccache
uses: hendrikmuhs/ccache-action@v1
with:
key: test-${{ runner.os }}-clang
max-size: 1000M

- name: Build (Ubuntu 20.04, Clang, Sanitizers)
run: |
sudo cp -rf . /p4c/
(cd /p4c/ && sudo -E tools/ci-build.sh)
sudo cp -rf /p4c/.ccache .
- name: Run tests (Ubuntu 20.04)
run: sudo -E ctest --output-on-failure --schedule-random
working-directory: /p4c/build

# Build with gcc and test p4c on Ubuntu 18.04.
test-ubuntu18:
strategy:
fail-fast: false
runs-on: ubuntu-latest
env:
CTEST_PARALLEL_LEVEL: 4
ENABLE_UNIFIED_COMPILATION: ON
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
fetch-depth: 0

- name: ccache
uses: hendrikmuhs/ccache-action@v1
with:
key: test-${{ runner.os }}-clang
key: test-ubuntu-18.04-gcc
max-size: 1000M

- name: Build (Ubuntu Linux, Clang, Sanitizers)
- name: Build (Ubuntu 18.04, GCC)
run: |
docker build -t p4c --build-arg IMAGE_TYPE=test --build-arg ENABLE_UNIFIED_COMPILATION=ON --build-arg COMPILE_WITH_CLANG=ON \
--build-arg BUILD_AUTO_VAR_INIT_PATTERN=ON --build-arg ENABLE_SANITIZERS=ON .
docker build -t p4c --build-arg BASE_IMAGE=ubuntu:18.04 --build-arg IMAGE_TYPE=test --build-arg ENABLE_UNIFIED_COMPILATION=ON .
./tools/export_ccache.sh
# run with sudo (...) --privileged
# this is needed to create network namespaces for the ebpf tests.
- name: Run tests (Ubuntu Linux)
- name: Run tests (Ubuntu 18.04)
run: |
sudo docker run --privileged -w /p4c/build -e $CTEST_PARALLEL_LEVEL p4c ctest --output-on-failure --schedule-random
sudo -E docker run --privileged -w /p4c/build -e $CTEST_PARALLEL_LEVEL p4c ctest --output-on-failure --schedule-random
# Build and test p4c on Fedora.
test-fedora-linux:
25 changes: 10 additions & 15 deletions .github/workflows/ci-validation.yml
Original file line number Diff line number Diff line change
@@ -17,10 +17,11 @@ jobs:
# We only test the front end and some mid end passes for now.
validate:
env:
UNIFIED: ON
BUILD_SUCCESS: true
CTEST_PARALLEL_LEVEL: 4
runs-on: ubuntu-latest
IMAGE_TYPE: test
ENABLE_UNIFIED_COMPILATION: ON
VALIDATION: ON
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
with:
@@ -32,18 +33,12 @@ jobs:
key: validation-${{ runner.os }}
max-size: 1000M

- name: Build (Linux)
- name: Build (Ubuntu 20.04)
run: |
docker build -t p4c --build-arg IMAGE_TYPE=test --build-arg ENABLE_UNIFIED_COMPILATION=$UNIFIED --build-arg VALIDATION=ON . || echo "BUILD_SUCCESS=false" >> $GITHUB_ENV
./tools/export_ccache.sh || echo "BUILD_SUCCESS=false" >> $GITHUB_ENV
sudo cp -rf . /p4c/
(cd /p4c/ && sudo -E tools/ci-build.sh)
sudo cp -rf /p4c/.ccache .
- name: Validate
if: env.BUILD_SUCCESS == 'true'
run: |
docker run --privileged -w /p4c/build -e $CTEST_PARALLEL_LEVEL p4c ctest -R toz3-validate-p4c --output-on-failure --schedule-random
- name: Build Failed
if: env.BUILD_SUCCESS == 'false'
run: |
echo "Building Gauntlet failed."
run: sudo -E ctest -R toz3-validate-p4c --output-on-failure --schedule-random
working-directory: /p4c/build
5 changes: 3 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
FROM p4lang/behavioral-model:latest
ARG BASE_IMAGE=p4lang/behavioral-model:latest
FROM ${BASE_IMAGE}
LABEL maintainer="P4 Developers <p4-dev@lists.p4.org>"

# Default to using 2 make jobs, which is a good default for CI. If you're
@@ -46,6 +47,6 @@ ENV ASAN_OPTIONS=print_stacktrace=1:detect_leaks=0

# Delegate the build to tools/ci-build.
COPY . /p4c/
RUN chmod u+x /p4c/tools/ci-build.sh && /p4c/tools/ci-build.sh
RUN /p4c/tools/ci-build.sh
# Set the workdir after building p4c.
WORKDIR /p4c/
2 changes: 1 addition & 1 deletion backends/ubpf/ubpfTable.cpp
Original file line number Diff line number Diff line change
@@ -191,7 +191,7 @@ void UBPFTable::emitInstance(EBPF::CodeBuilder* builder) {
}

void UBPFTable::setTableKind() {
// set table kind to HASH by default
// Set table kind to HASH by default
this->tableKind = EBPF::TableHash;
if (keyGenerator == nullptr) {
return;
133 changes: 93 additions & 40 deletions ir/ir-inline.h
Original file line number Diff line number Diff line change
@@ -81,44 +81,62 @@ void IR::Vector<T>::visit_children(Visitor& v) {
auto n = v.apply_visitor(*i);
if (!n && *i) {
i = erase(i);
} else if (n == *i) {
continue;
}
CHECK_NULL(n);
if (n == *i) {
i++;
} else if (auto l = dynamic_cast<const Vector*>(n)) {
continue;
}
if (auto l = dynamic_cast<const Vector*>(n)) {
i = erase(i);
i = insert(i, l->vec.begin(), l->vec.end());
i += l->vec.size();
} else if (auto v = dynamic_cast<const VectorBase*>(n)) {
continue;
}
if (const auto* v = dynamic_cast<const VectorBase*>(n)) {
if (v->empty()) {
i = erase(i);
} else {
i = insert(i, v->size() - 1, nullptr);
for (auto el : *v) {
if (auto e = dynamic_cast<const T*>(el))
for (const auto* el : *v) {
CHECK_NULL(el);
if (auto e = dynamic_cast<const T*>(el)) {
*i++ = e;
else
} else {
BUG("visitor returned invalid type %s for Vector<%s>", el->node_type_name(),
T::static_type_name());
}
}
}
} else if (auto e = dynamic_cast<const T*>(n)) {
continue;
}
if (auto e = dynamic_cast<const T*>(n)) {
*i++ = e;
} else {
BUG("visitor returned invalid type %s for Vector<%s>", n->node_type_name(),
T::static_type_name());
continue;
}
BUG("visitor returned invalid type %s for Vector<%s>", n->node_type_name(),
T::static_type_name());
}
}
template <class T>
void IR::Vector<T>::visit_children(Visitor& v) const {
for (auto& a : vec) v.visit(a);
for (auto& a : vec) {
v.visit(a);
}
}
template <class T>
void IR::Vector<T>::parallel_visit_children(Visitor& v) {
Visitor *start = nullptr, *tmp = &v;
Visitor* start = nullptr;
Visitor* tmp = &v;
size_t todo = vec.size();
if (todo > 1) start = &v.flow_clone();
if (todo > 1) {
start = &v.flow_clone();
}
for (auto i = vec.begin(); i != vec.end(); --todo, tmp = nullptr) {
if (!tmp) tmp = todo > 1 ? &start->flow_clone() : start;
if (tmp == nullptr) {
tmp = todo > 1 ? &start->flow_clone() : start;
}
auto n = tmp->apply_visitor(*i);
if (!n && *i) {
i = erase(i);
@@ -128,37 +146,50 @@ void IR::Vector<T>::parallel_visit_children(Visitor& v) {
i = erase(i);
i = insert(i, l->vec.begin(), l->vec.end());
i += l->vec.size();
} else if (auto v = dynamic_cast<const VectorBase*>(n)) {
} else if (const auto* v = dynamic_cast<const VectorBase*>(n)) {
if (v->empty()) {
i = erase(i);
} else {
i = insert(i, v->size() - 1, nullptr);
for (auto el : *v) {
if (auto e = dynamic_cast<const T*>(el))
for (const auto* el : *v) {
CHECK_NULL(el);
if (auto e = dynamic_cast<const T*>(el)) {
*i++ = e;
else
} else {
BUG("visitor returned invalid type %s for Vector<%s>", el->node_type_name(),
T::static_type_name());
}
}
}
} else if (auto e = dynamic_cast<const T*>(n)) {
*i++ = e;
} else {
CHECK_NULL(n);
BUG("visitor returned invalid type %s for Vector<%s>", n->node_type_name(),
T::static_type_name());
}
if (tmp != &v) v.flow_merge(*tmp);

if (tmp != &v) {
v.flow_merge(*tmp);
}
}
}
template <class T>
void IR::Vector<T>::parallel_visit_children(Visitor& v) const {
Visitor *start = nullptr, *tmp = &v;
Visitor* start = nullptr;
Visitor* tmp = &v;
size_t todo = vec.size();
if (todo > 1) start = &v.flow_clone();
if (todo > 1) {
start = &v.flow_clone();
}
for (auto& a : vec) {
if (!tmp) tmp = todo > 1 ? &start->flow_clone() : start;
if (tmp == nullptr) {
tmp = todo > 1 ? &start->flow_clone() : start;
}
tmp->visit(a);
if (tmp != &v) v.flow_merge(*tmp);
if (tmp != &v) {
v.flow_merge(*tmp);
}
--todo;
tmp = nullptr;
}
@@ -174,7 +205,9 @@ void IR::Vector<T>::toJSON(JSONGenerator& json) const {
sep = ",";
}
--json.indent;
if (*sep) json << std::endl << json.indent;
if (*sep) {
json << std::endl << json.indent;
}
json << "]";
}

@@ -186,18 +219,25 @@ void IR::IndexedVector<T>::visit_children(Visitor& v) {
auto n = v.apply_visitor(*i);
if (!n && *i) {
i = erase(i);
} else if (n == *i) {
continue;
}
CHECK_NULL(n);
if (n == *i) {
i++;
} else if (auto l = dynamic_cast<const Vector<T>*>(n)) {
continue;
}
if (auto l = dynamic_cast<const Vector<T>*>(n)) {
i = erase(i);
i = insert(i, l->begin(), l->end());
i += l->Vector<T>::size();
} else if (auto e = dynamic_cast<const T*>(n)) {
continue;
}
if (auto e = dynamic_cast<const T*>(n)) {
i = replace(i, e);
} else {
BUG("visitor returned invalid type %s for IndexedVector<%s>", n->node_type_name(),
T::static_type_name());
continue;
}
BUG("visitor returned invalid type %s for IndexedVector<%s>", n->node_type_name(),
T::static_type_name());
}
}
template <class T>
@@ -209,12 +249,14 @@ void IR::IndexedVector<T>::toJSON(JSONGenerator& json) const {
const char* sep = "";
Vector<T>::toJSON(json);
json << "," << std::endl << json.indent++ << "\"declarations\" : {";
for (auto& k : declarations) {
for (const auto& k : declarations) {
json << sep << std::endl << json.indent << k.first << " : " << k.second;
sep = ",";
}
--json.indent;
if (*sep) json << std::endl << json.indent;
if (*sep != 0) {
json << std::endl << json.indent;
}
json << "}";
}
IRNODE_DEFINE_APPLY_OVERLOAD(IndexedVector, template <class T>, <T>)
@@ -256,31 +298,40 @@ void IR::NameMap<T, MAP, COMP, ALLOC>::visit_children(Visitor& v) {
auto n = v.apply_visitor(i->second, i->first);
if (!n && i->second) {
i = symbols.erase(i);
} else if (n == i->second) {
continue;
}
CHECK_NULL(n);
if (n == i->second) {
i++;
} else if (auto m = dynamic_cast<const NameMap*>(n)) {
continue;
}
if (auto m = dynamic_cast<const NameMap*>(n)) {
namemap_insert_helper(i, m->symbols.begin(), m->symbols.end(), symbols, new_symbols);
i = symbols.erase(i);
} else if (auto s = dynamic_cast<const T*>(n)) {
continue;
}
if (auto s = dynamic_cast<const T*>(n)) {
if (match_name(i->first, s)) {
i->second = s;
i++;
} else {
namemap_insert_helper(i, cstring(obj_name(s)), std::move(s), symbols, new_symbols);
i = symbols.erase(i);
}
} else {
BUG("visitor returned invalid type %s for NameMap<%s>", n->node_type_name(),
T::static_type_name());
continue;
}
BUG("visitor returned invalid type %s for NameMap<%s>", n->node_type_name(),
T::static_type_name());
}
symbols.insert(new_symbols.begin(), new_symbols.end());
}
template <class T, template <class K, class V, class COMP, class ALLOC> class MAP /*= std::map */,
class COMP /*= std::less<cstring>*/,
class ALLOC /*= std::allocator<std::pair<cstring, const T*>>*/>
void IR::NameMap<T, MAP, COMP, ALLOC>::visit_children(Visitor& v) const {
for (auto& k : symbols) v.visit(k.second, k.first);
for (auto& k : symbols) {
v.visit(k.second, k.first);
}
}
template <class T, template <class K, class V, class COMP, class ALLOC> class MAP /*= std::map */,
class COMP /*= std::less<cstring>*/,
@@ -294,7 +345,9 @@ void IR::NameMap<T, MAP, COMP, ALLOC>::toJSON(JSONGenerator& json) const {
sep = ",";
}
--json.indent;
if (*sep) json << std::endl << json.indent;
if (*sep) {
json << std::endl << json.indent;
}
json << "}";
}

145 changes: 100 additions & 45 deletions tools/ci-build.sh
100644 → 100755
Original file line number Diff line number Diff line change
@@ -5,63 +5,118 @@
set -e # Exit on error.
set -x # Make command execution verbose

export P4C_DEPS="bison \
build-essential \
cmake \
curl \
flex \
g++ \
git \
lld \
libboost-dev \
libboost-graph-dev \
libboost-iostreams1.71-dev \
libfl-dev \
libgc-dev \
pkg-config \
python3 \
python3-pip \
python3-setuptools \
tcpdump"

export P4C_EBPF_DEPS="libpcap-dev \
libelf-dev \
zlib1g-dev \
llvm \
clang \
iproute2 \
iptables \
net-tools"

export P4C_RUNTIME_DEPS="cpp \
libboost-graph1.71.0 \
libboost-iostreams1.71.0 \
libgc1c2 \
libgmp-dev \
python3"
# Default to using 2 make jobs, which is a good default for CI. If you're
# building locally or you know there are more cores available, you may want to
# override this.
: "${MAKEFLAGS:=-j2}"
# Select the type of image we're building. Use `build` for a normal build, which
# is optimized for image size. Use `test` if this image will be used for
# testing; in this case, the source code and build-only dependencies will not be
# removed from the image.
: "${IMAGE_TYPE:=build}"
# Whether to do a unified build.
: "${ENABLE_UNIFIED_COMPILATION:=ON}"
# Whether to enable translation validation
: "${VALIDATION:=OFF}"
# This creates a release build that includes link time optimization and links
# all libraries statically.
: "${BUILD_STATIC_RELEASE:=OFF}"
# No questions asked during package installation.
: "${DEBIAN_FRONTEND:=noninteractive}"
# Whether to install dependencies required to run PTF-ebpf tests
: "${INSTALL_PTF_EBPF_DEPENDENCIES:=OFF}"
# List of kernel versions to install supporting packages for PTF-ebpf tests
: "${KERNEL_VERSIONS:=}"
# Whether to build the P4Tools back end and platform.
: "${ENABLE_TEST_TOOLS:=OFF}"

# Whether to treat warnings as errors.
: "${ENABLE_WERROR:=ON}"
# Compile with Clang compiler
: "${COMPILE_WITH_CLANG:=OFF}"
# Compile with sanitizers (UBSan, ASan)
: "${ENABLE_SANITIZERS:=OFF}"
# Only execute the steps necessary to successfully run CMake.
: "${CMAKE_ONLY:=OFF}"
# Build with -ftrivial-auto-var-init=pattern to catch more bugs caused by
# uninitialized variables.
: "${BUILD_AUTO_VAR_INIT_PATTERN:=OFF}"

. /etc/lsb-release

P4C_DEPS="bison \
build-essential \
ccache \
cmake \
curl \
flex \
g++ \
git \
gnupg \
lld \
libboost-dev \
libboost-graph-dev \
libboost-iostreams-dev \
libfl-dev \
libgc-dev \
pkg-config \
python3 \
python3-pip \
python3-setuptools \
tcpdump"

P4C_EBPF_DEPS="libpcap-dev \
libelf-dev \
zlib1g-dev \
llvm \
clang \
iproute2 \
iptables \
net-tools"

if [[ "${DISTRIB_RELEASE}" == "18.04" ]] ; then
P4C_RUNTIME_DEPS_BOOST="libboost-graph1.65.1 libboost-iostreams1.65.1"
else
P4C_RUNTIME_DEPS_BOOST="libboost-graph1.7* libboost-iostreams1.7*"
fi

P4C_RUNTIME_DEPS="cpp \
${P4C_RUNTIME_DEPS_BOOST} \
libgc1* \
libgmp-dev \
python3"

# use scapy 2.4.5, which is the version on which ptf depends
export P4C_PIP_PACKAGES="ipaddr \
pyroute2 \
ply==3.8 \
scapy==2.4.5 \
clang-format>=15.0.4"
P4C_PIP_PACKAGES="ipaddr \
pyroute2 \
ply==3.8 \
ptf \
scapy==2.4.5 \
clang-format>=15.0.4"


if [[ "${DISTRIB_RELEASE}" == "18.04" ]] ; then
P4C_DEPS+=" libprotobuf-dev protobuf-compiler"
else
echo "deb http://download.opensuse.org/repositories/home:/p4lang/xUbuntu_${DISTRIB_RELEASE}/ /" | tee /etc/apt/sources.list.d/home:p4lang.list
curl -L "http://download.opensuse.org/repositories/home:/p4lang/xUbuntu_${DISTRIB_RELEASE}/Release.key" | apt-key add -
P4C_DEPS+=" p4lang-bmv2"
fi

apt update
apt install -y --no-install-recommends \
${P4C_DEPS} \
${P4C_EBPF_DEPS} \
${P4C_RUNTIME_DEPS}

# TODO: Remove this rm -rf line once the ccache memcache config (https://github.com/p4lang/third-party/blob/main/Dockerfile#L72) is removed.
rm -rf /usr/local/etc/ccache.conf
/usr/local/bin/ccache --set-config cache_dir=/p4c/.ccache
/usr/local/bin/ccache --set-config max_size=1G
ccache --set-config cache_dir=/p4c/.ccache
ccache --set-config max_size=1G

# we want to use Python as the default so change the symlinks
ln -sf /usr/bin/python3 /usr/bin/python
ln -sf /usr/bin/pip3 /usr/bin/pip

pip3 install --upgrade pip
pip3 install wheel
pip3 install $P4C_PIP_PACKAGES

@@ -77,7 +132,7 @@ function install_ptf_ebpf_test_deps() (
for version in $KERNEL_VERSIONS; do
LINUX_TOOLS+=" linux-tools-$version-generic"
done
export P4C_PTF_PACKAGES="gcc-multilib \
P4C_PTF_PACKAGES="gcc-multilib \
python3-six \
libgmp-dev \
libjansson-dev"
@@ -193,7 +248,7 @@ if [ "$CMAKE_ONLY" == "OFF" ]; then
make
make install
# Print ccache statistics after building
/usr/local/bin/ccache -p -s
ccache -p -s
fi


6 changes: 3 additions & 3 deletions tools/ir-generator/ir-generator.ypp
Original file line number Diff line number Diff line change
@@ -99,6 +99,9 @@ static IrNamespace *current_namespace = LookupScope().resolve(0);
%nonassoc ABSTRACT CLASS INTERFACE

%{
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-variable"
#pragma GCC diagnostic ignored "-Wunused-function"
static void
symbol_print(FILE* file, int type, YYSTYPE value)
{
@@ -123,9 +126,6 @@ symbol_print(FILE* file, int type, YYSTYPE value)

#define YYPRINT(file, type, value) symbol_print(file, type, value)

#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-variable"
#pragma GCC diagnostic ignored "-Wunused-function"
#include "ir-generator-lex.c"
#pragma GCC diagnostic pop

0 comments on commit 9bfb5a1

Please sign in to comment.