From 69bde064aa4bcea389cc24e2c23f1d9b6776d814 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=C3=A1t=C3=A9=20Szab=C3=B3?= Date: Fri, 26 Jul 2024 18:43:21 +0200 Subject: [PATCH] initial cmake rework build system --- .github/workflows/getdeps_linux.yml | 172 ++ .gitignore | 1 + .projectid | 1 + CMakeLists.txt | 69 + build/fbcode_builder/.gitignore | 5 + .../fbcode_builder/CMake/FBBuildOptions.cmake | 15 + .../CMake/FBCMakeParseArgs.cmake | 141 ++ .../CMake/FBCompilerSettings.cmake | 13 + .../CMake/FBCompilerSettingsMSVC.cmake | 11 + .../CMake/FBCompilerSettingsUnix.cmake | 9 + .../fbcode_builder/CMake/FBPythonBinary.cmake | 697 ++++++++ .../CMake/FBPythonTestAddTests.cmake | 59 + .../CMake/FBThriftCppLibrary.cmake | 197 +++ .../CMake/FBThriftLibrary.cmake | 77 + .../CMake/FBThriftPyLibrary.cmake | 111 ++ .../CMake/FindDoubleConversion.cmake | 19 + build/fbcode_builder/CMake/FindGMock.cmake | 80 + build/fbcode_builder/CMake/FindGflags.cmake | 106 ++ build/fbcode_builder/CMake/FindGlog.cmake | 49 + build/fbcode_builder/CMake/FindLMDB.cmake | 19 + build/fbcode_builder/CMake/FindLibEvent.cmake | 77 + .../fbcode_builder/CMake/FindLibUnwind.cmake | 29 + build/fbcode_builder/CMake/FindPCRE.cmake | 11 + build/fbcode_builder/CMake/FindPCRE2.cmake | 12 + build/fbcode_builder/CMake/FindRe2.cmake | 20 + build/fbcode_builder/CMake/FindSodium.cmake | 297 ++++ build/fbcode_builder/CMake/FindZstd.cmake | 41 + .../CMake/RustStaticLibrary.cmake | 534 +++++++ build/fbcode_builder/CMake/fb_py_test_main.py | 805 ++++++++++ build/fbcode_builder/CMake/fb_py_win_main.c | 140 ++ .../fbcode_builder/CMake/make_fbpy_archive.py | 327 ++++ build/fbcode_builder/LICENSE | 21 + build/fbcode_builder/README.md | 43 + build/fbcode_builder/getdeps.py | 1422 +++++++++++++++++ build/fbcode_builder/getdeps/__init__.py | 0 build/fbcode_builder/getdeps/builder.py | 1326 +++++++++++++++ build/fbcode_builder/getdeps/buildopts.py | 670 ++++++++ build/fbcode_builder/getdeps/cache.py | 39 + 
build/fbcode_builder/getdeps/cargo.py | 467 ++++++ build/fbcode_builder/getdeps/copytree.py | 82 + build/fbcode_builder/getdeps/dyndeps.py | 461 ++++++ build/fbcode_builder/getdeps/envfuncs.py | 198 +++ build/fbcode_builder/getdeps/errors.py | 19 + build/fbcode_builder/getdeps/expr.py | 186 +++ build/fbcode_builder/getdeps/fetcher.py | 911 +++++++++++ build/fbcode_builder/getdeps/load.py | 366 +++++ build/fbcode_builder/getdeps/manifest.py | 783 +++++++++ build/fbcode_builder/getdeps/platform.py | 291 ++++ .../getdeps/py_wheel_builder.py | 288 ++++ build/fbcode_builder/getdeps/runcmd.py | 168 ++ build/fbcode_builder/getdeps/subcmd.py | 58 + .../fbcode_builder/getdeps/test/expr_test.py | 50 + .../getdeps/test/fixtures/duplicate/foo | 2 + .../test/fixtures/duplicate/subdir/foo | 2 + .../getdeps/test/manifest_test.py | 234 +++ .../getdeps/test/platform_test.py | 41 + .../getdeps/test/scratch_test.py | 81 + build/fbcode_builder/manifests/CLI11 | 14 + build/fbcode_builder/manifests/autoconf | 22 + build/fbcode_builder/manifests/automake | 25 + build/fbcode_builder/manifests/benchmark | 13 + build/fbcode_builder/manifests/blake3 | 10 + build/fbcode_builder/manifests/boost | 116 ++ build/fbcode_builder/manifests/bz2 | 30 + build/fbcode_builder/manifests/cabal | 12 + build/fbcode_builder/manifests/clang | 5 + build/fbcode_builder/manifests/cmake | 49 + build/fbcode_builder/manifests/cpptoml | 16 + .../manifests/double-conversion | 23 + build/fbcode_builder/manifests/eden | 113 ++ build/fbcode_builder/manifests/edencommon | 32 + build/fbcode_builder/manifests/exprtk | 15 + build/fbcode_builder/manifests/fatal | 24 + build/fbcode_builder/manifests/fb303 | 37 + build/fbcode_builder/manifests/fboss | 48 + build/fbcode_builder/manifests/fbthrift | 47 + build/fbcode_builder/manifests/fizz | 37 + build/fbcode_builder/manifests/fmt | 20 + build/fbcode_builder/manifests/folly | 76 + build/fbcode_builder/manifests/gflags | 25 + build/fbcode_builder/manifests/ghc | 15 + 
build/fbcode_builder/manifests/glean | 45 + build/fbcode_builder/manifests/glog | 32 + build/fbcode_builder/manifests/googletest | 30 + build/fbcode_builder/manifests/gperf | 14 + build/fbcode_builder/manifests/hsthrift | 34 + build/fbcode_builder/manifests/iproute2 | 14 + build/fbcode_builder/manifests/katran | 41 + build/fbcode_builder/manifests/libbpf | 26 + build/fbcode_builder/manifests/libcurl | 42 + build/fbcode_builder/manifests/libdwarf | 20 + build/fbcode_builder/manifests/libelf | 23 + build/fbcode_builder/manifests/libevent | 41 + build/fbcode_builder/manifests/libffi | 23 + build/fbcode_builder/manifests/libgit2 | 33 + build/fbcode_builder/manifests/libiberty | 27 + build/fbcode_builder/manifests/libmnl | 24 + build/fbcode_builder/manifests/libnl | 21 + build/fbcode_builder/manifests/libsai | 14 + build/fbcode_builder/manifests/libsodium | 39 + build/fbcode_builder/manifests/libtool | 28 + build/fbcode_builder/manifests/libunwind | 17 + build/fbcode_builder/manifests/libusb | 29 + build/fbcode_builder/manifests/libyaml | 13 + build/fbcode_builder/manifests/llvm | 5 + build/fbcode_builder/manifests/lmdb | 17 + build/fbcode_builder/manifests/lz4 | 25 + build/fbcode_builder/manifests/mcrouter | 23 + build/fbcode_builder/manifests/mononoke | 51 + build/fbcode_builder/manifests/mvfst | 32 + build/fbcode_builder/manifests/ncurses | 30 + build/fbcode_builder/manifests/nghttp2 | 24 + build/fbcode_builder/manifests/ninja | 32 + build/fbcode_builder/manifests/nlohmann-json | 12 + build/fbcode_builder/manifests/openr | 38 + build/fbcode_builder/manifests/openssl | 32 + build/fbcode_builder/manifests/osxfuse | 12 + build/fbcode_builder/manifests/patchelf | 20 + build/fbcode_builder/manifests/pcre2 | 20 + build/fbcode_builder/manifests/perl | 11 + build/fbcode_builder/manifests/pexpect | 12 + build/fbcode_builder/manifests/proxygen | 37 + build/fbcode_builder/manifests/python | 48 + .../fbcode_builder/manifests/python-filelock | 9 + .../manifests/python-ptyprocess 
| 9 + .../manifests/python-setuptools | 9 + build/fbcode_builder/manifests/python-six | 9 + build/fbcode_builder/manifests/python-toml | 9 + build/fbcode_builder/manifests/ragel | 19 + build/fbcode_builder/manifests/range-v3 | 11 + build/fbcode_builder/manifests/re2 | 23 + build/fbcode_builder/manifests/rocksdb | 38 + build/fbcode_builder/manifests/rust-shed | 35 + build/fbcode_builder/manifests/snappy | 30 + build/fbcode_builder/manifests/sqlite3 | 27 + build/fbcode_builder/manifests/wangle | 27 + build/fbcode_builder/manifests/watchman | 48 + build/fbcode_builder/manifests/ws_airstore | 35 + build/fbcode_builder/manifests/xxhash | 5 + build/fbcode_builder/manifests/xz | 22 + build/fbcode_builder/manifests/yaml-cpp | 20 + build/fbcode_builder/manifests/zlib | 28 + build/fbcode_builder/manifests/zstd | 34 + build/fbcode_builder/manifests/zstrong | 33 + .../boost_comparator_operator_fix.patch | 11 + .../fbcode_builder/patches/iproute2_oss.patch | 36 + .../zlib_dont_build_more_than_needed.patch | 33 + mcrouter/CMakeLists.txt | 140 ++ mcrouter/configure.ac | 15 +- mcrouter/lib/CMakeLists.txt | 77 + mcrouter/lib/carbon/CMakeLists.txt | 26 + mcrouter/lib/carbon/connection/CMakeLists.txt | 10 + mcrouter/lib/carbon/example/CMakeLists.txt | 18 + .../lib/carbon/example/gen/CMakeLists.txt | 31 + .../example/gen/HelloGoodbyeService.thrift | 4 +- mcrouter/lib/carbon/test/CMakeLists.txt | 29 + mcrouter/lib/config/CMakeLists.txt | 10 + mcrouter/lib/config/test/CMakeLists.txt | 19 + mcrouter/lib/debug/CMakeLists.txt | 9 + mcrouter/lib/fbi/CMakeLists.txt | 14 + mcrouter/lib/fbi/cpp/CMakeLists.txt | 11 + mcrouter/lib/fbi/cpp/test/CMakeLists.txt | 23 + mcrouter/lib/fbi/test/CMakeLists.txt | 16 + mcrouter/lib/invalidation/CMakeLists.txt | 16 + mcrouter/lib/invalidation/test/CMakeLists.txt | 19 + mcrouter/lib/mc/CMakeLists.txt | 6 + mcrouter/lib/network/CMakeLists.txt | 60 + mcrouter/lib/network/gen/CMakeLists.txt | 29 + mcrouter/lib/network/test/CMakeLists.txt | 124 ++ 
mcrouter/lib/network/test/gen/CMakeLists.txt | 13 + mcrouter/lib/test/CMakeLists.txt | 52 + .../lib/test/CompressionCodecManagerTest.cpp | 6 + mcrouter/mcrouter_config-impl.h | 4 - mcrouter/mcrouter_config.h | 4 - mcrouter/routes/CMakeLists.txt | 44 + mcrouter/routes/test/CMakeLists.txt | 52 + mcrouter/test/CMakeLists.txt | 6 + mcrouter/test/cpp_unit_tests/CMakeLists.txt | 32 + mcrouter/test/{cpp_unit_tests => }/main.cpp | 9 + mcrouter/tools/CMakeLists.txt | 6 + mcrouter/tools/mcpiper/CMakeLists.txt | 36 + mcrouter/tools/mcpiper/test/CMakeLists.txt | 18 + 182 files changed, 15818 insertions(+), 18 deletions(-) create mode 100644 .github/workflows/getdeps_linux.yml create mode 100644 .gitignore create mode 100644 .projectid create mode 100644 CMakeLists.txt create mode 100644 build/fbcode_builder/.gitignore create mode 100644 build/fbcode_builder/CMake/FBBuildOptions.cmake create mode 100644 build/fbcode_builder/CMake/FBCMakeParseArgs.cmake create mode 100644 build/fbcode_builder/CMake/FBCompilerSettings.cmake create mode 100644 build/fbcode_builder/CMake/FBCompilerSettingsMSVC.cmake create mode 100644 build/fbcode_builder/CMake/FBCompilerSettingsUnix.cmake create mode 100644 build/fbcode_builder/CMake/FBPythonBinary.cmake create mode 100644 build/fbcode_builder/CMake/FBPythonTestAddTests.cmake create mode 100644 build/fbcode_builder/CMake/FBThriftCppLibrary.cmake create mode 100644 build/fbcode_builder/CMake/FBThriftLibrary.cmake create mode 100644 build/fbcode_builder/CMake/FBThriftPyLibrary.cmake create mode 100644 build/fbcode_builder/CMake/FindDoubleConversion.cmake create mode 100644 build/fbcode_builder/CMake/FindGMock.cmake create mode 100644 build/fbcode_builder/CMake/FindGflags.cmake create mode 100644 build/fbcode_builder/CMake/FindGlog.cmake create mode 100644 build/fbcode_builder/CMake/FindLMDB.cmake create mode 100644 build/fbcode_builder/CMake/FindLibEvent.cmake create mode 100644 build/fbcode_builder/CMake/FindLibUnwind.cmake create mode 100644 
build/fbcode_builder/CMake/FindPCRE.cmake create mode 100644 build/fbcode_builder/CMake/FindPCRE2.cmake create mode 100644 build/fbcode_builder/CMake/FindRe2.cmake create mode 100644 build/fbcode_builder/CMake/FindSodium.cmake create mode 100644 build/fbcode_builder/CMake/FindZstd.cmake create mode 100644 build/fbcode_builder/CMake/RustStaticLibrary.cmake create mode 100644 build/fbcode_builder/CMake/fb_py_test_main.py create mode 100644 build/fbcode_builder/CMake/fb_py_win_main.c create mode 100755 build/fbcode_builder/CMake/make_fbpy_archive.py create mode 100644 build/fbcode_builder/LICENSE create mode 100644 build/fbcode_builder/README.md create mode 100755 build/fbcode_builder/getdeps.py create mode 100644 build/fbcode_builder/getdeps/__init__.py create mode 100644 build/fbcode_builder/getdeps/builder.py create mode 100644 build/fbcode_builder/getdeps/buildopts.py create mode 100644 build/fbcode_builder/getdeps/cache.py create mode 100644 build/fbcode_builder/getdeps/cargo.py create mode 100644 build/fbcode_builder/getdeps/copytree.py create mode 100644 build/fbcode_builder/getdeps/dyndeps.py create mode 100644 build/fbcode_builder/getdeps/envfuncs.py create mode 100644 build/fbcode_builder/getdeps/errors.py create mode 100644 build/fbcode_builder/getdeps/expr.py create mode 100644 build/fbcode_builder/getdeps/fetcher.py create mode 100644 build/fbcode_builder/getdeps/load.py create mode 100644 build/fbcode_builder/getdeps/manifest.py create mode 100644 build/fbcode_builder/getdeps/platform.py create mode 100644 build/fbcode_builder/getdeps/py_wheel_builder.py create mode 100644 build/fbcode_builder/getdeps/runcmd.py create mode 100644 build/fbcode_builder/getdeps/subcmd.py create mode 100644 build/fbcode_builder/getdeps/test/expr_test.py create mode 100644 build/fbcode_builder/getdeps/test/fixtures/duplicate/foo create mode 100644 build/fbcode_builder/getdeps/test/fixtures/duplicate/subdir/foo create mode 100644 
build/fbcode_builder/getdeps/test/manifest_test.py create mode 100644 build/fbcode_builder/getdeps/test/platform_test.py create mode 100644 build/fbcode_builder/getdeps/test/scratch_test.py create mode 100644 build/fbcode_builder/manifests/CLI11 create mode 100644 build/fbcode_builder/manifests/autoconf create mode 100644 build/fbcode_builder/manifests/automake create mode 100644 build/fbcode_builder/manifests/benchmark create mode 100644 build/fbcode_builder/manifests/blake3 create mode 100644 build/fbcode_builder/manifests/boost create mode 100644 build/fbcode_builder/manifests/bz2 create mode 100644 build/fbcode_builder/manifests/cabal create mode 100644 build/fbcode_builder/manifests/clang create mode 100644 build/fbcode_builder/manifests/cmake create mode 100644 build/fbcode_builder/manifests/cpptoml create mode 100644 build/fbcode_builder/manifests/double-conversion create mode 100644 build/fbcode_builder/manifests/eden create mode 100644 build/fbcode_builder/manifests/edencommon create mode 100644 build/fbcode_builder/manifests/exprtk create mode 100644 build/fbcode_builder/manifests/fatal create mode 100644 build/fbcode_builder/manifests/fb303 create mode 100644 build/fbcode_builder/manifests/fboss create mode 100644 build/fbcode_builder/manifests/fbthrift create mode 100644 build/fbcode_builder/manifests/fizz create mode 100644 build/fbcode_builder/manifests/fmt create mode 100644 build/fbcode_builder/manifests/folly create mode 100644 build/fbcode_builder/manifests/gflags create mode 100644 build/fbcode_builder/manifests/ghc create mode 100644 build/fbcode_builder/manifests/glean create mode 100644 build/fbcode_builder/manifests/glog create mode 100644 build/fbcode_builder/manifests/googletest create mode 100644 build/fbcode_builder/manifests/gperf create mode 100644 build/fbcode_builder/manifests/hsthrift create mode 100644 build/fbcode_builder/manifests/iproute2 create mode 100644 build/fbcode_builder/manifests/katran create mode 100644 
build/fbcode_builder/manifests/libbpf create mode 100644 build/fbcode_builder/manifests/libcurl create mode 100644 build/fbcode_builder/manifests/libdwarf create mode 100644 build/fbcode_builder/manifests/libelf create mode 100644 build/fbcode_builder/manifests/libevent create mode 100644 build/fbcode_builder/manifests/libffi create mode 100644 build/fbcode_builder/manifests/libgit2 create mode 100644 build/fbcode_builder/manifests/libiberty create mode 100644 build/fbcode_builder/manifests/libmnl create mode 100644 build/fbcode_builder/manifests/libnl create mode 100644 build/fbcode_builder/manifests/libsai create mode 100644 build/fbcode_builder/manifests/libsodium create mode 100644 build/fbcode_builder/manifests/libtool create mode 100644 build/fbcode_builder/manifests/libunwind create mode 100644 build/fbcode_builder/manifests/libusb create mode 100644 build/fbcode_builder/manifests/libyaml create mode 100644 build/fbcode_builder/manifests/llvm create mode 100644 build/fbcode_builder/manifests/lmdb create mode 100644 build/fbcode_builder/manifests/lz4 create mode 100644 build/fbcode_builder/manifests/mcrouter create mode 100644 build/fbcode_builder/manifests/mononoke create mode 100644 build/fbcode_builder/manifests/mvfst create mode 100644 build/fbcode_builder/manifests/ncurses create mode 100644 build/fbcode_builder/manifests/nghttp2 create mode 100644 build/fbcode_builder/manifests/ninja create mode 100644 build/fbcode_builder/manifests/nlohmann-json create mode 100644 build/fbcode_builder/manifests/openr create mode 100644 build/fbcode_builder/manifests/openssl create mode 100644 build/fbcode_builder/manifests/osxfuse create mode 100644 build/fbcode_builder/manifests/patchelf create mode 100644 build/fbcode_builder/manifests/pcre2 create mode 100644 build/fbcode_builder/manifests/perl create mode 100644 build/fbcode_builder/manifests/pexpect create mode 100644 build/fbcode_builder/manifests/proxygen create mode 100644 build/fbcode_builder/manifests/python 
create mode 100644 build/fbcode_builder/manifests/python-filelock create mode 100644 build/fbcode_builder/manifests/python-ptyprocess create mode 100644 build/fbcode_builder/manifests/python-setuptools create mode 100644 build/fbcode_builder/manifests/python-six create mode 100644 build/fbcode_builder/manifests/python-toml create mode 100644 build/fbcode_builder/manifests/ragel create mode 100644 build/fbcode_builder/manifests/range-v3 create mode 100644 build/fbcode_builder/manifests/re2 create mode 100644 build/fbcode_builder/manifests/rocksdb create mode 100644 build/fbcode_builder/manifests/rust-shed create mode 100644 build/fbcode_builder/manifests/snappy create mode 100644 build/fbcode_builder/manifests/sqlite3 create mode 100644 build/fbcode_builder/manifests/wangle create mode 100644 build/fbcode_builder/manifests/watchman create mode 100644 build/fbcode_builder/manifests/ws_airstore create mode 100644 build/fbcode_builder/manifests/xxhash create mode 100644 build/fbcode_builder/manifests/xz create mode 100644 build/fbcode_builder/manifests/yaml-cpp create mode 100644 build/fbcode_builder/manifests/zlib create mode 100644 build/fbcode_builder/manifests/zstd create mode 100644 build/fbcode_builder/manifests/zstrong create mode 100644 build/fbcode_builder/patches/boost_comparator_operator_fix.patch create mode 100644 build/fbcode_builder/patches/iproute2_oss.patch create mode 100644 build/fbcode_builder/patches/zlib_dont_build_more_than_needed.patch create mode 100644 mcrouter/CMakeLists.txt create mode 100644 mcrouter/lib/CMakeLists.txt create mode 100644 mcrouter/lib/carbon/CMakeLists.txt create mode 100644 mcrouter/lib/carbon/connection/CMakeLists.txt create mode 100644 mcrouter/lib/carbon/example/CMakeLists.txt create mode 100644 mcrouter/lib/carbon/example/gen/CMakeLists.txt create mode 100644 mcrouter/lib/carbon/test/CMakeLists.txt create mode 100644 mcrouter/lib/config/CMakeLists.txt create mode 100644 mcrouter/lib/config/test/CMakeLists.txt create 
mode 100644 mcrouter/lib/debug/CMakeLists.txt create mode 100644 mcrouter/lib/fbi/CMakeLists.txt create mode 100644 mcrouter/lib/fbi/cpp/CMakeLists.txt create mode 100644 mcrouter/lib/fbi/cpp/test/CMakeLists.txt create mode 100644 mcrouter/lib/fbi/test/CMakeLists.txt create mode 100644 mcrouter/lib/invalidation/CMakeLists.txt create mode 100644 mcrouter/lib/invalidation/test/CMakeLists.txt create mode 100644 mcrouter/lib/mc/CMakeLists.txt create mode 100644 mcrouter/lib/network/CMakeLists.txt create mode 100644 mcrouter/lib/network/gen/CMakeLists.txt create mode 100644 mcrouter/lib/network/test/CMakeLists.txt create mode 100644 mcrouter/lib/network/test/gen/CMakeLists.txt create mode 100644 mcrouter/lib/test/CMakeLists.txt create mode 100644 mcrouter/routes/CMakeLists.txt create mode 100644 mcrouter/routes/test/CMakeLists.txt create mode 100644 mcrouter/test/CMakeLists.txt create mode 100644 mcrouter/test/cpp_unit_tests/CMakeLists.txt rename mcrouter/test/{cpp_unit_tests => }/main.cpp (68%) create mode 100644 mcrouter/tools/CMakeLists.txt create mode 100644 mcrouter/tools/mcpiper/CMakeLists.txt create mode 100644 mcrouter/tools/mcpiper/test/CMakeLists.txt diff --git a/.github/workflows/getdeps_linux.yml b/.github/workflows/getdeps_linux.yml new file mode 100644 index 000000000..a71b0587c --- /dev/null +++ b/.github/workflows/getdeps_linux.yml @@ -0,0 +1,172 @@ +# This file was @generated by getdeps.py + +name: linux + +on: + push: + branches: + - main + pull_request: + branches: + - main + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + build: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - name: Show disk space at start + run: df -h + - name: Free up disk space + run: sudo rm -rf /usr/local/lib/android + - name: Show disk space after freeing up + run: df -h + - name: Update system package info + run: sudo apt-get update + - name: Install system deps + run: sudo python3 build/fbcode_builder/getdeps.py 
--allow-system-packages install-system-deps --recursive mcrouter + - name: Install packaging system deps + run: sudo python3 build/fbcode_builder/getdeps.py --allow-system-packages install-system-deps --recursive patchelf + - name: Fetch ninja + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests ninja + - name: Fetch cmake + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests cmake + - name: Fetch fmt + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests fmt + - name: Fetch googletest + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests googletest + - name: Fetch python-six + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests python-six + - name: Fetch zstd + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests zstd + - name: Fetch zlib + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests zlib + - name: Fetch boost + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests boost + - name: Fetch double-conversion + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests double-conversion + - name: Fetch gflags + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests gflags + - name: Fetch glog + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests glog + - name: Fetch libdwarf + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests libdwarf + - name: Fetch libevent + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests libevent + - name: Fetch lz4 + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests lz4 + - name: Fetch snappy + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch 
--no-tests snappy + - name: Fetch bz2 + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests bz2 + - name: Fetch autoconf + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests autoconf + - name: Fetch automake + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests automake + - name: Fetch libtool + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests libtool + - name: Fetch ragel + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests ragel + - name: Fetch libiberty + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests libiberty + - name: Fetch libsodium + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests libsodium + - name: Fetch libunwind + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests libunwind + - name: Fetch xz + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests xz + - name: Fetch folly + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests folly + - name: Fetch fizz + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests fizz + - name: Fetch wangle + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests wangle + - name: Fetch mvfst + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests mvfst + - name: Fetch libffi + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests libffi + - name: Fetch ncurses + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests ncurses + - name: Fetch python + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fetch --no-tests python + - name: Fetch fbthrift + run: python3 build/fbcode_builder/getdeps.py 
--allow-system-packages fetch --no-tests fbthrift + - name: Build ninja + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests ninja + - name: Build cmake + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests cmake + - name: Build fmt + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests fmt + - name: Build googletest + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests googletest + - name: Build python-six + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests python-six + - name: Build zstd + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests zstd + - name: Build zlib + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests zlib + - name: Build boost + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests boost + - name: Build double-conversion + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests double-conversion + - name: Build gflags + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests gflags + - name: Build glog + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests glog + - name: Build libdwarf + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests libdwarf + - name: Build libevent + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests libevent + - name: Build lz4 + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests lz4 + - name: Build snappy + run: python3 build/fbcode_builder/getdeps.py 
--allow-system-packages build --free-up-disk --no-tests snappy + - name: Build bz2 + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests bz2 + - name: Build autoconf + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests autoconf + - name: Build automake + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests automake + - name: Build libtool + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests libtool + - name: Build ragel + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests ragel + - name: Build libiberty + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests libiberty + - name: Build libsodium + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests libsodium + - name: Build libunwind + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests libunwind + - name: Build xz + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests xz + - name: Build folly + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests folly + - name: Build fizz + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests fizz + - name: Build wangle + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests wangle + - name: Build mvfst + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests mvfst + - name: Build libffi + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests libffi + - name: Build ncurses + run: python3 build/fbcode_builder/getdeps.py 
--allow-system-packages build --free-up-disk --no-tests ncurses + - name: Build python + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests python + - name: Build fbthrift + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --free-up-disk --no-tests fbthrift + - name: Build mcrouter + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages build --src-dir=. mcrouter --project-install-prefix mcrouter:/usr/local + - name: Copy artifacts + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages fixup-dyn-deps --strip --src-dir=. mcrouter _artifacts/linux --project-install-prefix mcrouter:/usr/local --final-install-prefix /usr/local + - uses: actions/upload-artifact@v2 + with: + name: mcrouter + path: _artifacts + - name: Test mcrouter + run: python3 build/fbcode_builder/getdeps.py --allow-system-packages test --src-dir=. mcrouter --project-install-prefix mcrouter:/usr/local + - name: Show disk space at end + run: df -h diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..9bf3746b2 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +compile_commands.json diff --git a/.projectid b/.projectid new file mode 100644 index 000000000..1d8fa2843 --- /dev/null +++ b/.projectid @@ -0,0 +1 @@ +mcrouter diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 000000000..79d183ec9 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,69 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. 
+ +cmake_minimum_required(VERSION 3.10) + +project(mcrouter) + +set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/build/fbcode_builder/CMake" + ${CMAKE_MODULE_PATH}) + +set(CMAKE_CXX_STANDARD 20) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +set(CMAKE_CXX_EXTENSIONS OFF) +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +option(BUILD_TESTS "If enabled, compile the tests." OFF) + +# Disable Meta-specific functionality for the OSS build. +add_compile_definitions(LIBMC_FBTRACE_DISABLE DISABLE_COMPRESSION) + +if(WIN32) + include(FBCompilerSettingsMSVC) +else() + include(FBCompilerSettingsUnix) + set(CMAKE_CXX_FLAGS + "${CMAKE_CXX_FLAGS} -fno-coroutines -Wno-unused-parameter -Wno-uninitialized -Wno-maybe-uninitialized" + ) + + # Create symlink to compile_commands.json for IDE to pick it up + execute_process( + COMMAND ${CMAKE_COMMAND} -E create_symlink + ${CMAKE_BINARY_DIR}/compile_commands.json + ${CMAKE_CURRENT_SOURCE_DIR}/compile_commands.json + ) +endif() + +# Set up options for generated Thrift client code +include(FBThriftLibrary) +set(THRIFT_OPTIONS stack_arguments sync_methods_return_try + deprecated_terse_writes) + +find_package(Boost 1.65.1 REQUIRED COMPONENTS system thread filesystem regex + context program_options) + +find_package(fmt REQUIRED) +find_package(folly REQUIRED) +find_package(Fizz REQUIRED) +find_package(Glog REQUIRED) +find_package(gflags REQUIRED) +find_package(wangle REQUIRED) +find_package(FBThrift REQUIRED) + +include_directories(.) 
+include_directories(${CMAKE_CURRENT_BINARY_DIR}) + +if(BUILD_TESTS) + enable_testing() + include(CTest) + + find_package(GTest MODULE REQUIRED) + + include(GoogleTest) +endif() + +add_subdirectory(mcrouter) + +install(TARGETS mcrouter mcpiper) diff --git a/build/fbcode_builder/.gitignore b/build/fbcode_builder/.gitignore new file mode 100644 index 000000000..b98f3edfa --- /dev/null +++ b/build/fbcode_builder/.gitignore @@ -0,0 +1,5 @@ +# Facebook-internal CI builds don't have write permission outside of the +# source tree, so we install all projects into this directory. +/facebook_ci +__pycache__/ +*.pyc diff --git a/build/fbcode_builder/CMake/FBBuildOptions.cmake b/build/fbcode_builder/CMake/FBBuildOptions.cmake new file mode 100644 index 000000000..dbaa29933 --- /dev/null +++ b/build/fbcode_builder/CMake/FBBuildOptions.cmake @@ -0,0 +1,15 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +function (fb_activate_static_library_option) + option(USE_STATIC_DEPS_ON_UNIX + "If enabled, use static dependencies on unix systems. This is generally discouraged." + OFF + ) + # Mark USE_STATIC_DEPS_ON_UNIX as an "advanced" option, since enabling it + # is generally discouraged. + mark_as_advanced(USE_STATIC_DEPS_ON_UNIX) + + if(UNIX AND USE_STATIC_DEPS_ON_UNIX) + SET(CMAKE_FIND_LIBRARY_SUFFIXES ".a" PARENT_SCOPE) + endif() +endfunction() diff --git a/build/fbcode_builder/CMake/FBCMakeParseArgs.cmake b/build/fbcode_builder/CMake/FBCMakeParseArgs.cmake new file mode 100644 index 000000000..933180189 --- /dev/null +++ b/build/fbcode_builder/CMake/FBCMakeParseArgs.cmake @@ -0,0 +1,141 @@ +# +# Copyright (c) Facebook, Inc. and its affiliates. +# +# Helper function for parsing arguments to a CMake function. +# +# This function is very similar to CMake's built-in cmake_parse_arguments() +# function, with some improvements: +# - This function correctly handles empty arguments. (cmake_parse_arguments() +# ignores empty arguments.) 
+# - If a multi-value argument is specified more than once, the subsequent +# arguments are appended to the original list rather than replacing it. e.g. +# if "SOURCES" is a multi-value argument, and the argument list contains +# "SOURCES a b c SOURCES x y z" then the resulting value for SOURCES will be +# "a;b;c;x;y;z" rather than "x;y;z" +# - This function errors out by default on unrecognized arguments. You can +# pass in an extra "ALLOW_UNPARSED_ARGS" argument to make it behave like +# cmake_parse_arguments(), and return the unparsed arguments in a +# _UNPARSED_ARGUMENTS variable instead. +# +# It does look like cmake_parse_arguments() handled empty arguments correctly +# from CMake 3.0 through 3.3, but it seems like this was probably broken when +# it was turned into a built-in function in CMake 3.4. Here is discussion and +# patches that fixed this behavior prior to CMake 3.0: +# https://cmake.org/pipermail/cmake-developers/2013-November/020607.html +# +# The one downside to this function over the built-in cmake_parse_arguments() +# is that I don't think we can achieve the PARSE_ARGV behavior in a non-builtin +# function, so we can't properly handle arguments that contain ";". CMake will +# treat the ";" characters as list element separators, and treat it as multiple +# separate arguments. +# +function(fb_cmake_parse_args PREFIX OPTIONS ONE_VALUE_ARGS MULTI_VALUE_ARGS ARGS) + foreach(option IN LISTS ARGN) + if ("${option}" STREQUAL "ALLOW_UNPARSED_ARGS") + set(ALLOW_UNPARSED_ARGS TRUE) + else() + message( + FATAL_ERROR + "unknown optional argument for fb_cmake_parse_args(): ${option}" + ) + endif() + endforeach() + + # Define all options as FALSE in the parent scope to start with + foreach(var_name IN LISTS OPTIONS) + set("${PREFIX}_${var_name}" "FALSE" PARENT_SCOPE) + endforeach() + + # TODO: We aren't extremely strict about error checking for one-value + # arguments here. 
e.g., we don't complain if a one-value argument is + # followed by another option/one-value/multi-value name rather than an + # argument. We also don't complain if a one-value argument is the last + # argument and isn't followed by a value. + + list(APPEND all_args ${ONE_VALUE_ARGS}) + list(APPEND all_args ${MULTI_VALUE_ARGS}) + set(current_variable) + set(unparsed_args) + foreach(arg IN LISTS ARGS) + list(FIND OPTIONS "${arg}" opt_index) + if("${opt_index}" EQUAL -1) + list(FIND all_args "${arg}" arg_index) + if("${arg_index}" EQUAL -1) + # This argument does not match an argument name, + # must be an argument value + if("${current_variable}" STREQUAL "") + list(APPEND unparsed_args "${arg}") + else() + # Ugh, CMake lists have a pretty fundamental flaw: they cannot + # distinguish between an empty list and a list with a single empty + # element. We track our own SEEN_VALUES_arg setting to help + # distinguish this and behave properly here. + if ("${SEEN_${current_variable}}" AND "${${current_variable}}" STREQUAL "") + set("${current_variable}" ";${arg}") + else() + list(APPEND "${current_variable}" "${arg}") + endif() + set("SEEN_${current_variable}" TRUE) + endif() + else() + # We found a single- or multi-value argument name + set(current_variable "VALUES_${arg}") + set("SEEN_${arg}" TRUE) + endif() + else() + # We found an option variable + set("${PREFIX}_${arg}" "TRUE" PARENT_SCOPE) + set(current_variable) + endif() + endforeach() + + foreach(arg_name IN LISTS ONE_VALUE_ARGS) + if(NOT "${SEEN_${arg_name}}") + unset("${PREFIX}_${arg_name}" PARENT_SCOPE) + elseif(NOT "${SEEN_VALUES_${arg_name}}") + # If the argument was seen but a value wasn't specified, error out. + # We require exactly one value to be specified. + message( + FATAL_ERROR "argument ${arg_name} was specified without a value" + ) + else() + list(LENGTH "VALUES_${arg_name}" num_args) + if("${num_args}" EQUAL 0) + # We know an argument was specified and that we called list(APPEND). 
+ # If CMake thinks the list is empty that means there is really a single + # empty element in the list. + set("${PREFIX}_${arg_name}" "" PARENT_SCOPE) + elseif("${num_args}" EQUAL 1) + list(GET "VALUES_${arg_name}" 0 arg_value) + set("${PREFIX}_${arg_name}" "${arg_value}" PARENT_SCOPE) + else() + message( + FATAL_ERROR "too many arguments specified for ${arg_name}: " + "${VALUES_${arg_name}}" + ) + endif() + endif() + endforeach() + + foreach(arg_name IN LISTS MULTI_VALUE_ARGS) + # If this argument name was never seen, then unset the parent scope + if (NOT "${SEEN_${arg_name}}") + unset("${PREFIX}_${arg_name}" PARENT_SCOPE) + else() + # TODO: Our caller still won't be able to distinguish between an empty + # list and a list with a single empty element. We can tell which is + # which, but CMake lists don't make it easy to show this to our caller. + set("${PREFIX}_${arg_name}" "${VALUES_${arg_name}}" PARENT_SCOPE) + endif() + endforeach() + + # By default we fatal out on unparsed arguments, but return them to the + # caller if ALLOW_UNPARSED_ARGS was specified. + if (DEFINED unparsed_args) + if ("${ALLOW_UNPARSED_ARGS}") + set("${PREFIX}_UNPARSED_ARGUMENTS" "${unparsed_args}" PARENT_SCOPE) + else() + message(FATAL_ERROR "unrecognized arguments: ${unparsed_args}") + endif() + endif() +endfunction() diff --git a/build/fbcode_builder/CMake/FBCompilerSettings.cmake b/build/fbcode_builder/CMake/FBCompilerSettings.cmake new file mode 100644 index 000000000..585c95320 --- /dev/null +++ b/build/fbcode_builder/CMake/FBCompilerSettings.cmake @@ -0,0 +1,13 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +# This file applies common compiler settings that are shared across +# a number of Facebook opensource projects. +# Please use caution and your best judgement before making changes +# to these shared compiler settings in order to avoid accidentally +# breaking a build in another project! 
+ +if (WIN32) + include(FBCompilerSettingsMSVC) +else() + include(FBCompilerSettingsUnix) +endif() diff --git a/build/fbcode_builder/CMake/FBCompilerSettingsMSVC.cmake b/build/fbcode_builder/CMake/FBCompilerSettingsMSVC.cmake new file mode 100644 index 000000000..4efd7e966 --- /dev/null +++ b/build/fbcode_builder/CMake/FBCompilerSettingsMSVC.cmake @@ -0,0 +1,11 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +# This file applies common compiler settings that are shared across +# a number of Facebook opensource projects. +# Please use caution and your best judgement before making changes +# to these shared compiler settings in order to avoid accidentally +# breaking a build in another project! + +add_compile_options( + /wd4250 # 'class1' : inherits 'class2::member' via dominance +) diff --git a/build/fbcode_builder/CMake/FBCompilerSettingsUnix.cmake b/build/fbcode_builder/CMake/FBCompilerSettingsUnix.cmake new file mode 100644 index 000000000..c26ce78b1 --- /dev/null +++ b/build/fbcode_builder/CMake/FBCompilerSettingsUnix.cmake @@ -0,0 +1,9 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +# This file applies common compiler settings that are shared across +# a number of Facebook opensource projects. +# Please use caution and your best judgement before making changes +# to these shared compiler settings in order to avoid accidentally +# breaking a build in another project! + +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -Wall -Wextra -Wno-deprecated -Wno-deprecated-declarations") diff --git a/build/fbcode_builder/CMake/FBPythonBinary.cmake b/build/fbcode_builder/CMake/FBPythonBinary.cmake new file mode 100644 index 000000000..f91ebaf32 --- /dev/null +++ b/build/fbcode_builder/CMake/FBPythonBinary.cmake @@ -0,0 +1,697 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +include(FBCMakeParseArgs) + +# +# This file contains helper functions for building self-executing Python +# binaries. 
+# +# This is somewhat different than typical python installation with +# distutils/pip/virtualenv/etc. We primarily want to build a standalone +# executable, isolated from other Python packages on the system. We don't want +# to install files into the standard library python paths. This is more +# similar to PEX (https://github.com/pantsbuild/pex) and XAR +# (https://github.com/facebookincubator/xar). (In the future it would be nice +# to update this code to also support directly generating XAR files if XAR is +# available.) +# +# We also want to be able to easily define "libraries" of python files that can +# be shared and re-used between these standalone python executables, and can be +# shared across projects in different repositories. This means that we do need +# a way to "install" libraries so that they are visible to CMake builds in +# other repositories, without actually installing them in the standard python +# library paths. +# + +# If the caller has not already found Python, do so now. +# If we fail to find python now we won't fail immediately, but +# add_fb_python_executable() or add_fb_python_library() will fatal out if they +# are used. +if(NOT TARGET Python3::Interpreter) + # CMake 3.12+ ships with a FindPython3.cmake module. Try using it first. + # We find with QUIET here, since otherwise this generates some noisy warnings + # on versions of CMake before 3.12 + if (WIN32) + # On Windows we need both the Interpreter as well as the Development + # libraries. + find_package(Python3 COMPONENTS Interpreter Development QUIET) + else() + find_package(Python3 COMPONENTS Interpreter QUIET) + endif() + if(Python3_Interpreter_FOUND) + message(STATUS "Found Python 3: ${Python3_EXECUTABLE}") + else() + # Try with the FindPythonInterp.cmake module available in older CMake + # versions. Check to see if the caller has already searched for this + # themselves first. 
+ if(NOT PYTHONINTERP_FOUND) + set(Python_ADDITIONAL_VERSIONS 3 3.6 3.5 3.4 3.3 3.2 3.1) + find_package(PythonInterp) + # TODO: On Windows we require the Python libraries as well. + # We currently do not search for them on this code path. + # For now we require building with CMake 3.12+ on Windows, so that the + # FindPython3 code path above is available. + endif() + if(PYTHONINTERP_FOUND) + if("${PYTHON_VERSION_MAJOR}" GREATER_EQUAL 3) + set(Python3_EXECUTABLE "${PYTHON_EXECUTABLE}") + add_custom_target(Python3::Interpreter) + else() + string( + CONCAT FBPY_FIND_PYTHON_ERR + "found Python ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}, " + "but need Python 3" + ) + endif() + endif() + endif() +endif() + +# Find our helper program. +# We typically install this in the same directory as this .cmake file. +find_program( + FB_MAKE_PYTHON_ARCHIVE "make_fbpy_archive.py" + PATHS ${CMAKE_MODULE_PATH} +) +set(FB_PY_TEST_MAIN "${CMAKE_CURRENT_LIST_DIR}/fb_py_test_main.py") +set( + FB_PY_TEST_DISCOVER_SCRIPT + "${CMAKE_CURRENT_LIST_DIR}/FBPythonTestAddTests.cmake" +) +set( + FB_PY_WIN_MAIN_C + "${CMAKE_CURRENT_LIST_DIR}/fb_py_win_main.c" +) + +# An option to control the default installation location for +# install_fb_python_library(). This is relative to ${CMAKE_INSTALL_PREFIX} +set( + FBPY_LIB_INSTALL_DIR "lib/fb-py-libs" CACHE STRING + "The subdirectory where FB python libraries should be installed" +) + +# +# Build a self-executing python binary. +# +# This accepts the same arguments as add_fb_python_library(). +# +# In addition, a MAIN_MODULE argument is accepted. This argument specifies +# which module should be started as the __main__ module when the executable is +# run. If left unspecified, a __main__.py script must be present in the +# manifest. 
+# +function(add_fb_python_executable TARGET) + fb_py_check_available() + + # Parse the arguments + set(one_value_args BASE_DIR NAMESPACE MAIN_MODULE TYPE) + set(multi_value_args SOURCES DEPENDS) + fb_cmake_parse_args( + ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}" + ) + fb_py_process_default_args(ARG_NAMESPACE ARG_BASE_DIR) + + # Use add_fb_python_library() to perform most of our source handling + add_fb_python_library( + "${TARGET}.main_lib" + BASE_DIR "${ARG_BASE_DIR}" + NAMESPACE "${ARG_NAMESPACE}" + SOURCES ${ARG_SOURCES} + DEPENDS ${ARG_DEPENDS} + ) + + set( + manifest_files + "$" + ) + set( + source_files + "$" + ) + + # The command to build the executable archive. + # + # If we are using CMake 3.8+ we can use COMMAND_EXPAND_LISTS. + # CMP0067 isn't really the policy we care about, but seems like the best way + # to check if we are running 3.8+. + if (POLICY CMP0067) + set(extra_cmd_params COMMAND_EXPAND_LISTS) + set(make_py_args "${manifest_files}") + else() + set(extra_cmd_params) + set(make_py_args --manifest-separator "::" "$") + endif() + + set(output_file "${TARGET}${CMAKE_EXECUTABLE_SUFFIX}") + if(WIN32) + set(zipapp_output "${TARGET}.py_zipapp") + else() + set(zipapp_output "${output_file}") + endif() + set(zipapp_output_file "${zipapp_output}") + + set(is_dir_output FALSE) + if(DEFINED ARG_TYPE) + list(APPEND make_py_args "--type" "${ARG_TYPE}") + if ("${ARG_TYPE}" STREQUAL "dir") + set(is_dir_output TRUE) + # CMake doesn't really seem to like having a directory specified as an + # output; specify the __main__.py file as the output instead. 
+ set(zipapp_output_file "${zipapp_output}/__main__.py") + list(APPEND + extra_cmd_params + COMMAND "${CMAKE_COMMAND}" -E remove_directory "${zipapp_output}" + ) + endif() + endif() + + if(DEFINED ARG_MAIN_MODULE) + list(APPEND make_py_args "--main" "${ARG_MAIN_MODULE}") + endif() + + add_custom_command( + OUTPUT "${zipapp_output_file}" + ${extra_cmd_params} + COMMAND + "${Python3_EXECUTABLE}" "${FB_MAKE_PYTHON_ARCHIVE}" + -o "${zipapp_output}" + ${make_py_args} + DEPENDS + ${source_files} + "${TARGET}.main_lib.py_sources_built" + "${FB_MAKE_PYTHON_ARCHIVE}" + ) + + if(WIN32) + if(is_dir_output) + # TODO: generate a main executable that will invoke Python3 + # with the correct main module inside the output directory + else() + add_executable("${TARGET}.winmain" "${FB_PY_WIN_MAIN_C}") + target_link_libraries("${TARGET}.winmain" Python3::Python) + # The Python3::Python target doesn't seem to be set up completely + # correctly on Windows for some reason, and we have to explicitly add + # ${Python3_LIBRARY_DIRS} to the target link directories. + target_link_directories( + "${TARGET}.winmain" + PUBLIC ${Python3_LIBRARY_DIRS} + ) + add_custom_command( + OUTPUT "${output_file}" + DEPENDS "${TARGET}.winmain" "${zipapp_output_file}" + COMMAND + "cmd.exe" "/c" "copy" "/b" + "${TARGET}.winmain${CMAKE_EXECUTABLE_SUFFIX}+${zipapp_output}" + "${output_file}" + ) + endif() + endif() + + # Add an "ALL" target that depends on force ${TARGET}, + # so that ${TARGET} will be included in the default list of build targets. + add_custom_target("${TARGET}.GEN_PY_EXE" ALL DEPENDS "${output_file}") + + # Allow resolving the executable path for the target that we generate + # via a generator expression like: + # "WATCHMAN_WAIT_PATH=$" + set_property(TARGET "${TARGET}.GEN_PY_EXE" + PROPERTY EXECUTABLE "${CMAKE_CURRENT_BINARY_DIR}/${output_file}") +endfunction() + +# Define a python unittest executable. 
+# The executable is built using add_fb_python_executable and has the +# following differences: +# +# Each of the source files specified in SOURCES will be imported +# and have unittest discovery performed upon them. +# Those sources will be imported in the top level namespace. +# +# The ENV argument allows specifying a list of "KEY=VALUE" +# pairs that will be used by the test runner to set up the environment +# in the child process prior to running the test. This is useful for +# passing additional configuration to the test. +function(add_fb_python_unittest TARGET) + # Parse the arguments + set(multi_value_args SOURCES DEPENDS ENV PROPERTIES) + set( + one_value_args + WORKING_DIRECTORY BASE_DIR NAMESPACE TEST_LIST DISCOVERY_TIMEOUT + ) + fb_cmake_parse_args( + ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}" + ) + fb_py_process_default_args(ARG_NAMESPACE ARG_BASE_DIR) + if(NOT ARG_WORKING_DIRECTORY) + # Default the working directory to the current binary directory. + # This matches the default behavior of add_test() and other standard + # test functions like gtest_discover_tests() + set(ARG_WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}") + endif() + if(NOT ARG_TEST_LIST) + set(ARG_TEST_LIST "${TARGET}_TESTS") + endif() + if(NOT ARG_DISCOVERY_TIMEOUT) + set(ARG_DISCOVERY_TIMEOUT 5) + endif() + + # Tell our test program the list of modules to scan for tests. + # We scan all modules directly listed in our SOURCES argument, and skip + # modules that came from dependencies in the DEPENDS list. + # + # This is written into a __test_modules__.py module that the test runner + # will look at. + set( + test_modules_path + "${CMAKE_CURRENT_BINARY_DIR}/${TARGET}_test_modules.py" + ) + file(WRITE "${test_modules_path}" "TEST_MODULES = [\n") + string(REPLACE "." 
"/" namespace_dir "${ARG_NAMESPACE}") + if (NOT "${namespace_dir}" STREQUAL "") + set(namespace_dir "${namespace_dir}/") + endif() + set(test_modules) + foreach(src_path IN LISTS ARG_SOURCES) + fb_py_compute_dest_path( + abs_source dest_path + "${src_path}" "${namespace_dir}" "${ARG_BASE_DIR}" + ) + string(REPLACE "/" "." module_name "${dest_path}") + string(REGEX REPLACE "\\.py$" "" module_name "${module_name}") + list(APPEND test_modules "${module_name}") + file(APPEND "${test_modules_path}" " '${module_name}',\n") + endforeach() + file(APPEND "${test_modules_path}" "]\n") + + # The __main__ is provided by our runner wrapper/bootstrap + list(APPEND ARG_SOURCES "${FB_PY_TEST_MAIN}=__main__.py") + list(APPEND ARG_SOURCES "${test_modules_path}=__test_modules__.py") + + add_fb_python_executable( + "${TARGET}" + NAMESPACE "${ARG_NAMESPACE}" + BASE_DIR "${ARG_BASE_DIR}" + SOURCES ${ARG_SOURCES} + DEPENDS ${ARG_DEPENDS} + ) + + # Run test discovery after the test executable is built. + # This logic is based on the code for gtest_discover_tests() + set(ctest_file_base "${CMAKE_CURRENT_BINARY_DIR}/${TARGET}") + set(ctest_include_file "${ctest_file_base}_include.cmake") + set(ctest_tests_file "${ctest_file_base}_tests.cmake") + add_custom_command( + TARGET "${TARGET}.GEN_PY_EXE" POST_BUILD + BYPRODUCTS "${ctest_tests_file}" + COMMAND + "${CMAKE_COMMAND}" + -D "TEST_TARGET=${TARGET}" + -D "TEST_INTERPRETER=${Python3_EXECUTABLE}" + -D "TEST_ENV=${ARG_ENV}" + -D "TEST_EXECUTABLE=$" + -D "TEST_WORKING_DIR=${ARG_WORKING_DIRECTORY}" + -D "TEST_LIST=${ARG_TEST_LIST}" + -D "TEST_PREFIX=${TARGET}::" + -D "TEST_PROPERTIES=${ARG_PROPERTIES}" + -D "CTEST_FILE=${ctest_tests_file}" + -P "${FB_PY_TEST_DISCOVER_SCRIPT}" + VERBATIM + ) + + file( + WRITE "${ctest_include_file}" + "if(EXISTS \"${ctest_tests_file}\")\n" + " include(\"${ctest_tests_file}\")\n" + "else()\n" + " add_test(\"${TARGET}_NOT_BUILT\" \"${TARGET}_NOT_BUILT\")\n" + "endif()\n" + ) + set_property( + DIRECTORY APPEND 
PROPERTY TEST_INCLUDE_FILES + "${ctest_include_file}" + ) +endfunction() + +# +# Define a python library. +# +# If you want to install a python library generated from this rule note that +# you need to use install_fb_python_library() rather than CMake's built-in +# install() function. This will make it available for other downstream +# projects to use in their add_fb_python_executable() and +# add_fb_python_library() calls. (You do still need to use `install(EXPORT)` +# later to install the CMake exports.) +# +# Parameters: +# - BASE_DIR : +# The base directory path to strip off from each source path. All source +# files must be inside this directory. If not specified it defaults to +# ${CMAKE_CURRENT_SOURCE_DIR}. +# - NAMESPACE : +# The destination namespace where these files should be installed in python +# binaries. If not specified, this defaults to the current relative path of +# ${CMAKE_CURRENT_SOURCE_DIR} inside ${CMAKE_SOURCE_DIR}. e.g., a python +# library defined in the directory repo_root/foo/bar will use a default +# namespace of "foo.bar" +# - SOURCES <...>: +# The python source files. +# You may optionally specify as source using the form: PATH=ALIAS where +# PATH is a relative path in the source tree and ALIAS is the relative +# path into which PATH should be rewritten. This is useful for mapping +# an executable script to the main module in a python executable. +# e.g.: `python/bin/watchman-wait=__main__.py` +# - DEPENDS <...>: +# Other python libraries that this one depends on. +# - INSTALL_DIR : +# The directory where this library should be installed. +# install_fb_python_library() must still be called later to perform the +# installation. If a relative path is given it will be treated relative to +# ${CMAKE_INSTALL_PREFIX} +# +# CMake is unfortunately pretty crappy at being able to define custom build +# rules & behaviors. 
It doesn't support transitive property propagation +# between custom targets; only the built-in add_executable() and add_library() +# targets support transitive properties. +# +# We hack around this janky CMake behavior by (ab)using interface libraries to +# propagate some of the data we want between targets, without actually +# generating a C library. +# +# add_fb_python_library(SOMELIB) generates the following things: +# - An INTERFACE library rule named SOMELIB.py_lib which tracks some +# information about transitive dependencies: +# - the transitive set of source files in the INTERFACE_SOURCES property +# - the transitive set of manifest files that this library depends on in +# the INTERFACE_INCLUDE_DIRECTORIES property. +# - A custom command that generates a SOMELIB.manifest file. +# This file contains the mapping of source files to desired destination +# locations in executables that depend on this library. This manifest file +# will then be read at build-time in order to build executables. +# +function(add_fb_python_library LIB_NAME) + fb_py_check_available() + + # Parse the arguments + # We use fb_cmake_parse_args() rather than cmake_parse_arguments() since + # cmake_parse_arguments() does not handle empty arguments, and it is common + # for callers to want to specify an empty NAMESPACE parameter. + set(one_value_args BASE_DIR NAMESPACE INSTALL_DIR) + set(multi_value_args SOURCES DEPENDS) + fb_cmake_parse_args( + ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}" + ) + fb_py_process_default_args(ARG_NAMESPACE ARG_BASE_DIR) + + string(REPLACE "." 
"/" namespace_dir "${ARG_NAMESPACE}") + if (NOT "${namespace_dir}" STREQUAL "") + set(namespace_dir "${namespace_dir}/") + endif() + + if(NOT DEFINED ARG_INSTALL_DIR) + set(install_dir "${FBPY_LIB_INSTALL_DIR}/") + elseif("${ARG_INSTALL_DIR}" STREQUAL "") + set(install_dir "") + else() + set(install_dir "${ARG_INSTALL_DIR}/") + endif() + + # message(STATUS "fb py library ${LIB_NAME}: " + # "NS=${namespace_dir} BASE=${ARG_BASE_DIR}") + + # TODO: In the future it would be nice to support pre-compiling the source + # files. We could emit a rule to compile each source file and emit a + # .pyc/.pyo file here, and then have the manifest reference the pyc/pyo + # files. + + # Define a library target to help pass around information about the library, + # and propagate dependency information. + # + # CMake make a lot of assumptions that libraries are C++ libraries. To help + # avoid confusion we name our target "${LIB_NAME}.py_lib" rather than just + # "${LIB_NAME}". This helps avoid confusion if callers try to use + # "${LIB_NAME}" on their own as a target name. (e.g., attempting to install + # it directly with install(TARGETS) won't work. Callers must use + # install_fb_python_library() instead.) + add_library("${LIB_NAME}.py_lib" INTERFACE) + + # Emit the manifest file. + # + # We write the manifest file to a temporary path first, then copy it with + # configure_file(COPYONLY). This is necessary to get CMake to understand + # that "${manifest_path}" is generated by the CMake configure phase, + # and allow using it as a dependency for add_custom_command(). 
+ # (https://gitlab.kitware.com/cmake/cmake/issues/16367) + set(manifest_path "${CMAKE_CURRENT_BINARY_DIR}/${LIB_NAME}.manifest") + set(tmp_manifest "${manifest_path}.tmp") + file(WRITE "${tmp_manifest}" "FBPY_MANIFEST 1\n") + set(abs_sources) + foreach(src_path IN LISTS ARG_SOURCES) + fb_py_compute_dest_path( + abs_source dest_path + "${src_path}" "${namespace_dir}" "${ARG_BASE_DIR}" + ) + list(APPEND abs_sources "${abs_source}") + target_sources( + "${LIB_NAME}.py_lib" INTERFACE + "$" + "$" + ) + file( + APPEND "${tmp_manifest}" + "${abs_source} :: ${dest_path}\n" + ) + endforeach() + configure_file("${tmp_manifest}" "${manifest_path}" COPYONLY) + + target_include_directories( + "${LIB_NAME}.py_lib" INTERFACE + "$" + "$" + ) + + # Add a target that depends on all of the source files. + # This is needed in case some of the source files are generated. This will + # ensure that these source files are brought up-to-date before we build + # any python binaries that depend on this library. + add_custom_target("${LIB_NAME}.py_sources_built" DEPENDS ${abs_sources}) + add_dependencies("${LIB_NAME}.py_lib" "${LIB_NAME}.py_sources_built") + + # Hook up library dependencies, and also make the *.py_sources_built target + # depend on the sources for all of our dependencies also being up-to-date. + foreach(dep IN LISTS ARG_DEPENDS) + target_link_libraries("${LIB_NAME}.py_lib" INTERFACE "${dep}.py_lib") + + # Mark that our .py_sources_built target depends on each our our dependent + # libraries. This serves two functions: + # - This causes CMake to generate an error message if one of the + # dependencies is never defined. The target_link_libraries() call above + # won't complain if one of the dependencies doesn't exist (since it is + # intended to allow passing in file names for plain library files rather + # than just targets). + # - It ensures that sources for our dependencies are built before any + # executable that depends on us. 
Note that we depend on "${dep}.py_lib" + # rather than "${dep}.py_sources_built" for this purpose because the + # ".py_sources_built" target won't be available for imported targets. + add_dependencies("${LIB_NAME}.py_sources_built" "${dep}.py_lib") + endforeach() + + # Add a custom command to help with library installation, in case + # install_fb_python_library() is called later for this library. + # add_custom_command() only works with file dependencies defined in the same + # CMakeLists.txt file, so we want to make sure this is defined here, rather + # than where install_fb_python_library() is called. + # This command won't be run by default, but will only be run if it is needed + # by a subsequent install_fb_python_library() call. + # + # This command copies the library contents into the build directory. + # It would be nicer if we could skip this intermediate copy, and just run + # make_fbpy_archive.py at install time to copy them directly to the desired + # installation directory. Unfortunately this is difficult to do, and seems + # to interfere with some of the CMake code that wants to generate a manifest + # of installed files. 
+ set(build_install_dir "${CMAKE_CURRENT_BINARY_DIR}/${LIB_NAME}.lib_install") + add_custom_command( + OUTPUT + "${build_install_dir}/${LIB_NAME}.manifest" + COMMAND "${CMAKE_COMMAND}" -E remove_directory "${build_install_dir}" + COMMAND + "${Python3_EXECUTABLE}" "${FB_MAKE_PYTHON_ARCHIVE}" --type lib-install + --install-dir "${LIB_NAME}" + -o "${build_install_dir}/${LIB_NAME}" "${manifest_path}" + DEPENDS + "${abs_sources}" + "${manifest_path}" + "${FB_MAKE_PYTHON_ARCHIVE}" + ) + add_custom_target( + "${LIB_NAME}.py_lib_install" + DEPENDS "${build_install_dir}/${LIB_NAME}.manifest" + ) + + # Set some properties to pass through the install paths to + # install_fb_python_library() + # + # Passing through ${build_install_dir} allows install_fb_python_library() + # to work even if used from a different CMakeLists.txt file than where + # add_fb_python_library() was called (i.e. such that + # ${CMAKE_CURRENT_BINARY_DIR} is different between the two calls). + set(abs_install_dir "${install_dir}") + if(NOT IS_ABSOLUTE "${abs_install_dir}") + set(abs_install_dir "${CMAKE_INSTALL_PREFIX}/${abs_install_dir}") + endif() + string(REGEX REPLACE "/$" "" abs_install_dir "${abs_install_dir}") + set_target_properties( + "${LIB_NAME}.py_lib_install" + PROPERTIES + INSTALL_DIR "${abs_install_dir}" + BUILD_INSTALL_DIR "${build_install_dir}" + ) +endfunction() + +# +# Install an FB-style packaged python binary. +# +# - DESTINATION : +# Associate the installed target files with the given export-name. +# +function(install_fb_python_executable TARGET) + # Parse the arguments + set(one_value_args DESTINATION) + set(multi_value_args) + fb_cmake_parse_args( + ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}" + ) + + if(NOT DEFINED ARG_DESTINATION) + set(ARG_DESTINATION bin) + endif() + + install( + PROGRAMS "$" + DESTINATION "${ARG_DESTINATION}" + ) +endfunction() + +# +# Install a python library. +# +# - EXPORT : +# Associate the installed target files with the given export-name. 
+# +# Note that unlike the built-in CMake install() function we do not accept a +# DESTINATION parameter. Instead, use the INSTALL_DIR parameter to +# add_fb_python_library() to set the installation location. +# +function(install_fb_python_library LIB_NAME) + set(one_value_args EXPORT) + fb_cmake_parse_args(ARG "" "${one_value_args}" "" "${ARGN}") + + # Export our "${LIB_NAME}.py_lib" target so that it will be available to + # downstream projects in our installed CMake config files. + if(DEFINED ARG_EXPORT) + install(TARGETS "${LIB_NAME}.py_lib" EXPORT "${ARG_EXPORT}") + endif() + + # add_fb_python_library() emits a .py_lib_install target that will prepare + # the installation directory. However, it isn't part of the "ALL" target and + # therefore isn't built by default. + # + # Make sure the ALL target depends on it now. We have to do this by + # introducing yet another custom target. + # Add it as a dependency to the ALL target now. + add_custom_target("${LIB_NAME}.py_lib_install_all" ALL) + add_dependencies( + "${LIB_NAME}.py_lib_install_all" "${LIB_NAME}.py_lib_install" + ) + + # Copy the intermediate install directory generated at build time into + # the desired install location. + get_target_property(dest_dir "${LIB_NAME}.py_lib_install" "INSTALL_DIR") + get_target_property( + build_install_dir "${LIB_NAME}.py_lib_install" "BUILD_INSTALL_DIR" + ) + install( + DIRECTORY "${build_install_dir}/${LIB_NAME}" + DESTINATION "${dest_dir}" + ) + install( + FILES "${build_install_dir}/${LIB_NAME}.manifest" + DESTINATION "${dest_dir}" + ) +endfunction() + +# Helper macro to process the BASE_DIR and NAMESPACE arguments for +# add_fb_python_executable() and add_fb_python_library() +macro(fb_py_process_default_args NAMESPACE_VAR BASE_DIR_VAR) + # If the namespace was not specified, default to the relative path to the + # current directory (starting from the repository root). 
+ if(NOT DEFINED "${NAMESPACE_VAR}") + file( + RELATIVE_PATH "${NAMESPACE_VAR}" + "${CMAKE_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}" + ) + endif() + + if(NOT DEFINED "${BASE_DIR_VAR}") + # If the base directory was not specified, default to the current directory + set("${BASE_DIR_VAR}" "${CMAKE_CURRENT_SOURCE_DIR}") + else() + # If the base directory was specified, always convert it to an + # absolute path. + get_filename_component("${BASE_DIR_VAR}" "${${BASE_DIR_VAR}}" ABSOLUTE) + endif() +endmacro() + +function(fb_py_check_available) + # Make sure that Python 3 and our make_fbpy_archive.py helper script are + # available. + if(NOT Python3_EXECUTABLE) + if(FBPY_FIND_PYTHON_ERR) + message(FATAL_ERROR "Unable to find Python 3: ${FBPY_FIND_PYTHON_ERR}") + else() + message(FATAL_ERROR "Unable to find Python 3") + endif() + endif() + + if (NOT FB_MAKE_PYTHON_ARCHIVE) + message( + FATAL_ERROR "unable to find make_fbpy_archive.py helper program (it " + "should be located in the same directory as FBPythonBinary.cmake)" + ) + endif() +endfunction() + +function( + fb_py_compute_dest_path + src_path_output dest_path_output src_path namespace_dir base_dir +) + if("${src_path}" MATCHES "=") + # We want to split the string on the `=` sign, but cmake doesn't + # provide much in the way of helpers for this, so we rewrite the + # `=` sign to `;` so that we can treat it as a cmake list and + # then index into the components + string(REPLACE "=" ";" src_path_list "${src_path}") + list(GET src_path_list 0 src_path) + # Note that we ignore the `namespace_dir` in the alias case + # in order to allow aliasing a source to the top level `__main__.py` + # filename. 
+ list(GET src_path_list 1 dest_path) + else() + unset(dest_path) + endif() + + get_filename_component(abs_source "${src_path}" ABSOLUTE) + if(NOT DEFINED dest_path) + file(RELATIVE_PATH rel_src "${ARG_BASE_DIR}" "${abs_source}") + if("${rel_src}" MATCHES "^../") + message( + FATAL_ERROR "${LIB_NAME}: source file \"${abs_source}\" is not inside " + "the base directory ${ARG_BASE_DIR}" + ) + endif() + set(dest_path "${namespace_dir}${rel_src}") + endif() + + set("${src_path_output}" "${abs_source}" PARENT_SCOPE) + set("${dest_path_output}" "${dest_path}" PARENT_SCOPE) +endfunction() diff --git a/build/fbcode_builder/CMake/FBPythonTestAddTests.cmake b/build/fbcode_builder/CMake/FBPythonTestAddTests.cmake new file mode 100644 index 000000000..d73c055d8 --- /dev/null +++ b/build/fbcode_builder/CMake/FBPythonTestAddTests.cmake @@ -0,0 +1,59 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +# Add a command to be emitted to the CTest file +set(ctest_script) +function(add_command CMD) + set(escaped_args "") + foreach(arg ${ARGN}) + # Escape all arguments using "Bracket Argument" syntax + # We could skip this for argument that don't contain any special + # characters if we wanted to make the output slightly more human-friendly. 
+ set(escaped_args "${escaped_args} [==[${arg}]==]") + endforeach() + set(ctest_script "${ctest_script}${CMD}(${escaped_args})\n" PARENT_SCOPE) +endfunction() + +if(NOT EXISTS "${TEST_EXECUTABLE}") + message(FATAL_ERROR "Test executable does not exist: ${TEST_EXECUTABLE}") +endif() +execute_process( + COMMAND ${CMAKE_COMMAND} -E env ${TEST_ENV} "${TEST_INTERPRETER}" "${TEST_EXECUTABLE}" --list-tests + WORKING_DIRECTORY "${TEST_WORKING_DIR}" + OUTPUT_VARIABLE output + RESULT_VARIABLE result +) +if(NOT "${result}" EQUAL 0) + string(REPLACE "\n" "\n " output "${output}") + message( + FATAL_ERROR + "Error running test executable: ${TEST_EXECUTABLE}\n" + "Output:\n" + " ${output}\n" + ) +endif() + +# Parse output +string(REPLACE "\n" ";" tests_list "${output}") +foreach(test_name ${tests_list}) + add_command( + add_test + "${TEST_PREFIX}${test_name}" + ${CMAKE_COMMAND} -E env ${TEST_ENV} + "${TEST_INTERPRETER}" "${TEST_EXECUTABLE}" "${test_name}" + ) + add_command( + set_tests_properties + "${TEST_PREFIX}${test_name}" + PROPERTIES + WORKING_DIRECTORY "${TEST_WORKING_DIR}" + ${TEST_PROPERTIES} + ) +endforeach() + +# Set a list of discovered tests in the parent scope, in case users +# want access to this list as a CMake variable +if(TEST_LIST) + add_command(set ${TEST_LIST} ${tests_list}) +endif() + +file(WRITE "${CTEST_FILE}" "${ctest_script}") diff --git a/build/fbcode_builder/CMake/FBThriftCppLibrary.cmake b/build/fbcode_builder/CMake/FBThriftCppLibrary.cmake new file mode 100644 index 000000000..7688d8096 --- /dev/null +++ b/build/fbcode_builder/CMake/FBThriftCppLibrary.cmake @@ -0,0 +1,197 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +include(FBCMakeParseArgs) + +# Generate a C++ library from a thrift file +# +# Parameters: +# - SERVICES [ ...] +# The names of the services defined in the thrift file. +# - DEPENDS [ ...] +# A list of other thrift C++ libraries that this library depends on. +# - OPTIONS [ ...] 
+# A list of options to pass to the thrift compiler. +# - INCLUDE_DIR +# The sub-directory where generated headers will be installed. +# Defaults to "include" if not specified. The caller must still call +# install() to install the thrift library if desired. +# - THRIFT_INCLUDE_DIR +# The sub-directory where generated headers will be installed. +# Defaults to "${INCLUDE_DIR}/thrift-files" if not specified. +# The caller must still call install() to install the thrift library if +# desired. +function(add_fbthrift_cpp_library LIB_NAME THRIFT_FILE) + # Parse the arguments + set(one_value_args INCLUDE_DIR THRIFT_INCLUDE_DIR) + set(multi_value_args SERVICES DEPENDS OPTIONS) + fb_cmake_parse_args( + ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}" + ) + if(NOT DEFINED ARG_INCLUDE_DIR) + set(ARG_INCLUDE_DIR "include") + endif() + if(NOT DEFINED ARG_THRIFT_INCLUDE_DIR) + set(ARG_THRIFT_INCLUDE_DIR "${ARG_INCLUDE_DIR}/thrift-files") + endif() + + get_filename_component(base ${THRIFT_FILE} NAME_WE) + get_filename_component( + output_dir + ${CMAKE_CURRENT_BINARY_DIR}/${THRIFT_FILE} + DIRECTORY + ) + + # Generate relative paths in #includes + file( + RELATIVE_PATH include_prefix + "${CMAKE_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}/${THRIFT_FILE}" + ) + get_filename_component(include_prefix ${include_prefix} DIRECTORY) + + if (NOT "${include_prefix}" STREQUAL "") + list(APPEND ARG_OPTIONS "include_prefix=${include_prefix}") + endif() + # CMake 3.12 is finally getting a list(JOIN) function, but until then + # treating the list as a string and replacing the semicolons is good enough. 
+ string(REPLACE ";" "," GEN_ARG_STR "${ARG_OPTIONS}") + + # Compute the list of generated files + list(APPEND generated_headers + "${output_dir}/gen-cpp2/${base}_constants.h" + "${output_dir}/gen-cpp2/${base}_types.h" + "${output_dir}/gen-cpp2/${base}_types.tcc" + "${output_dir}/gen-cpp2/${base}_types_custom_protocol.h" + "${output_dir}/gen-cpp2/${base}_metadata.h" + ) + list(APPEND generated_sources + "${output_dir}/gen-cpp2/${base}_constants.cpp" + "${output_dir}/gen-cpp2/${base}_data.h" + "${output_dir}/gen-cpp2/${base}_data.cpp" + "${output_dir}/gen-cpp2/${base}_types.cpp" + "${output_dir}/gen-cpp2/${base}_metadata.cpp" + ) + foreach(service IN LISTS ARG_SERVICES) + list(APPEND generated_headers + "${output_dir}/gen-cpp2/${service}.h" + "${output_dir}/gen-cpp2/${service}.tcc" + "${output_dir}/gen-cpp2/${service}AsyncClient.h" + "${output_dir}/gen-cpp2/${service}_custom_protocol.h" + ) + list(APPEND generated_sources + "${output_dir}/gen-cpp2/${service}.cpp" + "${output_dir}/gen-cpp2/${service}AsyncClient.cpp" + "${output_dir}/gen-cpp2/${service}_processmap_binary.cpp" + "${output_dir}/gen-cpp2/${service}_processmap_compact.cpp" + ) + endforeach() + + # This generator expression gets the list of include directories required + # for all of our dependencies. + # It requires using COMMAND_EXPAND_LISTS in the add_custom_command() call + # below. COMMAND_EXPAND_LISTS is only available in CMake 3.8+ + # If we really had to support older versions of CMake we would probably need + # to use a wrapper script around the thrift compiler that could take the + # include list as a single argument and split it up before invoking the + # thrift compiler. 
+  if (NOT POLICY CMP0067)
+    message(FATAL_ERROR "add_fbthrift_cpp_library() requires CMake 3.8+")
+  endif()
+  set(
+    thrift_include_options
+    "-I;$<JOIN:$<TARGET_PROPERTY:${LIB_NAME}.thrift_includes,INTERFACE_INCLUDE_DIRECTORIES>,;-I;>"
+  )
+
+  # Emit the rule to run the thrift compiler
+  add_custom_command(
+    OUTPUT
+      ${generated_headers}
+      ${generated_sources}
+    COMMAND_EXPAND_LISTS
+    COMMAND
+      "${CMAKE_COMMAND}" -E make_directory "${output_dir}"
+    COMMAND
+      "${FBTHRIFT_COMPILER}"
+      --legacy-strict
+      --gen "mstch_cpp2:${GEN_ARG_STR}"
+      "${thrift_include_options}"
+      -I "${FBTHRIFT_INCLUDE_DIR}"
+      -o "${output_dir}"
+      "${CMAKE_CURRENT_SOURCE_DIR}/${THRIFT_FILE}"
+    WORKING_DIRECTORY
+      "${CMAKE_BINARY_DIR}"
+    MAIN_DEPENDENCY
+      "${THRIFT_FILE}"
+    DEPENDS
+      ${ARG_DEPENDS}
+      "${FBTHRIFT_COMPILER}"
+  )
+
+  # Now emit the library rule to compile the sources
+  if (BUILD_SHARED_LIBS)
+    set(LIB_TYPE SHARED)
+  else ()
+    set(LIB_TYPE STATIC)
+  endif ()
+
+  add_library(
+    "${LIB_NAME}" ${LIB_TYPE}
+    ${generated_sources}
+  )
+
+  target_include_directories(
+    "${LIB_NAME}"
+    PUBLIC
+      "$<BUILD_INTERFACE:${CMAKE_BINARY_DIR}>"
+      "$<INSTALL_INTERFACE:${ARG_INCLUDE_DIR}>"
+  )
+  target_link_libraries(
+    "${LIB_NAME}"
+    PUBLIC
+      ${ARG_DEPENDS}
+      FBThrift::thriftcpp2
+      Folly::folly
+      mvfst::mvfst_server_async_tran
+      mvfst::mvfst_server
+  )
+
+  # Add ${generated_headers} to the PUBLIC_HEADER property for ${LIB_NAME}
+  #
+  # This allows callers to install it using
+  # "install(TARGETS ${LIB_NAME} PUBLIC_HEADER)"
+  # However, note that CMake's PUBLIC_HEADER behavior is rather inflexible,
+  # and does not have any way to preserve header directory structure. Callers
+  # must be careful to use the correct PUBLIC_HEADER DESTINATION parameter
+  # when doing this, to put the files in the correct directory themselves.
+  # We define a HEADER_INSTALL_DIR property with the include directory prefix,
+  # so typically callers should specify the PUBLIC_HEADER DESTINATION as
+  # "$<TARGET_PROPERTY:${LIB_NAME},HEADER_INSTALL_DIR>"
+  set_property(
+    TARGET "${LIB_NAME}"
+    PROPERTY PUBLIC_HEADER ${generated_headers}
+  )
+
+  # Define a dummy interface library to help propagate the thrift include
+  # directories between dependencies.
+  add_library("${LIB_NAME}.thrift_includes" INTERFACE)
+  target_include_directories(
+    "${LIB_NAME}.thrift_includes"
+    INTERFACE
+      "$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>"
+      "$<INSTALL_INTERFACE:${ARG_THRIFT_INCLUDE_DIR}>"
+  )
+  foreach(dep IN LISTS ARG_DEPENDS)
+    target_link_libraries(
+      "${LIB_NAME}.thrift_includes"
+      INTERFACE "${dep}.thrift_includes"
+    )
+  endforeach()
+
+  set_target_properties(
+    "${LIB_NAME}"
+    PROPERTIES
+      EXPORT_PROPERTIES "THRIFT_INSTALL_DIR"
+      THRIFT_INSTALL_DIR "${ARG_THRIFT_INCLUDE_DIR}/${include_prefix}"
+      HEADER_INSTALL_DIR "${ARG_INCLUDE_DIR}/${include_prefix}/gen-cpp2"
+  )
+endfunction()
diff --git a/build/fbcode_builder/CMake/FBThriftLibrary.cmake b/build/fbcode_builder/CMake/FBThriftLibrary.cmake
new file mode 100644
index 000000000..e4280e2a4
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBThriftLibrary.cmake
@@ -0,0 +1,77 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+include(FBCMakeParseArgs)
+include(FBThriftPyLibrary)
+include(FBThriftCppLibrary)
+
+#
+# add_fbthrift_library()
+#
+# This is a convenience function that generates thrift libraries for multiple
+# languages.
+# +# For example: +# add_fbthrift_library( +# foo foo.thrift +# LANGUAGES cpp py +# SERVICES Foo +# DEPENDS bar) +# +# will be expanded into two separate calls: +# +# add_fbthrift_cpp_library(foo_cpp foo.thrift SERVICES Foo DEPENDS bar_cpp) +# add_fbthrift_py_library(foo_py foo.thrift SERVICES Foo DEPENDS bar_py) +# +function(add_fbthrift_library LIB_NAME THRIFT_FILE) + # Parse the arguments + set(one_value_args PY_NAMESPACE INCLUDE_DIR THRIFT_INCLUDE_DIR) + set(multi_value_args SERVICES DEPENDS LANGUAGES CPP_OPTIONS PY_OPTIONS) + fb_cmake_parse_args( + ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}" + ) + + if(NOT DEFINED ARG_INCLUDE_DIR) + set(ARG_INCLUDE_DIR "include") + endif() + if(NOT DEFINED ARG_THRIFT_INCLUDE_DIR) + set(ARG_THRIFT_INCLUDE_DIR "${ARG_INCLUDE_DIR}/thrift-files") + endif() + + # CMake 3.12+ adds list(TRANSFORM) which would be nice to use here, but for + # now we still want to support older versions of CMake. + set(CPP_DEPENDS) + set(PY_DEPENDS) + foreach(dep IN LISTS ARG_DEPENDS) + list(APPEND CPP_DEPENDS "${dep}_cpp") + list(APPEND PY_DEPENDS "${dep}_py") + endforeach() + + foreach(lang IN LISTS ARG_LANGUAGES) + if ("${lang}" STREQUAL "cpp") + add_fbthrift_cpp_library( + "${LIB_NAME}_cpp" "${THRIFT_FILE}" + SERVICES ${ARG_SERVICES} + DEPENDS ${CPP_DEPENDS} + OPTIONS ${ARG_CPP_OPTIONS} + INCLUDE_DIR "${ARG_INCLUDE_DIR}" + THRIFT_INCLUDE_DIR "${ARG_THRIFT_INCLUDE_DIR}" + ) + elseif ("${lang}" STREQUAL "py" OR "${lang}" STREQUAL "python") + if (DEFINED ARG_PY_NAMESPACE) + set(namespace_args NAMESPACE "${ARG_PY_NAMESPACE}") + endif() + add_fbthrift_py_library( + "${LIB_NAME}_py" "${THRIFT_FILE}" + SERVICES ${ARG_SERVICES} + ${namespace_args} + DEPENDS ${PY_DEPENDS} + OPTIONS ${ARG_PY_OPTIONS} + THRIFT_INCLUDE_DIR "${ARG_THRIFT_INCLUDE_DIR}" + ) + else() + message( + FATAL_ERROR "unknown language for thrift library ${LIB_NAME}: ${lang}" + ) + endif() + endforeach() +endfunction() diff --git 
a/build/fbcode_builder/CMake/FBThriftPyLibrary.cmake b/build/fbcode_builder/CMake/FBThriftPyLibrary.cmake
new file mode 100644
index 000000000..fa77cde71
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBThriftPyLibrary.cmake
@@ -0,0 +1,111 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+include(FBCMakeParseArgs)
+include(FBPythonBinary)
+
+# Generate a Python library from a thrift file
+function(add_fbthrift_py_library LIB_NAME THRIFT_FILE)
+  # Parse the arguments
+  set(one_value_args NAMESPACE THRIFT_INCLUDE_DIR)
+  set(multi_value_args SERVICES DEPENDS OPTIONS)
+  fb_cmake_parse_args(
+    ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
+  )
+
+  if(NOT DEFINED ARG_THRIFT_INCLUDE_DIR)
+    set(ARG_THRIFT_INCLUDE_DIR "include/thrift-files")
+  endif()
+
+  get_filename_component(base ${THRIFT_FILE} NAME_WE)
+  set(output_dir "${CMAKE_CURRENT_BINARY_DIR}/${THRIFT_FILE}-py")
+
+  # Parse the namespace value
+  if (NOT DEFINED ARG_NAMESPACE)
+    set(ARG_NAMESPACE "${base}")
+  endif()
+
+  string(REPLACE "." "/" namespace_dir "${ARG_NAMESPACE}")
+  set(py_output_dir "${output_dir}/gen-py/${namespace_dir}")
+  list(APPEND generated_sources
+    "${py_output_dir}/__init__.py"
+    "${py_output_dir}/ttypes.py"
+    "${py_output_dir}/constants.py"
+  )
+  foreach(service IN LISTS ARG_SERVICES)
+    list(APPEND generated_sources
+      ${py_output_dir}/${service}.py
+    )
+  endforeach()
+
+  # Define a dummy interface library to help propagate the thrift include
+  # directories between dependencies.
+  add_library("${LIB_NAME}.thrift_includes" INTERFACE)
+  target_include_directories(
+    "${LIB_NAME}.thrift_includes"
+    INTERFACE
+      "$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>"
+      "$<INSTALL_INTERFACE:${ARG_THRIFT_INCLUDE_DIR}>"
+  )
+  foreach(dep IN LISTS ARG_DEPENDS)
+    target_link_libraries(
+      "${LIB_NAME}.thrift_includes"
+      INTERFACE "${dep}.thrift_includes"
+    )
+  endforeach()
+
+  # This generator expression gets the list of include directories required
+  # for all of our dependencies.
+  # It requires using COMMAND_EXPAND_LISTS in the add_custom_command() call
+  # below. COMMAND_EXPAND_LISTS is only available in CMake 3.8+
+  # If we really had to support older versions of CMake we would probably need
+  # to use a wrapper script around the thrift compiler that could take the
+  # include list as a single argument and split it up before invoking the
+  # thrift compiler.
+  if (NOT POLICY CMP0067)
+    message(FATAL_ERROR "add_fbthrift_py_library() requires CMake 3.8+")
+  endif()
+  set(
+    thrift_include_options
+    "-I;$<JOIN:$<TARGET_PROPERTY:${LIB_NAME}.thrift_includes,INTERFACE_INCLUDE_DIRECTORIES>,;-I;>"
+  )
+
+  # Always force generation of "new-style" python classes for Python 2
+  list(APPEND ARG_OPTIONS "new_style")
+  # CMake 3.12 is finally getting a list(JOIN) function, but until then
+  # treating the list as a string and replacing the semicolons is good enough.
+  string(REPLACE ";" "," GEN_ARG_STR "${ARG_OPTIONS}")
+
+  # Emit the rule to run the thrift compiler
+  add_custom_command(
+    OUTPUT
+      ${generated_sources}
+    COMMAND_EXPAND_LISTS
+    COMMAND
+      "${CMAKE_COMMAND}" -E make_directory "${output_dir}"
+    COMMAND
+      "${FBTHRIFT_COMPILER}"
+      --legacy-strict
+      --gen "py:${GEN_ARG_STR}"
+      "${thrift_include_options}"
+      -o "${output_dir}"
+      "${CMAKE_CURRENT_SOURCE_DIR}/${THRIFT_FILE}"
+    WORKING_DIRECTORY
+      "${CMAKE_BINARY_DIR}"
+    MAIN_DEPENDENCY
+      "${THRIFT_FILE}"
+    DEPENDS
+      "${FBTHRIFT_COMPILER}"
+  )
+
+  # We always want to pass the namespace as "" to this call:
+  # thrift will already emit the files with the desired namespace prefix under
+  # gen-py. We don't want add_fb_python_library() to prepend the namespace a
+  # second time.
+  add_fb_python_library(
+    "${LIB_NAME}"
+    BASE_DIR "${output_dir}/gen-py"
+    NAMESPACE ""
+    SOURCES ${generated_sources}
+    DEPENDS ${ARG_DEPENDS} FBThrift::thrift_py
+  )
+endfunction()
diff --git a/build/fbcode_builder/CMake/FindDoubleConversion.cmake b/build/fbcode_builder/CMake/FindDoubleConversion.cmake
new file mode 100644
index 000000000..12a423bc1
--- /dev/null
+++ b/build/fbcode_builder/CMake/FindDoubleConversion.cmake
@@ -0,0 +1,19 @@
+# Copyright (c) Meta Platforms, Inc.
and affiliates. + +# Finds libdouble-conversion. +# +# This module defines: +# DOUBLE_CONVERSION_INCLUDE_DIR +# DOUBLE_CONVERSION_LIBRARY +# + +find_path(DOUBLE_CONVERSION_INCLUDE_DIR double-conversion/double-conversion.h) +find_library(DOUBLE_CONVERSION_LIBRARY NAMES double-conversion) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args( + DoubleConversion + DEFAULT_MSG + DOUBLE_CONVERSION_LIBRARY DOUBLE_CONVERSION_INCLUDE_DIR) + +mark_as_advanced(DOUBLE_CONVERSION_INCLUDE_DIR DOUBLE_CONVERSION_LIBRARY) diff --git a/build/fbcode_builder/CMake/FindGMock.cmake b/build/fbcode_builder/CMake/FindGMock.cmake new file mode 100644 index 000000000..cd042dd9c --- /dev/null +++ b/build/fbcode_builder/CMake/FindGMock.cmake @@ -0,0 +1,80 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# Find libgmock +# +# LIBGMOCK_DEFINES - List of defines when using libgmock. +# LIBGMOCK_INCLUDE_DIR - where to find gmock/gmock.h, etc. +# LIBGMOCK_LIBRARIES - List of libraries when using libgmock. +# LIBGMOCK_FOUND - True if libgmock found. 
+ +IF (LIBGMOCK_INCLUDE_DIR) + # Already in cache, be silent + SET(LIBGMOCK_FIND_QUIETLY TRUE) +ENDIF () + +find_package(GTest CONFIG QUIET) +if (TARGET GTest::gmock) + get_target_property(LIBGMOCK_DEFINES GTest::gtest INTERFACE_COMPILE_DEFINITIONS) + if (NOT ${LIBGMOCK_DEFINES}) + # Explicitly set to empty string if not found to avoid it being + # set to NOTFOUND and breaking compilation + set(LIBGMOCK_DEFINES "") + endif() + get_target_property(LIBGMOCK_INCLUDE_DIR GTest::gtest INTERFACE_INCLUDE_DIRECTORIES) + set(LIBGMOCK_LIBRARIES GTest::gmock_main GTest::gmock GTest::gtest) + set(LIBGMOCK_FOUND ON) + message(STATUS "Found gmock via config, defines=${LIBGMOCK_DEFINES}, include=${LIBGMOCK_INCLUDE_DIR}, libs=${LIBGMOCK_LIBRARIES}") +else() + + FIND_PATH(LIBGMOCK_INCLUDE_DIR gmock/gmock.h) + + FIND_LIBRARY(LIBGMOCK_MAIN_LIBRARY_DEBUG NAMES gmock_maind) + FIND_LIBRARY(LIBGMOCK_MAIN_LIBRARY_RELEASE NAMES gmock_main) + FIND_LIBRARY(LIBGMOCK_LIBRARY_DEBUG NAMES gmockd) + FIND_LIBRARY(LIBGMOCK_LIBRARY_RELEASE NAMES gmock) + FIND_LIBRARY(LIBGTEST_LIBRARY_DEBUG NAMES gtestd) + FIND_LIBRARY(LIBGTEST_LIBRARY_RELEASE NAMES gtest) + + find_package(Threads REQUIRED) + INCLUDE(SelectLibraryConfigurations) + SELECT_LIBRARY_CONFIGURATIONS(LIBGMOCK_MAIN) + SELECT_LIBRARY_CONFIGURATIONS(LIBGMOCK) + SELECT_LIBRARY_CONFIGURATIONS(LIBGTEST) + + set(LIBGMOCK_LIBRARIES + ${LIBGMOCK_MAIN_LIBRARY} + ${LIBGMOCK_LIBRARY} + ${LIBGTEST_LIBRARY} + Threads::Threads + ) + + if(CMAKE_SYSTEM_NAME STREQUAL "Windows") + # The GTEST_LINKED_AS_SHARED_LIBRARY macro must be set properly on Windows. + # + # There isn't currently an easy way to determine if a library was compiled as + # a shared library on Windows, so just assume we've been built against a + # shared build of gmock for now. 
+ SET(LIBGMOCK_DEFINES "GTEST_LINKED_AS_SHARED_LIBRARY=1" CACHE STRING "") + endif() + + # handle the QUIETLY and REQUIRED arguments and set LIBGMOCK_FOUND to TRUE if + # all listed variables are TRUE + INCLUDE(FindPackageHandleStandardArgs) + FIND_PACKAGE_HANDLE_STANDARD_ARGS( + GMock + DEFAULT_MSG + LIBGMOCK_MAIN_LIBRARY + LIBGMOCK_LIBRARY + LIBGTEST_LIBRARY + LIBGMOCK_LIBRARIES + LIBGMOCK_INCLUDE_DIR + ) + + MARK_AS_ADVANCED( + LIBGMOCK_DEFINES + LIBGMOCK_MAIN_LIBRARY + LIBGMOCK_LIBRARY + LIBGTEST_LIBRARY + LIBGMOCK_LIBRARIES + LIBGMOCK_INCLUDE_DIR + ) +endif() diff --git a/build/fbcode_builder/CMake/FindGflags.cmake b/build/fbcode_builder/CMake/FindGflags.cmake new file mode 100644 index 000000000..0101203e0 --- /dev/null +++ b/build/fbcode_builder/CMake/FindGflags.cmake @@ -0,0 +1,106 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# Find libgflags. +# There's a lot of compatibility cruft going on in here, both +# to deal with changes across the FB consumers of this and also +# to deal with variances in behavior of cmake itself. +# +# Since this file is named FindGflags.cmake the cmake convention +# is for the module to export both GFLAGS_FOUND and Gflags_FOUND. +# The convention expected by consumers is that we export the +# following variables, even though these do not match the cmake +# conventions: +# +# LIBGFLAGS_INCLUDE_DIR - where to find gflags/gflags.h, etc. +# LIBGFLAGS_LIBRARY - List of libraries when using libgflags. +# LIBGFLAGS_FOUND - True if libgflags found. +# +# We need to be able to locate gflags both from an installed +# cmake config file and just from the raw headers and libs, so +# test for the former and then the latter, and then stick +# the results together and export them into the variables +# listed above. +# +# For forwards compatibility, we export the following variables: +# +# gflags_INCLUDE_DIR - where to find gflags/gflags.h, etc. 
+# gflags_TARGET / GFLAGS_TARGET / gflags_LIBRARIES +# - List of libraries when using libgflags. +# gflags_FOUND - True if libgflags found. +# + +IF (LIBGFLAGS_INCLUDE_DIR) + # Already in cache, be silent + SET(Gflags_FIND_QUIETLY TRUE) +ENDIF () + +find_package(gflags CONFIG QUIET) +if (gflags_FOUND) + if (NOT Gflags_FIND_QUIETLY) + message(STATUS "Found gflags from package config ${gflags_CONFIG}") + endif() + # Re-export the config-specified libs with our local names + set(LIBGFLAGS_LIBRARY ${gflags_LIBRARIES}) + set(LIBGFLAGS_INCLUDE_DIR ${gflags_INCLUDE_DIR}) + if(NOT EXISTS "${gflags_INCLUDE_DIR}") + # The gflags-devel RPM on recent RedHat-based systems is somewhat broken. + # RedHat symlinks /lib64 to /usr/lib64, and this breaks some of the + # relative path computation performed in gflags-config.cmake. The package + # config file ends up being found via /lib64, but the relative path + # computation it does only works if it was found in /usr/lib64. + # If gflags_INCLUDE_DIR does not actually exist, simply default it to + # /usr/include on these systems. 
+ set(LIBGFLAGS_INCLUDE_DIR "/usr/include") + set(GFLAGS_INCLUDE_DIR "/usr/include") + endif() + set(LIBGFLAGS_FOUND ${gflags_FOUND}) + # cmake module compat + set(GFLAGS_FOUND ${gflags_FOUND}) + set(Gflags_FOUND ${gflags_FOUND}) +else() + FIND_PATH(LIBGFLAGS_INCLUDE_DIR gflags/gflags.h) + + FIND_LIBRARY(LIBGFLAGS_LIBRARY_DEBUG NAMES gflagsd gflags_staticd) + FIND_LIBRARY(LIBGFLAGS_LIBRARY_RELEASE NAMES gflags gflags_static) + + INCLUDE(SelectLibraryConfigurations) + SELECT_LIBRARY_CONFIGURATIONS(LIBGFLAGS) + + # handle the QUIETLY and REQUIRED arguments and set LIBGFLAGS_FOUND to TRUE if + # all listed variables are TRUE + INCLUDE(FindPackageHandleStandardArgs) + FIND_PACKAGE_HANDLE_STANDARD_ARGS(gflags DEFAULT_MSG LIBGFLAGS_LIBRARY LIBGFLAGS_INCLUDE_DIR) + # cmake module compat + set(Gflags_FOUND ${GFLAGS_FOUND}) + # compat with some existing FindGflags consumers + set(LIBGFLAGS_FOUND ${GFLAGS_FOUND}) + + # Compat with the gflags CONFIG based detection + set(gflags_FOUND ${GFLAGS_FOUND}) + set(gflags_INCLUDE_DIR ${LIBGFLAGS_INCLUDE_DIR}) + set(gflags_LIBRARIES ${LIBGFLAGS_LIBRARY}) + set(GFLAGS_TARGET ${LIBGFLAGS_LIBRARY}) + set(gflags_TARGET ${LIBGFLAGS_LIBRARY}) + + MARK_AS_ADVANCED(LIBGFLAGS_LIBRARY LIBGFLAGS_INCLUDE_DIR) +endif() + +# Compat with the gflags CONFIG based detection +if (LIBGFLAGS_FOUND AND NOT TARGET gflags) + add_library(gflags UNKNOWN IMPORTED) + if(TARGET gflags-shared) + # If the installed gflags CMake package config defines a gflags-shared + # target but not gflags, just make the gflags target that we define + # depend on the gflags-shared target. + target_link_libraries(gflags INTERFACE gflags-shared) + # Export LIBGFLAGS_LIBRARY as the gflags-shared target in this case. 
+ set(LIBGFLAGS_LIBRARY gflags-shared) + else() + set_target_properties( + gflags + PROPERTIES + IMPORTED_LINK_INTERFACE_LANGUAGES "C" + IMPORTED_LOCATION "${LIBGFLAGS_LIBRARY}" + INTERFACE_INCLUDE_DIRECTORIES "${LIBGFLAGS_INCLUDE_DIR}" + ) + endif() +endif() diff --git a/build/fbcode_builder/CMake/FindGlog.cmake b/build/fbcode_builder/CMake/FindGlog.cmake new file mode 100644 index 000000000..19bfa187e --- /dev/null +++ b/build/fbcode_builder/CMake/FindGlog.cmake @@ -0,0 +1,49 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# - Try to find Glog +# Once done, this will define +# +# GLOG_FOUND - system has Glog +# GLOG_INCLUDE_DIRS - the Glog include directories +# GLOG_LIBRARIES - link these to use Glog + +include(FindPackageHandleStandardArgs) +include(SelectLibraryConfigurations) + +find_library(GLOG_LIBRARY_RELEASE glog + PATHS ${GLOG_LIBRARYDIR}) +find_library(GLOG_LIBRARY_DEBUG glogd + PATHS ${GLOG_LIBRARYDIR}) + +find_path(GLOG_INCLUDE_DIR glog/logging.h + PATHS ${GLOG_INCLUDEDIR}) + +select_library_configurations(GLOG) + +find_package_handle_standard_args(Glog DEFAULT_MSG + GLOG_LIBRARY + GLOG_INCLUDE_DIR) + +mark_as_advanced( + GLOG_LIBRARY + GLOG_INCLUDE_DIR) + +set(GLOG_LIBRARIES ${GLOG_LIBRARY}) +set(GLOG_INCLUDE_DIRS ${GLOG_INCLUDE_DIR}) + +if (NOT TARGET glog::glog) + add_library(glog::glog UNKNOWN IMPORTED) + set_target_properties(glog::glog PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${GLOG_INCLUDE_DIRS}") + set_target_properties(glog::glog PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES "C" IMPORTED_LOCATION "${GLOG_LIBRARIES}") + + find_package(Gflags) + if(GFLAGS_FOUND) + message(STATUS "Found gflags as a dependency of glog::glog, include=${LIBGFLAGS_INCLUDE_DIR}, libs=${LIBGFLAGS_LIBRARY}") + set_property(TARGET glog::glog APPEND PROPERTY IMPORTED_LINK_INTERFACE_LIBRARIES ${LIBGFLAGS_LIBRARY}) + endif() + + find_package(LibUnwind) + if(LIBUNWIND_FOUND) + message(STATUS "Found LibUnwind as a dependency of glog::glog, 
include=${LIBUNWIND_INCLUDE_DIR}, libs=${LIBUNWIND_LIBRARY}") + set_property(TARGET glog::glog APPEND PROPERTY IMPORTED_LINK_INTERFACE_LIBRARIES ${LIBUNWIND_LIBRARY}) + endif() +endif() diff --git a/build/fbcode_builder/CMake/FindLMDB.cmake b/build/fbcode_builder/CMake/FindLMDB.cmake new file mode 100644 index 000000000..51635e36e --- /dev/null +++ b/build/fbcode_builder/CMake/FindLMDB.cmake @@ -0,0 +1,19 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2. + +find_library(LMDB_LIBRARIES NAMES lmdb liblmdb) +mark_as_advanced(LMDB_LIBRARIES) + +find_path(LMDB_INCLUDE_DIR NAMES lmdb.h) +mark_as_advanced(LMDB_INCLUDE_DIR) + +find_package_handle_standard_args( + LMDB + REQUIRED_VARS LMDB_LIBRARIES LMDB_INCLUDE_DIR) + +if(LMDB_FOUND) + set(LMDB_LIBRARIES ${LMDB_LIBRARIES}) + set(LMDB_INCLUDE_DIR, ${LMDB_INCLUDE_DIR}) +endif() diff --git a/build/fbcode_builder/CMake/FindLibEvent.cmake b/build/fbcode_builder/CMake/FindLibEvent.cmake new file mode 100644 index 000000000..dd11ebd84 --- /dev/null +++ b/build/fbcode_builder/CMake/FindLibEvent.cmake @@ -0,0 +1,77 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+# - Find LibEvent (a cross event library) +# This module defines +# LIBEVENT_INCLUDE_DIR, where to find LibEvent headers +# LIBEVENT_LIB, LibEvent libraries +# LibEvent_FOUND, If false, do not try to use libevent + +set(LibEvent_EXTRA_PREFIXES /usr/local /opt/local "$ENV{HOME}") +foreach(prefix ${LibEvent_EXTRA_PREFIXES}) + list(APPEND LibEvent_INCLUDE_PATHS "${prefix}/include") + list(APPEND LibEvent_LIB_PATHS "${prefix}/lib") +endforeach() + +find_package(Libevent CONFIG QUIET) +if (TARGET event) + # Re-export the config under our own names + + # Somewhat gross, but some vcpkg installed libevents have a relative + # `include` path exported into LIBEVENT_INCLUDE_DIRS, which triggers + # a cmake error because it resolves to the `include` dir within the + # folly repo, which is not something cmake allows to be in the + # INTERFACE_INCLUDE_DIRECTORIES. Thankfully on such a system the + # actual include directory is already part of the global include + # directories, so we can just skip it. + if (NOT "${LIBEVENT_INCLUDE_DIRS}" STREQUAL "include") + set(LIBEVENT_INCLUDE_DIR ${LIBEVENT_INCLUDE_DIRS}) + else() + set(LIBEVENT_INCLUDE_DIR) + endif() + + # Unfortunately, with a bare target name `event`, downstream consumers + # of the package that depends on `Libevent` located via CONFIG end + # up exporting just a bare `event` in their libraries. This is problematic + # because this in interpreted as just `-levent` with no library path. + # When libevent is not installed in the default installation prefix + # this results in linker errors. + # To resolve this, we ask cmake to lookup the full path to the library + # and use that instead. 
+ cmake_policy(PUSH) + if(POLICY CMP0026) + # Allow reading the LOCATION property + cmake_policy(SET CMP0026 OLD) + endif() + get_target_property(LIBEVENT_LIB event LOCATION) + cmake_policy(POP) + + set(LibEvent_FOUND ${Libevent_FOUND}) + if (NOT LibEvent_FIND_QUIETLY) + message(STATUS "Found libevent from package config include=${LIBEVENT_INCLUDE_DIRS} lib=${LIBEVENT_LIB}") + endif() +else() + find_path(LIBEVENT_INCLUDE_DIR event.h PATHS ${LibEvent_INCLUDE_PATHS}) + find_library(LIBEVENT_LIB NAMES event PATHS ${LibEvent_LIB_PATHS}) + + if (LIBEVENT_LIB AND LIBEVENT_INCLUDE_DIR) + set(LibEvent_FOUND TRUE) + set(LIBEVENT_LIB ${LIBEVENT_LIB}) + else () + set(LibEvent_FOUND FALSE) + endif () + + if (LibEvent_FOUND) + if (NOT LibEvent_FIND_QUIETLY) + message(STATUS "Found libevent: ${LIBEVENT_LIB}") + endif () + else () + if (LibEvent_FIND_REQUIRED) + message(FATAL_ERROR "Could NOT find libevent.") + endif () + message(STATUS "libevent NOT found.") + endif () + + mark_as_advanced( + LIBEVENT_LIB + LIBEVENT_INCLUDE_DIR + ) +endif() diff --git a/build/fbcode_builder/CMake/FindLibUnwind.cmake b/build/fbcode_builder/CMake/FindLibUnwind.cmake new file mode 100644 index 000000000..b01a674a5 --- /dev/null +++ b/build/fbcode_builder/CMake/FindLibUnwind.cmake @@ -0,0 +1,29 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +find_path(LIBUNWIND_INCLUDE_DIR NAMES libunwind.h) +mark_as_advanced(LIBUNWIND_INCLUDE_DIR) + +find_library(LIBUNWIND_LIBRARY NAMES unwind) +mark_as_advanced(LIBUNWIND_LIBRARY) + +include(FindPackageHandleStandardArgs) +FIND_PACKAGE_HANDLE_STANDARD_ARGS( + LIBUNWIND + REQUIRED_VARS LIBUNWIND_LIBRARY LIBUNWIND_INCLUDE_DIR) + +if(LIBUNWIND_FOUND) + set(LIBUNWIND_LIBRARIES ${LIBUNWIND_LIBRARY}) + set(LIBUNWIND_INCLUDE_DIRS ${LIBUNWIND_INCLUDE_DIR}) +endif() diff --git a/build/fbcode_builder/CMake/FindPCRE.cmake b/build/fbcode_builder/CMake/FindPCRE.cmake new file mode 100644 index 000000000..32ccb3725 --- /dev/null +++ b/build/fbcode_builder/CMake/FindPCRE.cmake @@ -0,0 +1,11 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +include(FindPackageHandleStandardArgs) +find_path(PCRE_INCLUDE_DIR NAMES pcre.h) +find_library(PCRE_LIBRARY NAMES pcre) +find_package_handle_standard_args( + PCRE + DEFAULT_MSG + PCRE_LIBRARY + PCRE_INCLUDE_DIR +) +mark_as_advanced(PCRE_INCLUDE_DIR PCRE_LIBRARY) diff --git a/build/fbcode_builder/CMake/FindPCRE2.cmake b/build/fbcode_builder/CMake/FindPCRE2.cmake new file mode 100644 index 000000000..c2c64a29b --- /dev/null +++ b/build/fbcode_builder/CMake/FindPCRE2.cmake @@ -0,0 +1,12 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +include(FindPackageHandleStandardArgs) +find_path(PCRE2_INCLUDE_DIR NAMES pcre2.h) +find_library(PCRE2_LIBRARY NAMES pcre2-8) +find_package_handle_standard_args( + PCRE2 + DEFAULT_MSG + PCRE2_LIBRARY + PCRE2_INCLUDE_DIR +) +set(PCRE2_DEFINES "PCRE2_CODE_UNIT_WIDTH=8") +mark_as_advanced(PCRE2_INCLUDE_DIR PCRE2_LIBRARY PCRE2_DEFINES) diff --git a/build/fbcode_builder/CMake/FindRe2.cmake b/build/fbcode_builder/CMake/FindRe2.cmake new file mode 100644 index 000000000..013ae7761 --- /dev/null +++ b/build/fbcode_builder/CMake/FindRe2.cmake @@ -0,0 +1,20 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2. + +find_library(RE2_LIBRARY re2) +mark_as_advanced(RE2_LIBRARY) + +find_path(RE2_INCLUDE_DIR NAMES re2/re2.h) +mark_as_advanced(RE2_INCLUDE_DIR) + +include(FindPackageHandleStandardArgs) +FIND_PACKAGE_HANDLE_STANDARD_ARGS( + RE2 + REQUIRED_VARS RE2_LIBRARY RE2_INCLUDE_DIR) + +if(RE2_FOUND) + set(RE2_LIBRARY ${RE2_LIBRARY}) + set(RE2_INCLUDE_DIR, ${RE2_INCLUDE_DIR}) +endif() diff --git a/build/fbcode_builder/CMake/FindSodium.cmake b/build/fbcode_builder/CMake/FindSodium.cmake new file mode 100644 index 000000000..3c3f1245c --- /dev/null +++ b/build/fbcode_builder/CMake/FindSodium.cmake @@ -0,0 +1,297 @@ +# Written in 2016 by Henrik Steffen Gaßmann +# +# To the extent possible under law, the author(s) have dedicated all +# copyright and related and neighboring rights to this software to the +# public domain worldwide. This software is distributed without any warranty. +# +# You should have received a copy of the CC0 Public Domain Dedication +# along with this software. If not, see +# +# http://creativecommons.org/publicdomain/zero/1.0/ +# +######################################################################## +# Tries to find the local libsodium installation. +# +# On Windows the sodium_DIR environment variable is used as a default +# hint which can be overridden by setting the corresponding cmake variable. +# +# Once done the following variables will be defined: +# +# sodium_FOUND +# sodium_INCLUDE_DIR +# sodium_LIBRARY_DEBUG +# sodium_LIBRARY_RELEASE +# +# +# Furthermore an imported "sodium" target is created. 
+# + +if (CMAKE_C_COMPILER_ID STREQUAL "GNU" + OR CMAKE_C_COMPILER_ID STREQUAL "Clang") + set(_GCC_COMPATIBLE 1) +endif() + +# static library option +if (NOT DEFINED sodium_USE_STATIC_LIBS) + option(sodium_USE_STATIC_LIBS "enable to statically link against sodium" OFF) +endif() +if(NOT (sodium_USE_STATIC_LIBS EQUAL sodium_USE_STATIC_LIBS_LAST)) + unset(sodium_LIBRARY CACHE) + unset(sodium_LIBRARY_DEBUG CACHE) + unset(sodium_LIBRARY_RELEASE CACHE) + unset(sodium_DLL_DEBUG CACHE) + unset(sodium_DLL_RELEASE CACHE) + set(sodium_USE_STATIC_LIBS_LAST ${sodium_USE_STATIC_LIBS} CACHE INTERNAL "internal change tracking variable") +endif() + + +######################################################################## +# UNIX +if (UNIX) + # import pkg-config + find_package(PkgConfig QUIET) + if (PKG_CONFIG_FOUND) + pkg_check_modules(sodium_PKG QUIET libsodium) + endif() + + if(sodium_USE_STATIC_LIBS) + foreach(_libname ${sodium_PKG_STATIC_LIBRARIES}) + if (NOT _libname MATCHES "^lib.*\\.a$") # ignore strings already ending with .a + list(INSERT sodium_PKG_STATIC_LIBRARIES 0 "lib${_libname}.a") + endif() + endforeach() + list(REMOVE_DUPLICATES sodium_PKG_STATIC_LIBRARIES) + + # if pkgconfig for libsodium doesn't provide + # static lib info, then override PKG_STATIC here.. 
+ if (NOT sodium_PKG_STATIC_FOUND) + set(sodium_PKG_STATIC_LIBRARIES libsodium.a) + endif() + + set(XPREFIX sodium_PKG_STATIC) + else() + if (NOT sodium_PKG_FOUND) + set(sodium_PKG_LIBRARIES sodium) + endif() + + set(XPREFIX sodium_PKG) + endif() + + find_path(sodium_INCLUDE_DIR sodium.h + HINTS ${${XPREFIX}_INCLUDE_DIRS} + ) + find_library(sodium_LIBRARY_DEBUG NAMES ${${XPREFIX}_LIBRARIES} + HINTS ${${XPREFIX}_LIBRARY_DIRS} + ) + find_library(sodium_LIBRARY_RELEASE NAMES ${${XPREFIX}_LIBRARIES} + HINTS ${${XPREFIX}_LIBRARY_DIRS} + ) + + +######################################################################## +# Windows +elseif (WIN32) + set(sodium_DIR "$ENV{sodium_DIR}" CACHE FILEPATH "sodium install directory") + mark_as_advanced(sodium_DIR) + + find_path(sodium_INCLUDE_DIR sodium.h + HINTS ${sodium_DIR} + PATH_SUFFIXES include + ) + + if (MSVC) + # detect target architecture + file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/arch.cpp" [=[ + #if defined _M_IX86 + #error ARCH_VALUE x86_32 + #elif defined _M_X64 + #error ARCH_VALUE x86_64 + #endif + #error ARCH_VALUE unknown + ]=]) + try_compile(_UNUSED_VAR "${CMAKE_CURRENT_BINARY_DIR}" "${CMAKE_CURRENT_BINARY_DIR}/arch.cpp" + OUTPUT_VARIABLE _COMPILATION_LOG + ) + string(REGEX REPLACE ".*ARCH_VALUE ([a-zA-Z0-9_]+).*" "\\1" _TARGET_ARCH "${_COMPILATION_LOG}") + + # construct library path + if (_TARGET_ARCH STREQUAL "x86_32") + string(APPEND _PLATFORM_PATH "Win32") + elseif(_TARGET_ARCH STREQUAL "x86_64") + string(APPEND _PLATFORM_PATH "x64") + else() + message(FATAL_ERROR "the ${_TARGET_ARCH} architecture is not supported by Findsodium.cmake.") + endif() + string(APPEND _PLATFORM_PATH "/$$CONFIG$$") + + if (MSVC_VERSION LESS 1900) + math(EXPR _VS_VERSION "${MSVC_VERSION} / 10 - 60") + else() + math(EXPR _VS_VERSION "${MSVC_VERSION} / 10 - 50") + endif() + string(APPEND _PLATFORM_PATH "/v${_VS_VERSION}") + + if (sodium_USE_STATIC_LIBS) + string(APPEND _PLATFORM_PATH "/static") + else() + string(APPEND _PLATFORM_PATH 
"/dynamic") + endif() + + string(REPLACE "$$CONFIG$$" "Debug" _DEBUG_PATH_SUFFIX "${_PLATFORM_PATH}") + string(REPLACE "$$CONFIG$$" "Release" _RELEASE_PATH_SUFFIX "${_PLATFORM_PATH}") + + find_library(sodium_LIBRARY_DEBUG libsodium.lib + HINTS ${sodium_DIR} + PATH_SUFFIXES ${_DEBUG_PATH_SUFFIX} + ) + find_library(sodium_LIBRARY_RELEASE libsodium.lib + HINTS ${sodium_DIR} + PATH_SUFFIXES ${_RELEASE_PATH_SUFFIX} + ) + if (NOT sodium_USE_STATIC_LIBS) + set(CMAKE_FIND_LIBRARY_SUFFIXES_BCK ${CMAKE_FIND_LIBRARY_SUFFIXES}) + set(CMAKE_FIND_LIBRARY_SUFFIXES ".dll") + find_library(sodium_DLL_DEBUG libsodium + HINTS ${sodium_DIR} + PATH_SUFFIXES ${_DEBUG_PATH_SUFFIX} + ) + find_library(sodium_DLL_RELEASE libsodium + HINTS ${sodium_DIR} + PATH_SUFFIXES ${_RELEASE_PATH_SUFFIX} + ) + set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_BCK}) + endif() + + elseif(_GCC_COMPATIBLE) + if (sodium_USE_STATIC_LIBS) + find_library(sodium_LIBRARY_DEBUG libsodium.a + HINTS ${sodium_DIR} + PATH_SUFFIXES lib + ) + find_library(sodium_LIBRARY_RELEASE libsodium.a + HINTS ${sodium_DIR} + PATH_SUFFIXES lib + ) + else() + find_library(sodium_LIBRARY_DEBUG libsodium.dll.a + HINTS ${sodium_DIR} + PATH_SUFFIXES lib + ) + find_library(sodium_LIBRARY_RELEASE libsodium.dll.a + HINTS ${sodium_DIR} + PATH_SUFFIXES lib + ) + + file(GLOB _DLL + LIST_DIRECTORIES false + RELATIVE "${sodium_DIR}/bin" + "${sodium_DIR}/bin/libsodium*.dll" + ) + find_library(sodium_DLL_DEBUG ${_DLL} libsodium + HINTS ${sodium_DIR} + PATH_SUFFIXES bin + ) + find_library(sodium_DLL_RELEASE ${_DLL} libsodium + HINTS ${sodium_DIR} + PATH_SUFFIXES bin + ) + endif() + else() + message(FATAL_ERROR "this platform is not supported by FindSodium.cmake") + endif() + + +######################################################################## +# unsupported +else() + message(FATAL_ERROR "this platform is not supported by FindSodium.cmake") +endif() + + +######################################################################## +# 
common stuff + +# extract sodium version +if (sodium_INCLUDE_DIR) + set(_VERSION_HEADER "${_INCLUDE_DIR}/sodium/version.h") + if (EXISTS _VERSION_HEADER) + file(READ "${_VERSION_HEADER}" _VERSION_HEADER_CONTENT) + string(REGEX REPLACE ".*#[ \t]*define[ \t]*SODIUM_VERSION_STRING[ \t]*\"([^\n]*)\".*" "\\1" + sodium_VERSION "${_VERSION_HEADER_CONTENT}") + set(sodium_VERSION "${sodium_VERSION}" PARENT_SCOPE) + endif() +endif() + +# communicate results +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args( + Sodium # The name must be either uppercase or match the filename case. + REQUIRED_VARS + sodium_LIBRARY_RELEASE + sodium_LIBRARY_DEBUG + sodium_INCLUDE_DIR + VERSION_VAR + sodium_VERSION +) + +if(Sodium_FOUND) + set(sodium_LIBRARIES + optimized ${sodium_LIBRARY_RELEASE} debug ${sodium_LIBRARY_DEBUG}) +endif() + +# mark file paths as advanced +mark_as_advanced(sodium_INCLUDE_DIR) +mark_as_advanced(sodium_LIBRARY_DEBUG) +mark_as_advanced(sodium_LIBRARY_RELEASE) +if (WIN32) + mark_as_advanced(sodium_DLL_DEBUG) + mark_as_advanced(sodium_DLL_RELEASE) +endif() + +# create imported target +if(sodium_USE_STATIC_LIBS) + set(_LIB_TYPE STATIC) +else() + set(_LIB_TYPE SHARED) +endif() + +if(NOT TARGET sodium) + add_library(sodium ${_LIB_TYPE} IMPORTED) +endif() + +set_target_properties(sodium PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${sodium_INCLUDE_DIR}" + IMPORTED_LINK_INTERFACE_LANGUAGES "C" +) + +if (sodium_USE_STATIC_LIBS) + set_target_properties(sodium PROPERTIES + INTERFACE_COMPILE_DEFINITIONS "SODIUM_STATIC" + IMPORTED_LOCATION "${sodium_LIBRARY_RELEASE}" + IMPORTED_LOCATION_DEBUG "${sodium_LIBRARY_DEBUG}" + ) +else() + if (UNIX) + set_target_properties(sodium PROPERTIES + IMPORTED_LOCATION "${sodium_LIBRARY_RELEASE}" + IMPORTED_LOCATION_DEBUG "${sodium_LIBRARY_DEBUG}" + ) + elseif (WIN32) + set_target_properties(sodium PROPERTIES + IMPORTED_IMPLIB "${sodium_LIBRARY_RELEASE}" + IMPORTED_IMPLIB_DEBUG "${sodium_LIBRARY_DEBUG}" + ) + if (NOT 
(sodium_DLL_DEBUG MATCHES ".*-NOTFOUND")) + set_target_properties(sodium PROPERTIES + IMPORTED_LOCATION_DEBUG "${sodium_DLL_DEBUG}" + ) + endif() + if (NOT (sodium_DLL_RELEASE MATCHES ".*-NOTFOUND")) + set_target_properties(sodium PROPERTIES + IMPORTED_LOCATION_RELWITHDEBINFO "${sodium_DLL_RELEASE}" + IMPORTED_LOCATION_MINSIZEREL "${sodium_DLL_RELEASE}" + IMPORTED_LOCATION_RELEASE "${sodium_DLL_RELEASE}" + ) + endif() + endif() +endif() diff --git a/build/fbcode_builder/CMake/FindZstd.cmake b/build/fbcode_builder/CMake/FindZstd.cmake new file mode 100644 index 000000000..89300ddfd --- /dev/null +++ b/build/fbcode_builder/CMake/FindZstd.cmake @@ -0,0 +1,41 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# +# - Try to find Facebook zstd library +# This will define +# ZSTD_FOUND +# ZSTD_INCLUDE_DIR +# ZSTD_LIBRARY +# + +find_path(ZSTD_INCLUDE_DIR NAMES zstd.h) + +find_library(ZSTD_LIBRARY_DEBUG NAMES zstdd zstd_staticd) +find_library(ZSTD_LIBRARY_RELEASE NAMES zstd zstd_static) + +include(SelectLibraryConfigurations) +SELECT_LIBRARY_CONFIGURATIONS(ZSTD) + +include(FindPackageHandleStandardArgs) +FIND_PACKAGE_HANDLE_STANDARD_ARGS( + ZSTD DEFAULT_MSG + ZSTD_LIBRARY ZSTD_INCLUDE_DIR +) + +if (ZSTD_FOUND) + message(STATUS "Found Zstd: ${ZSTD_LIBRARY}") +endif() + +mark_as_advanced(ZSTD_INCLUDE_DIR ZSTD_LIBRARY) diff --git a/build/fbcode_builder/CMake/RustStaticLibrary.cmake b/build/fbcode_builder/CMake/RustStaticLibrary.cmake new file mode 100644 index 000000000..e6f839bdf --- /dev/null +++ b/build/fbcode_builder/CMake/RustStaticLibrary.cmake @@ -0,0 +1,534 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. + +include(FBCMakeParseArgs) + +set( + USE_CARGO_VENDOR AUTO CACHE STRING + "Download Rust Crates from an internally vendored location" +) +set_property(CACHE USE_CARGO_VENDOR PROPERTY STRINGS AUTO ON OFF) + +set( + GENERATE_CARGO_VENDOR_CONFIG AUTO CACHE STRING + "Whether to generate Rust cargo vendor config or use existing" +) +set_property(CACHE GENERATE_CARGO_VENDOR_CONFIG PROPERTY STRINGS AUTO ON OFF) + +set(RUST_VENDORED_CRATES_DIR "$ENV{RUST_VENDORED_CRATES_DIR}") + +if("${USE_CARGO_VENDOR}" STREQUAL "AUTO") + if(EXISTS "${RUST_VENDORED_CRATES_DIR}") + set(USE_CARGO_VENDOR ON) + else() + set(USE_CARGO_VENDOR OFF) + endif() +endif() + +if("${GENERATE_CARGO_VENDOR_CONFIG}" STREQUAL "AUTO") + set(GENERATE_CARGO_VENDOR_CONFIG "${USE_CARGO_VENDOR}") +endif() + +if(GENERATE_CARGO_VENDOR_CONFIG) + if(NOT EXISTS "${RUST_VENDORED_CRATES_DIR}") + message( + FATAL "vendored rust crates not present: " + "${RUST_VENDORED_CRATES_DIR}" + ) + endif() + + set(RUST_CARGO_HOME "${CMAKE_BINARY_DIR}/_cargo_home") + file(MAKE_DIRECTORY "${RUST_CARGO_HOME}") + + file( + 
TO_NATIVE_PATH "${RUST_VENDORED_CRATES_DIR}" + ESCAPED_RUST_VENDORED_CRATES_DIR + ) + string( + REPLACE "\\" "\\\\" + ESCAPED_RUST_VENDORED_CRATES_DIR + "${ESCAPED_RUST_VENDORED_CRATES_DIR}" + ) + file( + WRITE "${RUST_CARGO_HOME}/config" + "[source.crates-io]\n" + "replace-with = \"vendored-sources\"\n" + "\n" + "[source.vendored-sources]\n" + "directory = \"${ESCAPED_RUST_VENDORED_CRATES_DIR}\"\n" + ) +endif() + +find_program(CARGO_COMMAND cargo REQUIRED) + +# Cargo is a build system in itself, and thus will try to take advantage of all +# the cores on the system. Unfortunately, this conflicts with Ninja, since it +# also tries to utilize all the cores. This can lead to a system that is +# completely overloaded with compile jobs to the point where nothing else can +# be achieved on the system. +# +# Let's inform Ninja of this fact so it won't try to spawn other jobs while +# Rust being compiled. +set_property(GLOBAL APPEND PROPERTY JOB_POOLS rust_job_pool=1) + +# This function creates an interface library target based on the static library +# built by Cargo. It will call Cargo to build a staticlib and generate a CMake +# interface library with it. +# +# This function requires `find_package(Python COMPONENTS Interpreter)`. +# +# You need to set `lib:crate-type = ["staticlib"]` in your Cargo.toml to make +# Cargo build static library. +# +# ```cmake +# rust_static_library( [CRATE ] [FEATURES ] [USE_CXX_INCLUDE]) +# ``` +# +# Parameters: +# - TARGET: +# Name of the target name. This function will create an interface library +# target with this name. +# - CRATE_NAME: +# Name of the crate. This parameter is optional. If unspecified, it will +# fallback to `${TARGET}`. +# - FEATURE_NAME: +# Name of the Rust feature to enable. +# - USE_CXX_INCLUDE: +# Include cxx.rs include path in `${TARGET}` INTERFACE. +# +# This function creates two targets: +# - "${TARGET}": an interface library target contains the static library built +# from Cargo. 
+# - "${TARGET}.cargo": an internal custom target that invokes Cargo. +# +# If you are going to use this static library from C/C++, you will need to +# write header files for the library (or generate with cbindgen) and bind these +# headers with the interface library. +# +function(rust_static_library TARGET) + fb_cmake_parse_args(ARG "USE_CXX_INCLUDE" "CRATE;FEATURES" "" "${ARGN}") + + if(DEFINED ARG_CRATE) + set(crate_name "${ARG_CRATE}") + else() + set(crate_name "${TARGET}") + endif() + if(DEFINED ARG_FEATURES) + set(features --features ${ARG_FEATURES}) + else() + set(features ) + endif() + + set(cargo_target "${TARGET}.cargo") + set(target_dir $,debug,release>) + set(staticlib_name "${CMAKE_STATIC_LIBRARY_PREFIX}${crate_name}${CMAKE_STATIC_LIBRARY_SUFFIX}") + set(rust_staticlib "${CMAKE_CURRENT_BINARY_DIR}/${target_dir}/${staticlib_name}") + + if(DEFINED ARG_FEATURES) + set(cargo_flags build $,,--release> -p ${crate_name} --features ${ARG_FEATURES}) + else() + set(cargo_flags build $,,--release> -p ${crate_name}) + endif() + if(USE_CARGO_VENDOR) + set(extra_cargo_env "CARGO_HOME=${RUST_CARGO_HOME}") + set(cargo_flags ${cargo_flags}) + endif() + + add_custom_target( + ${cargo_target} + COMMAND + "${CMAKE_COMMAND}" -E remove -f "${CMAKE_CURRENT_SOURCE_DIR}/Cargo.lock" + COMMAND + "${CMAKE_COMMAND}" -E env + "CARGO_TARGET_DIR=${CMAKE_CURRENT_BINARY_DIR}" + ${extra_cargo_env} + ${CARGO_COMMAND} + ${cargo_flags} + COMMENT "Building Rust crate '${crate_name}'..." 
+ JOB_POOL rust_job_pool + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + BYPRODUCTS + "${CMAKE_CURRENT_BINARY_DIR}/debug/${staticlib_name}" + "${CMAKE_CURRENT_BINARY_DIR}/release/${staticlib_name}" + ) + + add_library(${TARGET} INTERFACE) + add_dependencies(${TARGET} ${cargo_target}) + set_target_properties( + ${TARGET} + PROPERTIES + INTERFACE_STATICLIB_OUTPUT_PATH "${rust_staticlib}" + INTERFACE_INSTALL_LIBNAME + "${CMAKE_STATIC_LIBRARY_PREFIX}${crate_name}_rs${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + + if(DEFINED ARG_USE_CXX_INCLUDE) + target_include_directories( + ${TARGET} + INTERFACE ${CMAKE_CURRENT_BINARY_DIR}/cxxbridge/ + ) + endif() + + target_link_libraries( + ${TARGET} + INTERFACE "$" + ) +endfunction() + +# This function instructs CMake to define a target that will use `cargo build` +# to build a bin crate referenced by the Cargo.toml file in the current source +# directory. +# It accepts a single `TARGET` parameter which will be passed as the package +# name to `cargo build -p TARGET`. If binary has different name as package, +# use optional flag BINARY_NAME to override it. +# It also accepts a `FEATURES` parameter if you want to enable certain features +# in your Rust binary. +# The CMake target will be registered to build by default as part of the +# ALL target. 
+function(rust_executable TARGET) + fb_cmake_parse_args(ARG "" "BINARY_NAME;FEATURES" "" "${ARGN}") + + set(crate_name "${TARGET}") + set(cargo_target "${TARGET}.cargo") + set(target_dir $,debug,release>) + + if(DEFINED ARG_BINARY_NAME) + set(executable_name "${ARG_BINARY_NAME}${CMAKE_EXECUTABLE_SUFFIX}") + else() + set(executable_name "${crate_name}${CMAKE_EXECUTABLE_SUFFIX}") + endif() + if(DEFINED ARG_FEATURES) + set(features --features ${ARG_FEATURES}) + else() + set(features ) + endif() + + if(DEFINED ARG_FEATURES) + set(cargo_flags build $,,--release> -p ${crate_name} --features ${ARG_FEATURES}) + else() + set(cargo_flags build $,,--release> -p ${crate_name}) + endif() + if(USE_CARGO_VENDOR) + set(extra_cargo_env "CARGO_HOME=${RUST_CARGO_HOME}") + set(cargo_flags ${cargo_flags}) + endif() + + add_custom_target( + ${cargo_target} + ALL + COMMAND + "${CMAKE_COMMAND}" -E remove -f "${CMAKE_CURRENT_SOURCE_DIR}/Cargo.lock" + COMMAND + "${CMAKE_COMMAND}" -E env + "CARGO_TARGET_DIR=${CMAKE_CURRENT_BINARY_DIR}" + ${extra_cargo_env} + ${CARGO_COMMAND} + ${cargo_flags} + COMMENT "Building Rust executable '${crate_name}'..." + JOB_POOL rust_job_pool + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + BYPRODUCTS + "${CMAKE_CURRENT_BINARY_DIR}/debug/${executable_name}" + "${CMAKE_CURRENT_BINARY_DIR}/release/${executable_name}" + ) + + set_property(TARGET "${cargo_target}" + PROPERTY EXECUTABLE "${CMAKE_CURRENT_BINARY_DIR}/${target_dir}/${executable_name}") +endfunction() + +# This function can be used to install the executable generated by a prior +# call to the `rust_executable` function. +# It requires a `TARGET` parameter to identify the target to be installed, +# and an optional `DESTINATION` parameter to specify the installation +# directory. If DESTINATION is not specified then the `bin` directory +# will be assumed. 
+function(install_rust_executable TARGET) + # Parse the arguments + set(one_value_args DESTINATION) + set(multi_value_args) + fb_cmake_parse_args( + ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}" + ) + + if(NOT DEFINED ARG_DESTINATION) + set(ARG_DESTINATION bin) + endif() + + get_target_property(foo "${TARGET}.cargo" EXECUTABLE) + + install( + PROGRAMS "${foo}" + DESTINATION "${ARG_DESTINATION}" + ) +endfunction() + +# This function installs the interface target generated from the function +# `rust_static_library`. Use this function if you want to export your Rust +# target to external CMake targets. +# +# ```cmake +# install_rust_static_library( +# +# INSTALL_DIR +# [EXPORT ] +# ) +# ``` +# +# Parameters: +# - TARGET: Name of the Rust static library target. +# - EXPORT_NAME: Name of the exported target. +# - INSTALL_DIR: Path to the directory where this library will be installed. +# +function(install_rust_static_library TARGET) + fb_cmake_parse_args(ARG "" "EXPORT;INSTALL_DIR" "" "${ARGN}") + + get_property( + staticlib_output_path + TARGET "${TARGET}" + PROPERTY INTERFACE_STATICLIB_OUTPUT_PATH + ) + get_property( + staticlib_output_name + TARGET "${TARGET}" + PROPERTY INTERFACE_INSTALL_LIBNAME + ) + + if(NOT DEFINED staticlib_output_path) + message(FATAL_ERROR "Not a rust_static_library target.") + endif() + + if(NOT DEFINED ARG_INSTALL_DIR) + message(FATAL_ERROR "Missing required argument.") + endif() + + if(DEFINED ARG_EXPORT) + set(install_export_args EXPORT "${ARG_EXPORT}") + endif() + + set(install_interface_dir "${ARG_INSTALL_DIR}") + if(NOT IS_ABSOLUTE "${install_interface_dir}") + set(install_interface_dir "\${_IMPORT_PREFIX}/${install_interface_dir}") + endif() + + target_link_libraries( + ${TARGET} INTERFACE + "$" + ) + install( + TARGETS ${TARGET} + ${install_export_args} + LIBRARY DESTINATION ${ARG_INSTALL_DIR} + ) + install( + FILES ${staticlib_output_path} + RENAME ${staticlib_output_name} + DESTINATION ${ARG_INSTALL_DIR} + ) 
+endfunction() + +# This function creates C++ bindings using the [cxx] crate. +# +# Original function found here: https://github.com/corrosion-rs/corrosion/blob/master/cmake/Corrosion.cmake#L1390 +# Simplified for use as part of RustStaticLibrary module. License below. +# +# MIT License +# +# Copyright (c) 2018 Andrew Gaspar +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# +# The rules approximately do the following: +# - Check which version of `cxx` the Rust crate depends on. +# - Check if the exact same version of `cxxbridge-cmd` is installed +# - If not, create a rule to build the exact same version of `cxxbridge-cmd`. +# - Create rules to run `cxxbridge` and generate +# - The `rust/cxx.h` header +# - A header and source file for the specified CXX_BRIDGE_FILE. +# - The generated sources (and header include directories) are added to the +# `${TARGET}` CMake library target. 
+# +# ```cmake +# rust_cxx_bridge( [CRATE ] [LIBS ]) +# ``` +# +# Parameters: +# - TARGET: +# Name of the target name. The target that the bridge will be included with. +# - CXX_BRIDGE_FILE: +# Name of the file that include the cxxbridge (e.g., "src/ffi.rs"). +# - CRATE_NAME: +# Name of the crate. This parameter is optional. If unspecified, it will +# fallback to `${TARGET}`. +# - LIBS [ ...]: +# A list of libraries that this library depends on. +# +function(rust_cxx_bridge TARGET CXX_BRIDGE_FILE) + fb_cmake_parse_args(ARG "" "CRATE" "LIBS" "${ARGN}") + + if(DEFINED ARG_CRATE) + set(crate_name "${ARG_CRATE}") + else() + set(crate_name "${TARGET}") + endif() + + if(USE_CARGO_VENDOR) + set(extra_cargo_env "CARGO_HOME=${RUST_CARGO_HOME}") + endif() + + execute_process( + COMMAND + "${CMAKE_COMMAND}" -E env + ${extra_cargo_env} + "${CARGO_COMMAND}" tree -i cxx --depth=0 + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + RESULT_VARIABLE cxx_version_result + OUTPUT_VARIABLE cxx_version_output + ) + + if(NOT "${cxx_version_result}" EQUAL "0") + message(FATAL_ERROR "Crate ${crate_name} does not depend on cxx.") + endif() + if(cxx_version_output MATCHES "cxx v([0-9]+.[0-9]+.[0-9]+)") + set(cxx_required_version "${CMAKE_MATCH_1}") + else() + message( + FATAL_ERROR + "Failed to parse cxx version from cargo tree output: `cxx_version_output`") + endif() + + # First check if a suitable version of cxxbridge is installed + find_program(INSTALLED_CXXBRIDGE cxxbridge PATHS "$ENV{HOME}/.cargo/bin/") + mark_as_advanced(INSTALLED_CXXBRIDGE) + if(INSTALLED_CXXBRIDGE) + execute_process( + COMMAND "${INSTALLED_CXXBRIDGE}" --version + OUTPUT_VARIABLE cxxbridge_version_output + ) + if(cxxbridge_version_output MATCHES "cxxbridge ([0-9]+.[0-9]+.[0-9]+)") + set(cxxbridge_version "${CMAKE_MATCH_1}") + else() + set(cxxbridge_version "") + endif() + endif() + + set(cxxbridge "") + if(cxxbridge_version) + if(cxxbridge_version VERSION_EQUAL cxx_required_version) + set(cxxbridge 
"${INSTALLED_CXXBRIDGE}") + if(NOT TARGET "cxxbridge_v${cxx_required_version}") + # Add an empty target. + add_custom_target("cxxbridge_v${cxx_required_version}") + endif() + endif() + endif() + + # No suitable version of cxxbridge was installed, + # so use custom target to install correct version. + if(NOT cxxbridge) + if(NOT TARGET "cxxbridge_v${cxx_required_version}") + add_custom_command( + OUTPUT + "${CMAKE_BINARY_DIR}/cxxbridge_v${cxx_required_version}/bin/cxxbridge" + COMMAND + "${CMAKE_COMMAND}" -E make_directory + "${CMAKE_BINARY_DIR}/cxxbridge_v${cxx_required_version}" + COMMAND + "${CMAKE_COMMAND}" -E remove -f "${CMAKE_CURRENT_SOURCE_DIR}/Cargo.lock" + COMMAND + "${CMAKE_COMMAND}" -E env + ${extra_cargo_env} + "${CARGO_COMMAND}" install cxxbridge-cmd + --version "${cxx_required_version}" + --root "${CMAKE_BINARY_DIR}/cxxbridge_v${cxx_required_version}" + --quiet + COMMAND + "${CMAKE_COMMAND}" -E remove -f "${CMAKE_CURRENT_SOURCE_DIR}/Cargo.lock" + COMMENT "Installing cxxbridge (version ${cxx_required_version})" + ) + add_custom_target( + "cxxbridge_v${cxx_required_version}" + DEPENDS "${CMAKE_BINARY_DIR}/cxxbridge_v${cxx_required_version}/bin/cxxbridge" + ) + endif() + set( + cxxbridge + "${CMAKE_BINARY_DIR}/cxxbridge_v${cxx_required_version}/bin/cxxbridge" + ) + endif() + + add_library(${crate_name} STATIC) + target_include_directories( + ${crate_name} + PUBLIC + $ + $ + ) + target_link_libraries( + ${crate_name} + PUBLIC + ${ARG_LIBS} + ) + + file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/rust") + add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/rust/cxx.h" + COMMAND + "${cxxbridge}" --header --output "${CMAKE_CURRENT_BINARY_DIR}/rust/cxx.h" + DEPENDS "cxxbridge_v${cxx_required_version}" + COMMENT "Generating rust/cxx.h header" + ) + + get_filename_component(filename_component ${CXX_BRIDGE_FILE} NAME) + get_filename_component(directory_component ${CXX_BRIDGE_FILE} DIRECTORY) + set(directory "") + if(directory_component) + set(directory 
"${directory_component}") + endif() + + set(cxx_header ${directory}/${filename_component}.h) + set(cxx_source ${directory}/${filename_component}.cc) + set(rust_source_path "${CMAKE_CURRENT_SOURCE_DIR}/${CXX_BRIDGE_FILE}") + + file( + MAKE_DIRECTORY + "${CMAKE_CURRENT_BINARY_DIR}/${directory_component}" + ) + + add_custom_command( + OUTPUT + "${CMAKE_CURRENT_BINARY_DIR}/${cxx_header}" + "${CMAKE_CURRENT_BINARY_DIR}/${cxx_source}" + COMMAND + ${cxxbridge} ${rust_source_path} + --header --output "${CMAKE_CURRENT_BINARY_DIR}/${cxx_header}" + COMMAND + ${cxxbridge} ${rust_source_path} + --output "${CMAKE_CURRENT_BINARY_DIR}/${cxx_source}" + --include "${cxx_header}" + DEPENDS "cxxbridge_v${cxx_required_version}" "${rust_source_path}" + COMMENT "Generating cxx bindings for crate ${crate_name}" + ) + + target_sources( + ${crate_name} + PRIVATE + "${CMAKE_CURRENT_BINARY_DIR}/${cxx_header}" + "${CMAKE_CURRENT_BINARY_DIR}/rust/cxx.h" + "${CMAKE_CURRENT_BINARY_DIR}/${cxx_source}" + ) +endfunction() diff --git a/build/fbcode_builder/CMake/fb_py_test_main.py b/build/fbcode_builder/CMake/fb_py_test_main.py new file mode 100644 index 000000000..a9499e221 --- /dev/null +++ b/build/fbcode_builder/CMake/fb_py_test_main.py @@ -0,0 +1,805 @@ +#!/usr/bin/env python +# +# Copyright (c) Facebook, Inc. and its affiliates. +# +""" +This file contains the main module code for Python test programs. 
+""" + + +import contextlib +import ctypes +import fnmatch +import json +import logging +import optparse +import os +import platform +import re +import sys +import tempfile +import time +import traceback +import unittest +import warnings +from importlib.machinery import PathFinder + + +try: + from StringIO import StringIO +except ImportError: + from io import StringIO +try: + import coverage +except ImportError: + coverage = None # type: ignore +try: + from importlib.machinery import SourceFileLoader +except ImportError: + SourceFileLoader = None # type: ignore + + +class get_cpu_instr_counter(object): + def read(self): + # TODO + return 0 + + +EXIT_CODE_SUCCESS = 0 +EXIT_CODE_TEST_FAILURE = 70 + + +class TestStatus(object): + + ABORTED = "FAILURE" + PASSED = "SUCCESS" + FAILED = "FAILURE" + EXPECTED_FAILURE = "SUCCESS" + UNEXPECTED_SUCCESS = "FAILURE" + SKIPPED = "ASSUMPTION_VIOLATION" + + +class PathMatcher(object): + def __init__(self, include_patterns, omit_patterns): + self.include_patterns = include_patterns + self.omit_patterns = omit_patterns + + def omit(self, path): + """ + Omit iff matches any of the omit_patterns or the include patterns are + not empty and none is matched + """ + path = os.path.realpath(path) + return any(fnmatch.fnmatch(path, p) for p in self.omit_patterns) or ( + self.include_patterns + and not any(fnmatch.fnmatch(path, p) for p in self.include_patterns) + ) + + def include(self, path): + return not self.omit(path) + + +class DebugWipeFinder(PathFinder): + """ + PEP 302 finder that uses a DebugWipeLoader for all files which do not need + coverage + """ + + def __init__(self, matcher): + self.matcher = matcher + + def find_spec(self, fullname, path=None, target=None): + spec = super().find_spec(fullname, path=path, target=target) + if spec is None or spec.origin is None: + return None + if not spec.origin.endswith(".py"): + return None + if self.matcher.include(spec.origin): + return None + + class PyVarObject(ctypes.Structure): + 
_fields_ = [ + ("ob_refcnt", ctypes.c_long), + ("ob_type", ctypes.c_void_p), + ("ob_size", ctypes.c_ulong), + ] + + class DebugWipeLoader(SourceFileLoader): + """ + PEP302 loader that zeros out debug information before execution + """ + + def get_code(self, fullname): + code = super().get_code(fullname) + if code: + # Ideally we'd do + # code.co_lnotab = b'' + # But code objects are READONLY. Not to worry though; we'll + # directly modify CPython's object + code_impl = PyVarObject.from_address(id(code.co_lnotab)) + code_impl.ob_size = 0 + return code + + if isinstance(spec.loader, SourceFileLoader): + spec.loader = DebugWipeLoader(fullname, spec.origin) + return spec + + +def optimize_for_coverage(cov, include_patterns, omit_patterns): + """ + We get better performance if we zero out debug information for files which + we're not interested in. Only available in CPython 3.3+ + """ + matcher = PathMatcher(include_patterns, omit_patterns) + if SourceFileLoader and platform.python_implementation() == "CPython": + sys.meta_path.insert(0, DebugWipeFinder(matcher)) + + +class TeeStream(object): + def __init__(self, *streams): + self._streams = streams + + def write(self, data): + for stream in self._streams: + stream.write(data) + + def flush(self): + for stream in self._streams: + stream.flush() + + def isatty(self): + return False + + +class CallbackStream(object): + def __init__(self, callback, bytes_callback=None, orig=None): + self._callback = callback + self._fileno = orig.fileno() if orig else None + + # Python 3 APIs: + # - `encoding` is a string holding the encoding name + # - `errors` is a string holding the error-handling mode for encoding + # - `buffer` should look like an io.BufferedIOBase object + + self.errors = orig.errors if orig else None + if bytes_callback: + # those members are only on the io.TextIOWrapper + self.encoding = orig.encoding if orig else "UTF-8" + self.buffer = CallbackStream(bytes_callback, orig=orig) + + def write(self, data): + 
self._callback(data) + + def flush(self): + pass + + def isatty(self): + return False + + def fileno(self): + return self._fileno + + +class BuckTestResult(unittest.TextTestResult): + """ + Our own TestResult class that outputs data in a format that can be easily + parsed by buck's test runner. + """ + + _instr_counter = get_cpu_instr_counter() + + def __init__( + self, stream, descriptions, verbosity, show_output, main_program, suite + ): + super(BuckTestResult, self).__init__(stream, descriptions, verbosity) + self._main_program = main_program + self._suite = suite + self._results = [] + self._current_test = None + self._saved_stdout = sys.stdout + self._saved_stderr = sys.stderr + self._show_output = show_output + + def getResults(self): + return self._results + + def startTest(self, test): + super(BuckTestResult, self).startTest(test) + + # Pass in the real stdout and stderr filenos. We can't really do much + # here to intercept callers who directly operate on these fileno + # objects. + sys.stdout = CallbackStream( + self.addStdout, self.addStdoutBytes, orig=sys.stdout + ) + sys.stderr = CallbackStream( + self.addStderr, self.addStderrBytes, orig=sys.stderr + ) + self._current_test = test + self._test_start_time = time.time() + self._current_status = TestStatus.ABORTED + self._messages = [] + self._stacktrace = None + self._stdout = "" + self._stderr = "" + self._start_instr_count = self._instr_counter.read() + + def _find_next_test(self, suite): + """ + Find the next test that has not been run. + """ + + for test in suite: + + # We identify test suites by test that are iterable (as is done in + # the builtin python test harness). If we see one, recurse on it. + if hasattr(test, "__iter__"): + test = self._find_next_test(test) + + # The builtin python test harness sets test references to `None` + # after they have run, so we know we've found the next test up + # if it's not `None`. 
+ if test is not None: + return test + + def stopTest(self, test): + sys.stdout = self._saved_stdout + sys.stderr = self._saved_stderr + + super(BuckTestResult, self).stopTest(test) + + # If a failure occurred during module/class setup, then this "test" may + # actually be a `_ErrorHolder`, which doesn't contain explicit info + # about the upcoming test. Since we really only care about the test + # name field (i.e. `_testMethodName`), we use that to detect an actual + # test cases, and fall back to looking the test up from the suite + # otherwise. + if not hasattr(test, "_testMethodName"): + test = self._find_next_test(self._suite) + + result = { + "testCaseName": "{0}.{1}".format( + test.__class__.__module__, test.__class__.__name__ + ), + "testCase": test._testMethodName, + "type": self._current_status, + "time": int((time.time() - self._test_start_time) * 1000), + "message": os.linesep.join(self._messages), + "stacktrace": self._stacktrace, + "stdOut": self._stdout, + "stdErr": self._stderr, + } + + # TestPilot supports an instruction count field. 
+ if "TEST_PILOT" in os.environ: + result["instrCount"] = ( + int(self._instr_counter.read() - self._start_instr_count), + ) + + self._results.append(result) + self._current_test = None + + def stopTestRun(self): + cov = self._main_program.get_coverage() + if cov is not None: + self._results.append({"coverage": cov}) + + @contextlib.contextmanager + def _withTest(self, test): + self.startTest(test) + yield + self.stopTest(test) + + def _setStatus(self, test, status, message=None, stacktrace=None): + assert test == self._current_test + self._current_status = status + self._stacktrace = stacktrace + if message is not None: + if message.endswith(os.linesep): + message = message[:-1] + self._messages.append(message) + + def setStatus(self, test, status, message=None, stacktrace=None): + # addError() may be called outside of a test if one of the shared + # fixtures (setUpClass/tearDownClass/setUpModule/tearDownModule) + # throws an error. + # + # In this case, create a fake test result to record the error. 
+ if self._current_test is None: + with self._withTest(test): + self._setStatus(test, status, message, stacktrace) + else: + self._setStatus(test, status, message, stacktrace) + + def setException(self, test, status, excinfo): + exctype, value, tb = excinfo + self.setStatus( + test, + status, + "{0}: {1}".format(exctype.__name__, value), + "".join(traceback.format_tb(tb)), + ) + + def addSuccess(self, test): + super(BuckTestResult, self).addSuccess(test) + self.setStatus(test, TestStatus.PASSED) + + def addError(self, test, err): + super(BuckTestResult, self).addError(test, err) + self.setException(test, TestStatus.ABORTED, err) + + def addFailure(self, test, err): + super(BuckTestResult, self).addFailure(test, err) + self.setException(test, TestStatus.FAILED, err) + + def addSkip(self, test, reason): + super(BuckTestResult, self).addSkip(test, reason) + self.setStatus(test, TestStatus.SKIPPED, "Skipped: %s" % (reason,)) + + def addExpectedFailure(self, test, err): + super(BuckTestResult, self).addExpectedFailure(test, err) + self.setException(test, TestStatus.EXPECTED_FAILURE, err) + + def addUnexpectedSuccess(self, test): + super(BuckTestResult, self).addUnexpectedSuccess(test) + self.setStatus(test, TestStatus.UNEXPECTED_SUCCESS, "Unexpected success") + + def addStdout(self, val): + self._stdout += val + if self._show_output: + self._saved_stdout.write(val) + self._saved_stdout.flush() + + def addStdoutBytes(self, val): + string = val.decode("utf-8", errors="backslashreplace") + self.addStdout(string) + + def addStderr(self, val): + self._stderr += val + if self._show_output: + self._saved_stderr.write(val) + self._saved_stderr.flush() + + def addStderrBytes(self, val): + string = val.decode("utf-8", errors="backslashreplace") + self.addStderr(string) + + +class BuckTestRunner(unittest.TextTestRunner): + def __init__(self, main_program, suite, show_output=True, **kwargs): + super(BuckTestRunner, self).__init__(**kwargs) + self.show_output = show_output + 
self._main_program = main_program + self._suite = suite + + def _makeResult(self): + return BuckTestResult( + self.stream, + self.descriptions, + self.verbosity, + self.show_output, + self._main_program, + self._suite, + ) + + +def _format_test_name(test_class, attrname): + return "{0}.{1}.{2}".format(test_class.__module__, test_class.__name__, attrname) + + +class StderrLogHandler(logging.StreamHandler): + """ + This class is very similar to logging.StreamHandler, except that it + always uses the current sys.stderr object. + + StreamHandler caches the current sys.stderr object when it is constructed. + This makes it behave poorly in unit tests, which may replace sys.stderr + with a StringIO buffer during tests. The StreamHandler will continue using + the old sys.stderr object instead of the desired StringIO buffer. + """ + + def __init__(self): + logging.Handler.__init__(self) + + @property + def stream(self): + return sys.stderr + + +class RegexTestLoader(unittest.TestLoader): + def __init__(self, regex=None): + self.regex = regex + super(RegexTestLoader, self).__init__() + + def getTestCaseNames(self, testCaseClass): + """ + Return a sorted sequence of method names found within testCaseClass + """ + + testFnNames = super(RegexTestLoader, self).getTestCaseNames(testCaseClass) + if self.regex is None: + return testFnNames + robj = re.compile(self.regex) + matched = [] + for attrname in testFnNames: + fullname = _format_test_name(testCaseClass, attrname) + if robj.search(fullname): + matched.append(attrname) + return matched + + +class Loader(object): + + suiteClass = unittest.TestSuite + + def __init__(self, modules, regex=None): + self.modules = modules + self.regex = regex + + def load_all(self): + loader = RegexTestLoader(self.regex) + test_suite = self.suiteClass() + for module_name in self.modules: + __import__(module_name, level=0) + module = sys.modules[module_name] + module_suite = loader.loadTestsFromModule(module) + test_suite.addTest(module_suite) + 
return test_suite + + def load_args(self, args): + loader = RegexTestLoader(self.regex) + + suites = [] + for arg in args: + suite = loader.loadTestsFromName(arg) + # loadTestsFromName() can only process names that refer to + # individual test functions or modules. It can't process package + # names. If there were no module/function matches, check to see if + # this looks like a package name. + if suite.countTestCases() != 0: + suites.append(suite) + continue + + # Load all modules whose name is . + prefix = arg + "." + for module in self.modules: + if module.startswith(prefix): + suite = loader.loadTestsFromName(module) + suites.append(suite) + + return loader.suiteClass(suites) + + +_COVERAGE_INI = """\ +[report] +exclude_lines = + pragma: no cover + pragma: nocover + pragma:.*no${PLATFORM} + pragma:.*no${PY_IMPL}${PY_MAJOR}${PY_MINOR} + pragma:.*no${PY_IMPL}${PY_MAJOR} + pragma:.*nopy${PY_MAJOR} + pragma:.*nopy${PY_MAJOR}${PY_MINOR} +""" + + +class MainProgram(object): + """ + This class implements the main program. It can be subclassed by + users who wish to customize some parts of the main program. + (Adding additional command line options, customizing test loading, etc.) + """ + + DEFAULT_VERBOSITY = 2 + + def __init__(self, argv): + self.init_option_parser() + self.parse_options(argv) + self.setup_logging() + + def init_option_parser(self): + usage = "%prog [options] [TEST] ..." 
+ op = optparse.OptionParser(usage=usage, add_help_option=False) + self.option_parser = op + + op.add_option( + "--hide-output", + dest="show_output", + action="store_false", + default=True, + help="Suppress data that tests print to stdout/stderr, and only " + "show it if the test fails.", + ) + op.add_option( + "-o", + "--output", + help="Write results to a file in a JSON format to be read by Buck", + ) + op.add_option( + "-f", + "--failfast", + action="store_true", + default=False, + help="Stop after the first failure", + ) + op.add_option( + "-l", + "--list-tests", + action="store_true", + dest="list", + default=False, + help="List tests and exit", + ) + op.add_option( + "-r", + "--regex", + default=None, + help="Regex to apply to tests, to only run those tests", + ) + op.add_option( + "--collect-coverage", + action="store_true", + default=False, + help="Collect test coverage information", + ) + op.add_option( + "--coverage-include", + default="*", + help='File globs to include in converage (split by ",")', + ) + op.add_option( + "--coverage-omit", + default="", + help='File globs to omit from converage (split by ",")', + ) + op.add_option( + "--logger", + action="append", + metavar="=", + default=[], + help="Configure log levels for specific logger categories", + ) + op.add_option( + "-q", + "--quiet", + action="count", + default=0, + help="Decrease the verbosity (may be specified multiple times)", + ) + op.add_option( + "-v", + "--verbosity", + action="count", + default=self.DEFAULT_VERBOSITY, + help="Increase the verbosity (may be specified multiple times)", + ) + op.add_option( + "-?", "--help", action="help", help="Show this help message and exit" + ) + + def parse_options(self, argv): + self.options, self.test_args = self.option_parser.parse_args(argv[1:]) + self.options.verbosity -= self.options.quiet + + if self.options.collect_coverage and coverage is None: + self.option_parser.error("coverage module is not available") + self.options.coverage_include = 
self.options.coverage_include.split(",") + if self.options.coverage_omit == "": + self.options.coverage_omit = [] + else: + self.options.coverage_omit = self.options.coverage_omit.split(",") + + def setup_logging(self): + # Configure the root logger to log at INFO level. + # This is similar to logging.basicConfig(), but uses our + # StderrLogHandler instead of a StreamHandler. + fmt = logging.Formatter("%(pathname)s:%(lineno)s: %(message)s") + log_handler = StderrLogHandler() + log_handler.setFormatter(fmt) + root_logger = logging.getLogger() + root_logger.addHandler(log_handler) + root_logger.setLevel(logging.INFO) + + level_names = { + "debug": logging.DEBUG, + "info": logging.INFO, + "warn": logging.WARNING, + "warning": logging.WARNING, + "error": logging.ERROR, + "critical": logging.CRITICAL, + "fatal": logging.FATAL, + } + + for value in self.options.logger: + parts = value.rsplit("=", 1) + if len(parts) != 2: + self.option_parser.error( + "--logger argument must be of the " + "form =: %s" % value + ) + name = parts[0] + level_name = parts[1].lower() + level = level_names.get(level_name) + if level is None: + self.option_parser.error( + "invalid log level %r for log " "category %s" % (parts[1], name) + ) + logging.getLogger(name).setLevel(level) + + def create_loader(self): + import __test_modules__ + + return Loader(__test_modules__.TEST_MODULES, self.options.regex) + + def load_tests(self): + loader = self.create_loader() + if self.options.collect_coverage: + self.start_coverage() + include = self.options.coverage_include + omit = self.options.coverage_omit + if include and "*" not in include: + optimize_for_coverage(self.cov, include, omit) + + if self.test_args: + suite = loader.load_args(self.test_args) + else: + suite = loader.load_all() + if self.options.collect_coverage: + self.cov.start() + return suite + + def get_tests(self, test_suite): + tests = [] + + for test in test_suite: + if isinstance(test, unittest.TestSuite): + 
tests.extend(self.get_tests(test)) + else: + tests.append(test) + + return tests + + def run(self): + test_suite = self.load_tests() + + if self.options.list: + for test in self.get_tests(test_suite): + method_name = getattr(test, "_testMethodName", "") + name = _format_test_name(test.__class__, method_name) + print(name) + return EXIT_CODE_SUCCESS + else: + result = self.run_tests(test_suite) + if self.options.output is not None: + with open(self.options.output, "w") as f: + json.dump(result.getResults(), f, indent=4, sort_keys=True) + if not result.wasSuccessful(): + return EXIT_CODE_TEST_FAILURE + return EXIT_CODE_SUCCESS + + def run_tests(self, test_suite): + # Install a signal handler to catch Ctrl-C and display the results + # (but only if running >2.6). + if sys.version_info[0] > 2 or sys.version_info[1] > 6: + unittest.installHandler() + + # Run the tests + runner = BuckTestRunner( + self, + test_suite, + verbosity=self.options.verbosity, + show_output=self.options.show_output, + ) + result = runner.run(test_suite) + + if self.options.collect_coverage and self.options.show_output: + self.cov.stop() + try: + self.cov.report(file=sys.stdout) + except coverage.misc.CoverageException: + print("No lines were covered, potentially restricted by file filters") + + return result + + def get_abbr_impl(self): + """Return abbreviated implementation name.""" + impl = platform.python_implementation() + if impl == "PyPy": + return "pp" + elif impl == "Jython": + return "jy" + elif impl == "IronPython": + return "ip" + elif impl == "CPython": + return "cp" + else: + raise RuntimeError("unknown python runtime") + + def start_coverage(self): + if not self.options.collect_coverage: + return + + with tempfile.NamedTemporaryFile("w", delete=False) as coverage_ini: + coverage_ini.write(_COVERAGE_INI) + self._coverage_ini_path = coverage_ini.name + + # Keep the original working dir in case tests use os.chdir + self._original_working_dir = os.getcwd() + + # for coverage config 
ignores by platform/python version + os.environ["PLATFORM"] = sys.platform + os.environ["PY_IMPL"] = self.get_abbr_impl() + os.environ["PY_MAJOR"] = str(sys.version_info.major) + os.environ["PY_MINOR"] = str(sys.version_info.minor) + + self.cov = coverage.Coverage( + include=self.options.coverage_include, + omit=self.options.coverage_omit, + config_file=coverage_ini.name, + ) + self.cov.erase() + self.cov.start() + + def get_coverage(self): + if not self.options.collect_coverage: + return None + + try: + os.remove(self._coverage_ini_path) + except OSError: + pass # Better to litter than to fail the test + + # Switch back to the original working directory. + os.chdir(self._original_working_dir) + + result = {} + + self.cov.stop() + + try: + f = StringIO() + self.cov.report(file=f) + lines = f.getvalue().split("\n") + except coverage.misc.CoverageException: + # Nothing was covered. That's fine by us + return result + + # N.B.: the format of the coverage library's output differs + # depending on whether one or more files are in the results + for line in lines[2:]: + if line.strip("-") == "": + break + r = line.split()[0] + analysis = self.cov.analysis2(r) + covString = self.convert_to_diff_cov_str(analysis) + if covString: + result[r] = covString + + return result + + def convert_to_diff_cov_str(self, analysis): + # Info on the format of analysis: + # http://nedbatchelder.com/code/coverage/api.html + if not analysis: + return None + numLines = max( + analysis[1][-1] if len(analysis[1]) else 0, + analysis[2][-1] if len(analysis[2]) else 0, + analysis[3][-1] if len(analysis[3]) else 0, + ) + lines = ["N"] * numLines + for l in analysis[1]: + lines[l - 1] = "C" + for l in analysis[2]: + lines[l - 1] = "X" + for l in analysis[3]: + lines[l - 1] = "U" + return "".join(lines) + + +def main(argv): + return MainProgram(sys.argv).run() + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/build/fbcode_builder/CMake/fb_py_win_main.c 
b/build/fbcode_builder/CMake/fb_py_win_main.c new file mode 100644 index 000000000..85a95b315 --- /dev/null +++ b/build/fbcode_builder/CMake/fb_py_win_main.c @@ -0,0 +1,140 @@ +// Copyright (c) Facebook, Inc. and its affiliates. + +#define WIN32_LEAN_AND_MEAN + +#include +#include +#include + +#define PATH_SIZE 32768 + +typedef int (*Py_Main)(int, wchar_t**); + +// Add the given path to Windows's DLL search path. +// For Windows DLL search path resolution, see: +// https://docs.microsoft.com/en-us/windows/win32/dlls/dynamic-link-library-search-order +void add_search_path(const wchar_t* path) { + wchar_t buffer[PATH_SIZE]; + wchar_t** lppPart = NULL; + + if (!GetFullPathNameW(path, PATH_SIZE, buffer, lppPart)) { + fwprintf( + stderr, + L"warning: %d unable to expand path %s\n", + GetLastError(), + path); + return; + } + + if (!AddDllDirectory(buffer)) { + DWORD error = GetLastError(); + if (error != ERROR_FILE_NOT_FOUND) { + fwprintf( + stderr, + L"warning: %d unable to set DLL search path for %s\n", + GetLastError(), + path); + } + } +} + +int locate_py_main(int argc, wchar_t** argv) { + /* + * We have to dynamically locate Python3.dll because we may be loading a + * Python native module while running. If that module is built with a + * different Python version, we will end up a DLL import error. To resolve + * this, we can either ship an embedded version of Python with us or + * dynamically look up existing Python distribution installed on user's + * machine. This way, we should be able to get a consistent version of + * Python3.dll and .pyd modules. 
+ */ + HINSTANCE python_dll; + Py_Main pymain; + + // last added directory has highest priority + add_search_path(L"C:\\Python36\\"); + add_search_path(L"C:\\tools\\fb-python\\fb-python36\\"); + add_search_path(L"C:\\Python37\\"); + add_search_path(L"C:\\tools\\fb-python\\fb-python37\\"); + add_search_path(L"C:\\Python38\\"); + add_search_path(L"C:\\tools\\fb-python\\fb-python38\\"); + // TODO(T123615656): Re-enable Python 3.9 after the fix + // add_search_path(L"C:\\tools\\fb-python\\fb-python39\\"); + + python_dll = + LoadLibraryExW(L"python3.dll", NULL, LOAD_LIBRARY_SEARCH_DEFAULT_DIRS); + + int returncode = 0; + if (python_dll != NULL) { + pymain = (Py_Main)GetProcAddress(python_dll, "Py_Main"); + + if (pymain != NULL) { + returncode = (pymain)(argc, argv); + } else { + fprintf(stderr, "error: %d unable to load Py_Main\n", GetLastError()); + } + + FreeLibrary(python_dll); + } else { + fprintf(stderr, "error: %d unable to locate python3.dll\n", GetLastError()); + return 1; + } + return returncode; +} + +int wmain() { + /* + * This executable will be prepended to the start of a Python ZIP archive. + * Python will be able to directly execute the ZIP archive, so we simply + * need to tell Py_Main() to run our own file. Duplicate the argument list + * and add our file name to the beginning to tell Python what file to invoke. + */ + wchar_t** pyargv = malloc(sizeof(wchar_t*) * (__argc + 1)); + if (!pyargv) { + fprintf(stderr, "error: failed to allocate argument vector\n"); + return 1; + } + + /* Py_Main wants the wide character version of the argv so we pull those + * values from the global __wargv array that has been prepared by MSVCRT. + * + * In order for the zipapp to run we need to insert an extra argument in + * the front of the argument vector that points to ourselves. 
+ * + * An additional complication is that, depending on who prepared the argument + * string used to start our process, the computed __wargv[0] can be a simple + * shell word like `watchman-wait` which is normally resolved together with + * the PATH by the shell. + * That unresolved path isn't sufficient to start the zipapp on windows; + * we need the fully qualified path. + * + * Given: + * __wargv == {"watchman-wait", "-h"} + * + * we want to pass the following to Py_Main: + * + * { + * "z:\build\watchman\python\watchman-wait.exe", + * "z:\build\watchman\python\watchman-wait.exe", + * "-h" + * } + */ + wchar_t full_path_to_argv0[PATH_SIZE]; + DWORD len = GetModuleFileNameW(NULL, full_path_to_argv0, PATH_SIZE); + if (len == 0 || + len == PATH_SIZE && GetLastError() == ERROR_INSUFFICIENT_BUFFER) { + fprintf( + stderr, + "error: %d while retrieving full path to this executable\n", + GetLastError()); + return 1; + } + + for (int n = 1; n < __argc; ++n) { + pyargv[n + 1] = __wargv[n]; + } + pyargv[0] = full_path_to_argv0; + pyargv[1] = full_path_to_argv0; + + return locate_py_main(__argc + 1, pyargv); +} diff --git a/build/fbcode_builder/CMake/make_fbpy_archive.py b/build/fbcode_builder/CMake/make_fbpy_archive.py new file mode 100755 index 000000000..3724feb21 --- /dev/null +++ b/build/fbcode_builder/CMake/make_fbpy_archive.py @@ -0,0 +1,327 @@ +#!/usr/bin/env python3 +# +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +import argparse +import collections +import errno +import os +import shutil +import sys +import tempfile +import zipapp + +MANIFEST_SEPARATOR = " :: " +MANIFEST_HEADER_V1 = "FBPY_MANIFEST 1\n" + + +class UsageError(Exception): + def __init__(self, message): + self.message = message + + def __str__(self): + return self.message + + +class BadManifestError(UsageError): + def __init__(self, path, line_num, message): + full_msg = "%s:%s: %s" % (path, line_num, message) + super().__init__(full_msg) + self.path = path + self.line_num = line_num + self.raw_message = message + + +PathInfo = collections.namedtuple( + "PathInfo", ("src", "dest", "manifest_path", "manifest_line") +) + + +def parse_manifest(manifest, path_map): + bad_prefix = ".." + os.path.sep + manifest_dir = os.path.dirname(manifest) + with open(manifest, "r") as f: + line_num = 1 + line = f.readline() + if line != MANIFEST_HEADER_V1: + raise BadManifestError( + manifest, line_num, "Unexpected manifest file header" + ) + + for line in f: + line_num += 1 + if line.startswith("#"): + continue + line = line.rstrip("\n") + parts = line.split(MANIFEST_SEPARATOR) + if len(parts) != 2: + msg = "line must be of the form SRC %s DEST" % MANIFEST_SEPARATOR + raise BadManifestError(manifest, line_num, msg) + src, dest = parts + dest = os.path.normpath(dest) + if dest.startswith(bad_prefix): + msg = "destination path starts with %s: %s" % (bad_prefix, dest) + raise BadManifestError(manifest, line_num, msg) + + if not os.path.isabs(src): + src = os.path.normpath(os.path.join(manifest_dir, src)) + + if dest in path_map: + prev_info = path_map[dest] + msg = ( + "multiple source paths specified for destination " + "path %s. 
Previous source was %s from %s:%s" + % ( + dest, + prev_info.src, + prev_info.manifest_path, + prev_info.manifest_line, + ) + ) + raise BadManifestError(manifest, line_num, msg) + + info = PathInfo( + src=src, + dest=dest, + manifest_path=manifest, + manifest_line=line_num, + ) + path_map[dest] = info + + +def populate_install_tree(inst_dir, path_map): + os.mkdir(inst_dir) + dest_dirs = {"": False} + + def make_dest_dir(path): + if path in dest_dirs: + return + parent = os.path.dirname(path) + make_dest_dir(parent) + abs_path = os.path.join(inst_dir, path) + os.mkdir(abs_path) + dest_dirs[path] = False + + def install_file(info): + dir_name, base_name = os.path.split(info.dest) + make_dest_dir(dir_name) + if base_name == "__init__.py": + dest_dirs[dir_name] = True + abs_dest = os.path.join(inst_dir, info.dest) + shutil.copy2(info.src, abs_dest) + + # Copy all of the destination files + for info in path_map.values(): + install_file(info) + + # Create __init__ files in any directories that don't have them. + for dir_path, has_init in dest_dirs.items(): + if has_init: + continue + init_path = os.path.join(inst_dir, dir_path, "__init__.py") + with open(init_path, "w"): + pass + + +def build_zipapp(args, path_map): + """Create a self executing python binary using Python 3's built-in + zipapp module. + + This type of Python binary is relatively simple, as zipapp is part of the + standard library, but it does not support native language extensions + (.so/.dll files). 
+ """ + dest_dir = os.path.dirname(args.output) + with tempfile.TemporaryDirectory(prefix="make_fbpy.", dir=dest_dir) as tmpdir: + inst_dir = os.path.join(tmpdir, "tree") + populate_install_tree(inst_dir, path_map) + + tmp_output = os.path.join(tmpdir, "output.exe") + zipapp.create_archive( + inst_dir, target=tmp_output, interpreter=args.python, main=args.main + ) + os.replace(tmp_output, args.output) + + +def create_main_module(args, inst_dir, path_map): + if not args.main: + assert "__main__.py" in path_map + return + + dest_path = os.path.join(inst_dir, "__main__.py") + main_module, main_fn = args.main.split(":") + main_contents = """\ +#!{python} + +if __name__ == "__main__": + import {main_module} + {main_module}.{main_fn}() +""".format( + python=args.python, main_module=main_module, main_fn=main_fn + ) + with open(dest_path, "w") as f: + f.write(main_contents) + os.chmod(dest_path, 0o755) + + +def build_install_dir(args, path_map): + """Create a directory that contains all of the sources, with a __main__ + module to run the program. + """ + # Populate a temporary directory first, then rename to the destination + # location. This ensures that we don't ever leave a halfway-built + # directory behind at the output path if something goes wrong. 
+ dest_dir = os.path.dirname(args.output) + with tempfile.TemporaryDirectory(prefix="make_fbpy.", dir=dest_dir) as tmpdir: + inst_dir = os.path.join(tmpdir, "tree") + populate_install_tree(inst_dir, path_map) + create_main_module(args, inst_dir, path_map) + os.rename(inst_dir, args.output) + + +def ensure_directory(path): + try: + os.makedirs(path) + except OSError as ex: + if ex.errno != errno.EEXIST: + raise + + +def install_library(args, path_map): + """Create an installation directory a python library.""" + out_dir = args.output + out_manifest = args.output + ".manifest" + + install_dir = args.install_dir + if not install_dir: + install_dir = out_dir + + os.makedirs(out_dir) + with open(out_manifest, "w") as manifest: + manifest.write(MANIFEST_HEADER_V1) + for info in path_map.values(): + abs_dest = os.path.join(out_dir, info.dest) + ensure_directory(os.path.dirname(abs_dest)) + print("copy %r --> %r" % (info.src, abs_dest)) + shutil.copy2(info.src, abs_dest) + installed_dest = os.path.join(install_dir, info.dest) + manifest.write("%s%s%s\n" % (installed_dest, MANIFEST_SEPARATOR, info.dest)) + + +def parse_manifests(args): + # Process args.manifest_separator to help support older versions of CMake + if args.manifest_separator: + manifests = [] + for manifest_arg in args.manifests: + split_arg = manifest_arg.split(args.manifest_separator) + manifests.extend(split_arg) + args.manifests = manifests + + path_map = {} + for manifest in args.manifests: + parse_manifest(manifest, path_map) + + return path_map + + +def check_main_module(args, path_map): + # Translate an empty string in the --main argument to None, + # just to allow the CMake logic to be slightly simpler and pass in an + # empty string when it really wants the default __main__.py module to be + # used. 
+ if args.main == "": + args.main = None + + if args.type == "lib-install": + if args.main is not None: + raise UsageError("cannot specify a --main argument with --type=lib-install") + return + + main_info = path_map.get("__main__.py") + if args.main: + if main_info is not None: + msg = ( + "specified an explicit main module with --main, " + "but the file listing already includes __main__.py" + ) + raise BadManifestError( + main_info.manifest_path, main_info.manifest_line, msg + ) + parts = args.main.split(":") + if len(parts) != 2: + raise UsageError( + "argument to --main must be of the form MODULE:CALLABLE " + "(received %s)" % (args.main,) + ) + else: + if main_info is None: + raise UsageError( + "no main module specified with --main, " + "and no __main__.py module present" + ) + + +BUILD_TYPES = { + "zipapp": build_zipapp, + "dir": build_install_dir, + "lib-install": install_library, +} + + +def main(): + ap = argparse.ArgumentParser() + ap.add_argument("-o", "--output", required=True, help="The output file path") + ap.add_argument( + "--install-dir", + help="When used with --type=lib-install, this parameter specifies the " + "final location where the library where be installed. This can be " + "used to generate the library in one directory first, when you plan " + "to move or copy it to another final location later.", + ) + ap.add_argument( + "--manifest-separator", + help="Split manifest arguments around this separator. This is used " + "to support older versions of CMake that cannot supply the manifests " + "as separate arguments.", + ) + ap.add_argument( + "--main", + help="The main module to run, specified as :. " + "This must be specified if and only if the archive does not contain " + "a __main__.py file.", + ) + ap.add_argument( + "--python", + help="Explicitly specify the python interpreter to use for the " "executable.", + ) + ap.add_argument( + "--type", choices=BUILD_TYPES.keys(), help="The type of output to build." 
+ ) + ap.add_argument( + "manifests", + nargs="+", + help="The manifest files specifying how to construct the archive", + ) + args = ap.parse_args() + + if args.python is None: + args.python = sys.executable + + if args.type is None: + # In the future we might want different default output types + # for different platforms. + args.type = "zipapp" + build_fn = BUILD_TYPES[args.type] + + try: + path_map = parse_manifests(args) + check_main_module(args, path_map) + except UsageError as ex: + print("error: %s" % (ex,), file=sys.stderr) + sys.exit(1) + + build_fn(args, path_map) + + +if __name__ == "__main__": + main() diff --git a/build/fbcode_builder/LICENSE b/build/fbcode_builder/LICENSE new file mode 100644 index 000000000..b96dcb048 --- /dev/null +++ b/build/fbcode_builder/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Facebook, Inc. and its affiliates. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/build/fbcode_builder/README.md b/build/fbcode_builder/README.md new file mode 100644 index 000000000..d47dd41c0 --- /dev/null +++ b/build/fbcode_builder/README.md @@ -0,0 +1,43 @@ +# Easy builds for Facebook projects + +This directory contains tools designed to simplify continuous-integration +(and other builds) of Facebook open source projects. In particular, this helps +manage builds for cross-project dependencies. + +The main entry point is the `getdeps.py` script. This script has several +subcommands, but the most notable is the `build` command. This will download +and build all dependencies for a project, and then build the project itself. + +## Deployment + +This directory is copied literally into a number of different Facebook open +source repositories. Any change made to code in this directory will be +automatically be replicated by our open source tooling into all GitHub hosted +repositories that use `fbcode_builder`. Typically this directory is copied +into the open source repositories as `build/fbcode_builder/`. + + +# Project Configuration Files + +The `manifests` subdirectory contains configuration files for many different +projects, describing how to build each project. These files also list +dependencies between projects, enabling `getdeps.py` to build all dependencies +for a project before building the project itself. + + +# Shared CMake utilities + +Since this directory is copied into many Facebook open source repositories, +it is also used to help share some CMake utility files across projects. The +`CMake/` subdirectory contains a number of `.cmake` files that are shared by +the CMake-based build systems across several different projects. + + +# Older Build Scripts + +This directory also still contains a handful of older build scripts that +pre-date the current `getdeps.py` build system. Most of the other `.py` files +in this top directory, apart from `getdeps.py` itself, are from this older +build system. 
This older system is only used by a few remaining projects, and +new projects should generally use the newer `getdeps.py` script, by adding a +new configuration file in the `manifests/` subdirectory. diff --git a/build/fbcode_builder/getdeps.py b/build/fbcode_builder/getdeps.py new file mode 100755 index 000000000..dfb028a94 --- /dev/null +++ b/build/fbcode_builder/getdeps.py @@ -0,0 +1,1422 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import argparse +import json +import os +import shutil +import subprocess +import sys +import tarfile +import tempfile + +# We don't import cache.create_cache directly as the facebook +# specific import below may monkey patch it, and we want to +# observe the patched version of this function! +import getdeps.cache as cache_module +from getdeps.buildopts import setup_build_options +from getdeps.dyndeps import create_dyn_dep_munger +from getdeps.errors import TransientFailure +from getdeps.fetcher import ( + file_name_is_cmake_file, + list_files_under_dir_newer_than_timestamp, + SystemPackageFetcher, +) +from getdeps.load import ManifestLoader +from getdeps.manifest import ManifestParser +from getdeps.platform import HostType +from getdeps.runcmd import run_cmd +from getdeps.subcmd import add_subcommands, cmd, SubCmd + +try: + import getdeps.facebook # noqa: F401 +except ImportError: + # we don't ship the facebook specific subdir, + # so allow that to fail silently + pass + + +sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "getdeps")) + + +class UsageError(Exception): + pass + + +@cmd("validate-manifest", "parse a manifest and validate that it is correct") +class ValidateManifest(SubCmd): + def run(self, args): + try: + ManifestParser(file_name=args.file_name) + print("OK", file=sys.stderr) + return 0 + except Exception as exc: + 
print("ERROR: %s" % str(exc), file=sys.stderr) + return 1 + + def setup_parser(self, parser): + parser.add_argument("file_name", help="path to the manifest file") + + +@cmd("show-host-type", "outputs the host type tuple for the host machine") +class ShowHostType(SubCmd): + def run(self, args): + host = HostType() + print("%s" % host.as_tuple_string()) + return 0 + + +class ProjectCmdBase(SubCmd): + def run(self, args): + opts = setup_build_options(args) + + if args.current_project is not None: + opts.repo_project = args.current_project + if args.project is None: + if opts.repo_project is None: + raise UsageError( + "no project name specified, and no .projectid file found" + ) + if opts.repo_project == "fbsource": + # The fbsource repository is a little special. There is no project + # manifest file for it. A specific project must always be explicitly + # specified when building from fbsource. + raise UsageError( + "no project name specified (required when building in fbsource)" + ) + args.project = opts.repo_project + + ctx_gen = opts.get_context_generator() + if args.test_dependencies: + ctx_gen.set_value_for_all_projects("test", "on") + if args.enable_tests: + ctx_gen.set_value_for_project(args.project, "test", "on") + else: + ctx_gen.set_value_for_project(args.project, "test", "off") + + if opts.shared_libs: + ctx_gen.set_value_for_all_projects("shared_libs", "on") + + loader = ManifestLoader(opts, ctx_gen) + self.process_project_dir_arguments(args, loader) + + manifest = loader.load_manifest(args.project) + + self.run_project_cmd(args, loader, manifest) + + def process_project_dir_arguments(self, args, loader): + def parse_project_arg(arg, arg_type): + parts = arg.split(":") + if len(parts) == 2: + project, path = parts + elif len(parts) == 1: + project = args.project + path = parts[0] + # On Windows path contains colon, e.g. 
C:\open + elif os.name == "nt" and len(parts) == 3: + project = parts[0] + path = parts[1] + ":" + parts[2] + else: + raise UsageError( + "invalid %s argument; too many ':' characters: %s" % (arg_type, arg) + ) + + return project, os.path.abspath(path) + + # If we are currently running from a project repository, + # use the current repository for the project sources. + build_opts = loader.build_opts + if build_opts.repo_project is not None and build_opts.repo_root is not None: + loader.set_project_src_dir(build_opts.repo_project, build_opts.repo_root) + + for arg in args.src_dir: + project, path = parse_project_arg(arg, "--src-dir") + loader.set_project_src_dir(project, path) + + for arg in args.build_dir: + project, path = parse_project_arg(arg, "--build-dir") + loader.set_project_build_dir(project, path) + + for arg in args.install_dir: + project, path = parse_project_arg(arg, "--install-dir") + loader.set_project_install_dir(project, path) + + for arg in args.project_install_prefix: + project, path = parse_project_arg(arg, "--install-prefix") + loader.set_project_install_prefix(project, path) + + def setup_parser(self, parser): + parser.add_argument( + "project", + nargs="?", + help=( + "name of the project or path to a manifest " + "file describing the project" + ), + ) + parser.add_argument( + "--no-tests", + action="store_false", + dest="enable_tests", + default=True, + help="Disable building tests for this project.", + ) + parser.add_argument( + "--test-dependencies", + action="store_true", + help="Enable building tests for dependencies as well.", + ) + parser.add_argument( + "--current-project", + help="Specify the name of the fbcode_builder manifest file for the " + "current repository. 
If not specified, the code will attempt to find " + "this in a .projectid file in the repository root.", + ) + parser.add_argument( + "--src-dir", + default=[], + action="append", + help="Specify a local directory to use for the project source, " + "rather than fetching it.", + ) + parser.add_argument( + "--build-dir", + default=[], + action="append", + help="Explicitly specify the build directory to use for the " + "project, instead of the default location in the scratch path. " + "This only affects the project specified, and not its dependencies.", + ) + parser.add_argument( + "--install-dir", + default=[], + action="append", + help="Explicitly specify the install directory to use for the " + "project, instead of the default location in the scratch path. " + "This only affects the project specified, and not its dependencies.", + ) + parser.add_argument( + "--project-install-prefix", + default=[], + action="append", + help="Specify the final deployment installation path for a project", + ) + + self.setup_project_cmd_parser(parser) + + def setup_project_cmd_parser(self, parser): + pass + + def create_builder(self, loader, manifest): + fetcher = loader.create_fetcher(manifest) + src_dir = fetcher.get_src_dir() + ctx = loader.ctx_gen.get_context(manifest.name) + build_dir = loader.get_project_build_dir(manifest) + inst_dir = loader.get_project_install_dir(manifest) + return manifest.create_builder( + loader.build_opts, + src_dir, + build_dir, + inst_dir, + ctx, + loader, + loader.dependencies_of(manifest), + ) + + def check_built(self, loader, manifest): + built_marker = os.path.join( + loader.get_project_install_dir(manifest), ".built-by-getdeps" + ) + return os.path.exists(built_marker) + + +class CachedProject(object): + """A helper that allows calling the cache logic for a project + from both the build and the fetch code""" + + def __init__(self, cache, loader, m): + self.m = m + self.inst_dir = loader.get_project_install_dir(m) + self.project_hash = 
loader.get_project_hash(m) + self.ctx = loader.ctx_gen.get_context(m.name) + self.loader = loader + self.cache = cache + + self.cache_file_name = "-".join( + ( + m.name, + self.ctx.get("os"), + self.ctx.get("distro") or "none", + self.ctx.get("distro_vers") or "none", + self.project_hash, + "buildcache.tgz", + ) + ) + + def is_cacheable(self): + """We only cache third party projects""" + return self.cache and self.m.shipit_project is None + + def was_cached(self): + cached_marker = os.path.join(self.inst_dir, ".getdeps-cached-build") + return os.path.exists(cached_marker) + + def download(self): + if self.is_cacheable() and not os.path.exists(self.inst_dir): + print("check cache for %s" % self.cache_file_name) + dl_dir = os.path.join(self.loader.build_opts.scratch_dir, "downloads") + if not os.path.exists(dl_dir): + os.makedirs(dl_dir) + try: + target_file_name = os.path.join(dl_dir, self.cache_file_name) + if self.cache.download_to_file(self.cache_file_name, target_file_name): + tf = tarfile.open(target_file_name, "r") + print( + "Extracting %s -> %s..." % (self.cache_file_name, self.inst_dir) + ) + tf.extractall(self.inst_dir) + + cached_marker = os.path.join(self.inst_dir, ".getdeps-cached-build") + with open(cached_marker, "w") as f: + f.write("\n") + + return True + except Exception as exc: + print("%s" % str(exc)) + + return False + + def upload(self): + if self.is_cacheable(): + # We can prepare an archive and stick it in LFS + tempdir = tempfile.mkdtemp() + tarfilename = os.path.join(tempdir, self.cache_file_name) + print("Archiving for cache: %s..." 
% tarfilename) + tf = tarfile.open(tarfilename, "w:gz") + tf.add(self.inst_dir, arcname=".") + tf.close() + try: + self.cache.upload_from_file(self.cache_file_name, tarfilename) + except Exception as exc: + print( + "Failed to upload to cache (%s), continue anyway" % str(exc), + file=sys.stderr, + ) + shutil.rmtree(tempdir) + + +@cmd("fetch", "fetch the code for a given project") +class FetchCmd(ProjectCmdBase): + def setup_project_cmd_parser(self, parser): + parser.add_argument( + "--recursive", + help="fetch the transitive deps also", + action="store_true", + default=False, + ) + parser.add_argument( + "--host-type", + help=( + "When recursively fetching, fetch deps for " + "this host type rather than the current system" + ), + ) + + def run_project_cmd(self, args, loader, manifest): + if args.recursive: + projects = loader.manifests_in_dependency_order() + else: + projects = [manifest] + + cache = cache_module.create_cache() + for m in projects: + cached_project = CachedProject(cache, loader, m) + if cached_project.download(): + continue + + inst_dir = loader.get_project_install_dir(m) + built_marker = os.path.join(inst_dir, ".built-by-getdeps") + if os.path.exists(built_marker): + with open(built_marker, "r") as f: + built_hash = f.read().strip() + + project_hash = loader.get_project_hash(m) + if built_hash == project_hash: + continue + + # We need to fetch the sources + fetcher = loader.create_fetcher(m) + fetcher.update() + + +@cmd("install-system-deps", "Install system packages to satisfy the deps for a project") +class InstallSysDepsCmd(ProjectCmdBase): + def setup_project_cmd_parser(self, parser): + parser.add_argument( + "--recursive", + help="install the transitive deps also", + action="store_true", + default=False, + ) + parser.add_argument( + "--dry-run", + action="store_true", + default=False, + help="Don't install, just print the commands specs we would run", + ) + parser.add_argument( + "--os-type", + help="Filter to just this OS type to run", + 
choices=["linux", "darwin", "windows", "pacman-package"], + action="store", + dest="ostype", + default=None, + ) + parser.add_argument( + "--distro", + help="Filter to just this distro to run", + choices=["ubuntu", "centos_stream"], + action="store", + dest="distro", + default=None, + ) + parser.add_argument( + "--distro-version", + help="Filter to just this distro version", + action="store", + dest="distrovers", + default=None, + ) + + def run_project_cmd(self, args, loader, manifest): + if args.recursive: + projects = loader.manifests_in_dependency_order() + else: + projects = [manifest] + + rebuild_ctx_gen = False + if args.ostype: + loader.build_opts.host_type.ostype = args.ostype + loader.build_opts.host_type.distro = None + loader.build_opts.host_type.distrovers = None + rebuild_ctx_gen = True + + if args.distro: + loader.build_opts.host_type.distro = args.distro + loader.build_opts.host_type.distrovers = None + rebuild_ctx_gen = True + + if args.distrovers: + loader.build_opts.host_type.distrovers = args.distrovers + rebuild_ctx_gen = True + + if rebuild_ctx_gen: + loader.ctx_gen = loader.build_opts.get_context_generator() + + manager = loader.build_opts.host_type.get_package_manager() + + all_packages = {} + for m in projects: + ctx = loader.ctx_gen.get_context(m.name) + packages = m.get_required_system_packages(ctx) + for k, v in packages.items(): + merged = all_packages.get(k, []) + merged += v + all_packages[k] = merged + + cmd_args = None + if manager == "rpm": + packages = sorted(set(all_packages["rpm"])) + if packages: + cmd_args = ["sudo", "dnf", "install", "-y"] + packages + elif manager == "deb": + packages = sorted(set(all_packages["deb"])) + if packages: + cmd_args = ["sudo", "apt", "install", "-y"] + packages + elif manager == "homebrew": + packages = sorted(set(all_packages["homebrew"])) + if packages: + cmd_args = ["brew", "install"] + packages + elif manager == "pacman-package": + packages = sorted(list(set(all_packages["pacman-package"]))) + 
if packages: + cmd_args = ["pacman", "-S"] + packages + else: + host_tuple = loader.build_opts.host_type.as_tuple_string() + print( + f"I don't know how to install any packages on this system {host_tuple}" + ) + return + + if cmd_args: + if args.dry_run: + print(" ".join(cmd_args)) + else: + run_cmd(cmd_args) + else: + print("no packages to install") + + +@cmd("list-deps", "lists the transitive deps for a given project") +class ListDepsCmd(ProjectCmdBase): + def run_project_cmd(self, args, loader, manifest): + for m in loader.manifests_in_dependency_order(): + print(m.name) + return 0 + + def setup_project_cmd_parser(self, parser): + parser.add_argument( + "--host-type", + help=( + "Produce the list for the specified host type, " + "rather than that of the current system" + ), + ) + + +def clean_dirs(opts): + for d in ["build", "installed", "extracted", "shipit"]: + d = os.path.join(opts.scratch_dir, d) + print("Cleaning %s..." % d) + if os.path.exists(d): + shutil.rmtree(d) + + +@cmd("clean", "clean up the scratch dir") +class CleanCmd(SubCmd): + def run(self, args): + opts = setup_build_options(args) + clean_dirs(opts) + + +@cmd("show-scratch-dir", "show the scratch dir") +class ShowScratchDirCmd(SubCmd): + def run(self, args): + opts = setup_build_options(args) + print(opts.scratch_dir) + + +@cmd("show-build-dir", "print the build dir for a given project") +class ShowBuildDirCmd(ProjectCmdBase): + def run_project_cmd(self, args, loader, manifest): + if args.recursive: + manifests = loader.manifests_in_dependency_order() + else: + manifests = [manifest] + + for m in manifests: + inst_dir = loader.get_project_build_dir(m) + print(inst_dir) + + def setup_project_cmd_parser(self, parser): + parser.add_argument( + "--recursive", + help="print the transitive deps also", + action="store_true", + default=False, + ) + + +@cmd("show-inst-dir", "print the installation dir for a given project") +class ShowInstDirCmd(ProjectCmdBase): + def run_project_cmd(self, args, loader, 
manifest): + if args.recursive: + manifests = loader.manifests_in_dependency_order() + else: + manifests = [manifest] + + for m in manifests: + fetcher = loader.create_fetcher(m) + if isinstance(fetcher, SystemPackageFetcher): + # We are guaranteed that if the fetcher is set to + # SystemPackageFetcher then this item is completely + # satisfied by the appropriate system packages + continue + inst_dir = loader.get_project_install_dir_respecting_install_prefix(m) + print(inst_dir) + + def setup_project_cmd_parser(self, parser): + parser.add_argument( + "--recursive", + help="print the transitive deps also", + action="store_true", + default=False, + ) + + +@cmd("show-source-dir", "print the source dir for a given project") +class ShowSourceDirCmd(ProjectCmdBase): + def run_project_cmd(self, args, loader, manifest): + if args.recursive: + manifests = loader.manifests_in_dependency_order() + else: + manifests = [manifest] + + for m in manifests: + fetcher = loader.create_fetcher(m) + print(fetcher.get_src_dir()) + + def setup_project_cmd_parser(self, parser): + parser.add_argument( + "--recursive", + help="print the transitive deps also", + action="store_true", + default=False, + ) + + +@cmd("build", "build a given project") +class BuildCmd(ProjectCmdBase): + def run_project_cmd(self, args, loader, manifest): + if args.clean: + clean_dirs(loader.build_opts) + + print("Building on %s" % loader.ctx_gen.get_context(args.project)) + projects = loader.manifests_in_dependency_order() + + cache = cache_module.create_cache() if args.use_build_cache else None + + dep_manifests = [] + + for m in projects: + dep_manifests.append(m) + + fetcher = loader.create_fetcher(m) + + if args.build_skip_lfs_download and hasattr(fetcher, "skip_lfs_download"): + print("skipping lfs download for %s" % m.name) + fetcher.skip_lfs_download() + + if isinstance(fetcher, SystemPackageFetcher): + # We are guaranteed that if the fetcher is set to + # SystemPackageFetcher then this item is completely + 
# satisfied by the appropriate system packages + continue + + if args.clean: + fetcher.clean() + + build_dir = loader.get_project_build_dir(m) + inst_dir = loader.get_project_install_dir(m) + + if ( + m == manifest + and not args.only_deps + or m != manifest + and not args.no_deps + ): + print("Assessing %s..." % m.name) + project_hash = loader.get_project_hash(m) + ctx = loader.ctx_gen.get_context(m.name) + built_marker = os.path.join(inst_dir, ".built-by-getdeps") + + cached_project = CachedProject(cache, loader, m) + + reconfigure, sources_changed = self.compute_source_change_status( + cached_project, fetcher, m, built_marker, project_hash + ) + + if os.path.exists(built_marker) and not cached_project.was_cached(): + # We've previously built this. We may need to reconfigure if + # our deps have changed, so let's check them. + dep_reconfigure, dep_build = self.compute_dep_change_status( + m, built_marker, loader + ) + if dep_reconfigure: + reconfigure = True + if dep_build: + sources_changed = True + + extra_cmake_defines = ( + json.loads(args.extra_cmake_defines) + if args.extra_cmake_defines + else {} + ) + + extra_b2_args = args.extra_b2_args or [] + + if sources_changed or reconfigure or not os.path.exists(built_marker): + if os.path.exists(built_marker): + os.unlink(built_marker) + src_dir = fetcher.get_src_dir() + # Prepare builders write out config before the main builder runs + prepare_builders = m.create_prepare_builders( + loader.build_opts, + ctx, + src_dir, + build_dir, + inst_dir, + loader, + dep_manifests, + ) + for preparer in prepare_builders: + preparer.prepare(reconfigure=reconfigure) + + builder = m.create_builder( + loader.build_opts, + src_dir, + build_dir, + inst_dir, + ctx, + loader, + dep_manifests, + final_install_prefix=loader.get_project_install_prefix(m), + extra_cmake_defines=extra_cmake_defines, + cmake_target=args.cmake_target if m == manifest else "install", + extra_b2_args=extra_b2_args, + ) + 
builder.build(reconfigure=reconfigure) + + # If we are building the project (not dependency) and a specific + # cmake_target (not 'install') has been requested, then we don't + # set the built_marker. This allows subsequent runs of getdeps.py + # for the project to run with different cmake_targets to trigger + # cmake + has_built_marker = False + if not (m == manifest and args.cmake_target != "install"): + with open(built_marker, "w") as f: + f.write(project_hash) + has_built_marker = True + + # Only populate the cache from continuous build runs, and + # only if we have a built_marker. + if ( + not args.skip_upload + and args.schedule_type == "continuous" + and has_built_marker + ): + cached_project.upload() + elif args.verbose: + print("found good %s" % built_marker) + + def compute_dep_change_status(self, m, built_marker, loader): + reconfigure = False + sources_changed = False + st = os.lstat(built_marker) + + ctx = loader.ctx_gen.get_context(m.name) + dep_list = m.get_dependencies(ctx) + for dep in dep_list: + if reconfigure and sources_changed: + break + + dep_manifest = loader.load_manifest(dep) + dep_root = loader.get_project_install_dir(dep_manifest) + for dep_file in list_files_under_dir_newer_than_timestamp( + dep_root, st.st_mtime + ): + if os.path.basename(dep_file) == ".built-by-getdeps": + continue + if file_name_is_cmake_file(dep_file): + if not reconfigure: + reconfigure = True + print( + f"Will reconfigure cmake because {dep_file} is newer than {built_marker}" + ) + else: + if not sources_changed: + sources_changed = True + print( + f"Will run build because {dep_file} is newer than {built_marker}" + ) + + if reconfigure and sources_changed: + break + + return reconfigure, sources_changed + + def compute_source_change_status( + self, cached_project, fetcher, m, built_marker, project_hash + ): + reconfigure = False + sources_changed = False + if cached_project.download(): + if not os.path.exists(built_marker): + fetcher.update() + else: + 
check_fetcher = True + if os.path.exists(built_marker): + check_fetcher = False + with open(built_marker, "r") as f: + built_hash = f.read().strip() + if built_hash == project_hash: + if cached_project.is_cacheable(): + # We can blindly trust the build status + reconfigure = False + sources_changed = False + else: + # Otherwise, we may have changed the source, so let's + # check in with the fetcher layer + check_fetcher = True + else: + # Some kind of inconsistency with a prior build, + # let's run it again to be sure + os.unlink(built_marker) + reconfigure = True + sources_changed = True + # While we don't need to consult the fetcher for the + # status in this case, we may still need to have eg: shipit + # run in order to have a correct source tree. + fetcher.update() + + if check_fetcher: + change_status = fetcher.update() + reconfigure = change_status.build_changed() + sources_changed = change_status.sources_changed() + + return reconfigure, sources_changed + + def setup_project_cmd_parser(self, parser): + parser.add_argument( + "--clean", + action="store_true", + default=False, + help=( + "Clean up the build and installation area prior to building, " + "causing the projects to be built from scratch" + ), + ) + parser.add_argument( + "--no-deps", + action="store_true", + default=False, + help=( + "Only build the named project, not its deps. " + "This is most useful after you've built all of the deps, " + "and helps to avoid waiting for relatively " + "slow up-to-date-ness checks" + ), + ) + parser.add_argument( + "--only-deps", + action="store_true", + default=False, + help=( + "Only build the named project's deps. 
" + "This is most useful when you want to separate out building " + "of all of the deps and your project" + ), + ) + parser.add_argument( + "--no-build-cache", + action="store_false", + default=True, + dest="use_build_cache", + help="Do not attempt to use the build cache.", + ) + parser.add_argument( + "--schedule-type", help="Indicates how the build was activated" + ) + parser.add_argument( + "--cmake-target", + help=("Target for cmake build."), + default="install", + ) + parser.add_argument( + "--extra-b2-args", + help=( + "Repeatable argument that contains extra arguments to pass " + "to b2, which compiles boost. " + "e.g.: 'cxxflags=-fPIC' 'cflags=-fPIC'" + ), + action="append", + ) + parser.add_argument( + "--free-up-disk", + help="Remove unused tools and clean up intermediate files if possible to maximise space for the build", + action="store_true", + default=False, + ) + parser.add_argument( + "--build-type", + help="Set the build type explicitly. Cmake and cargo builders act on them. 
Only Debug and RelWithDebInfo widely supported.", + choices=["Debug", "Release", "RelWithDebInfo", "MinSizeRel"], + action="store", + default=None, + ) + + +@cmd("fixup-dyn-deps", "Adjusts dynamic dependencies for packaging purposes") +class FixupDeps(ProjectCmdBase): + def run_project_cmd(self, args, loader, manifest): + projects = loader.manifests_in_dependency_order() + + # Accumulate the install directories so that the build steps + # can find their dep installation + install_dirs = [] + dep_manifests = [] + + for m in projects: + inst_dir = loader.get_project_install_dir_respecting_install_prefix(m) + install_dirs.append(inst_dir) + dep_manifests.append(m) + + if m == manifest: + ctx = loader.ctx_gen.get_context(m.name) + env = loader.build_opts.compute_env_for_install_dirs( + loader, dep_manifests, ctx + ) + dep_munger = create_dyn_dep_munger( + loader.build_opts, env, install_dirs, args.strip + ) + if dep_munger is None: + print(f"dynamic dependency fixups not supported on {sys.platform}") + else: + dep_munger.process_deps(args.destdir, args.final_install_prefix) + + def setup_project_cmd_parser(self, parser): + parser.add_argument("destdir", help="Where to copy the fixed up executables") + parser.add_argument( + "--final-install-prefix", help="specify the final installation prefix" + ) + parser.add_argument( + "--strip", + action="store_true", + default=False, + help="Strip debug info while processing executables", + ) + + +@cmd("test", "test a given project") +class TestCmd(ProjectCmdBase): + def run_project_cmd(self, args, loader, manifest): + if not self.check_built(loader, manifest): + print("project %s has not been built" % manifest.name) + return 1 + self.create_builder(loader, manifest).run_tests( + schedule_type=args.schedule_type, + owner=args.test_owner, + test_filter=args.filter, + retry=args.retry, + no_testpilot=args.no_testpilot, + ) + + def setup_project_cmd_parser(self, parser): + parser.add_argument( + "--schedule-type", help="Indicates how 
the build was activated" + ) + parser.add_argument("--test-owner", help="Owner for testpilot") + parser.add_argument("--filter", help="Only run the tests matching the regex") + parser.add_argument( + "--retry", + type=int, + default=3, + help="Number of immediate retries for failed tests " + "(noop in continuous and testwarden runs)", + ) + parser.add_argument( + "--no-testpilot", + help="Do not use Test Pilot even when available", + action="store_true", + ) + + +@cmd( + "debug", + "start a shell in the given project's build dir with the correct environment for running the build", +) +class DebugCmd(ProjectCmdBase): + def run_project_cmd(self, args, loader, manifest): + self.create_builder(loader, manifest).debug(reconfigure=False) + + +@cmd("generate-github-actions", "generate a GitHub actions configuration") +class GenerateGitHubActionsCmd(ProjectCmdBase): + RUN_ON_ALL = """ [push, pull_request]""" + + def run_project_cmd(self, args, loader, manifest): + platforms = [ + HostType("linux", "ubuntu", "22"), + HostType("darwin", None, None), + HostType("windows", None, None), + ] + + for p in platforms: + if args.os_types and p.ostype not in args.os_types: + continue + self.write_job_for_platform(p, args) + + def get_run_on(self, args): + if args.run_on_all_branches: + return self.RUN_ON_ALL + if args.cron: + return f""" + schedule: + - cron: '{args.cron}'""" + + return f""" + push: + branches: + - {args.main_branch} + pull_request: + branches: + - {args.main_branch}""" + + # TODO: Break up complex function + def write_job_for_platform(self, platform, args): # noqa: C901 + build_opts = setup_build_options(args, platform) + ctx_gen = build_opts.get_context_generator() + loader = ManifestLoader(build_opts, ctx_gen) + manifest = loader.load_manifest(args.project) + manifest_ctx = loader.ctx_gen.get_context(manifest.name) + run_on = self.get_run_on(args) + + # Some projects don't do anything "useful" as a leaf project, only + # as a dep for a leaf project. 
Check for those here; we don't want + # to waste the effort scheduling them on CI. + # We do this by looking at the builder type in the manifest file + # rather than creating a builder and checking its type because we + # don't know enough to create the full builder instance here. + builder_name = manifest.get("build", "builder", ctx=manifest_ctx) + if builder_name == "nop": + return None + + # We want to be sure that we're running things with python 3 + # but python versioning is honestly a bit of a frustrating mess. + # `python` may be version 2 or version 3 depending on the system. + # python3 may not be a thing at all! + # Assume an optimistic default + py3 = "python3" + + if build_opts.is_linux(): + artifacts = "linux" + runs_on = f"ubuntu-{args.ubuntu_version}" + elif build_opts.is_windows(): + artifacts = "windows" + runs_on = "windows-2019" + # The windows runners are python 3 by default; python2.exe + # is available if needed. + py3 = "python" + else: + artifacts = "mac" + runs_on = "macOS-latest" + + os.makedirs(args.output_dir, exist_ok=True) + + job_file_prefix = "getdeps_" + if args.job_file_prefix: + job_file_prefix = args.job_file_prefix + + output_file = os.path.join(args.output_dir, f"{job_file_prefix}{artifacts}.yml") + + if args.job_name_prefix: + job_name = args.job_name_prefix + artifacts.capitalize() + else: + job_name = artifacts + + with open(output_file, "w") as out: + # Deliberate line break here because the @ and the generated + # symbols are meaningful to our internal tooling when they + # appear in a single token + out.write("# This file was @") + out.write("generated by getdeps.py\n") + out.write( + f""" +name: {job_name} + +on:{run_on} + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: +""" + ) + + getdepscmd = f"{py3} build/fbcode_builder/getdeps.py" + + out.write(" build:\n") + out.write(" runs-on: %s\n" % runs_on) + out.write(" steps:\n") + + if build_opts.is_windows(): + # cmake relies on BOOST_ROOT but 
GH deliberately don't set it in order + # to avoid versioning issues: + # https://github.com/actions/virtual-environments/issues/319 + # Instead, set the version we think we need; this is effectively + # coupled with the boost manifest + # This is the unusual syntax for setting an env var for the rest of + # the steps in a workflow: + # https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/ + out.write(" - name: Export boost environment\n") + out.write( + ' run: "echo BOOST_ROOT=%BOOST_ROOT_1_83_0% >> %GITHUB_ENV%"\n' + ) + out.write(" shell: cmd\n") + + # The git installation may not like long filenames, so tell it + # that we want it to use them! + out.write(" - name: Fix Git config\n") + out.write(" run: git config --system core.longpaths true\n") + out.write(" - name: Disable autocrlf\n") + out.write(" run: git config --system core.autocrlf false\n") + + out.write(" - uses: actions/checkout@v4\n") + + build_type_arg = "" + if args.build_type: + build_type_arg = f"--build-type {args.build_type} " + + if build_opts.free_up_disk: + free_up_disk = "--free-up-disk " + if not build_opts.is_windows(): + out.write(" - name: Show disk space at start\n") + out.write(" run: df -h\n") + # remove the unused github supplied android dev tools + out.write(" - name: Free up disk space\n") + out.write(" run: sudo rm -rf /usr/local/lib/android\n") + out.write(" - name: Show disk space after freeing up\n") + out.write(" run: df -h\n") + else: + free_up_disk = "" + + allow_sys_arg = "" + if ( + build_opts.allow_system_packages + and build_opts.host_type.get_package_manager() + ): + sudo_arg = "sudo " + allow_sys_arg = " --allow-system-packages" + if build_opts.host_type.get_package_manager() == "deb": + out.write(" - name: Update system package info\n") + out.write(f" run: {sudo_arg}apt-get update\n") + + out.write(" - name: Install system deps\n") + if build_opts.is_darwin(): + # brew is installed as regular user + sudo_arg = "" + 
out.write( + f" run: {sudo_arg}python3 build/fbcode_builder/getdeps.py --allow-system-packages install-system-deps --recursive {manifest.name}\n" + ) + if build_opts.is_linux() or build_opts.is_freebsd(): + out.write(" - name: Install packaging system deps\n") + out.write( + f" run: {sudo_arg}python3 build/fbcode_builder/getdeps.py --allow-system-packages install-system-deps --recursive patchelf\n" + ) + + projects = loader.manifests_in_dependency_order() + + main_repo_url = manifest.get_repo_url(manifest_ctx) + has_same_repo_dep = False + + # Add the rust dep which doesn't have a manifest + for m in projects: + if m == manifest: + continue + mbuilder_name = m.get("build", "builder", ctx=manifest_ctx) + if ( + m.name == "rust" + or builder_name == "cargo" + or mbuilder_name == "cargo" + ): + out.write(" - name: Install Rust Stable\n") + out.write(" uses: dtolnay/rust-toolchain@stable\n") + break + + # Normal deps that have manifests + for m in projects: + if m == manifest or m.name == "rust": + continue + ctx = loader.ctx_gen.get_context(m.name) + if m.get_repo_url(ctx) != main_repo_url: + out.write(" - name: Fetch %s\n" % m.name) + out.write( + f" run: {getdepscmd}{allow_sys_arg} fetch --no-tests {m.name}\n" + ) + + for m in projects: + if m != manifest: + if m.name == "rust": + continue + else: + src_dir_arg = "" + ctx = loader.ctx_gen.get_context(m.name) + if main_repo_url and m.get_repo_url(ctx) == main_repo_url: + # Its in the same repo, so src-dir is also . + src_dir_arg = "--src-dir=. 
" + has_same_repo_dep = True + out.write(" - name: Build %s\n" % m.name) + out.write( + f" run: {getdepscmd}{allow_sys_arg} build {build_type_arg}{src_dir_arg}{free_up_disk}--no-tests {m.name}\n" + ) + + out.write(" - name: Build %s\n" % manifest.name) + + project_prefix = "" + if not build_opts.is_windows(): + project_prefix = ( + " --project-install-prefix %s:/usr/local" % manifest.name + ) + + # If we have dep from same repo, we already built it and don't want to rebuild it again + no_deps_arg = "" + if has_same_repo_dep: + no_deps_arg = "--no-deps " + + no_tests_arg = "" + if not args.enable_tests: + no_tests_arg = "--no-tests " + + out.write( + f" run: {getdepscmd}{allow_sys_arg} build {build_type_arg}{no_tests_arg}{no_deps_arg}--src-dir=. {manifest.name} {project_prefix}\n" + ) + + out.write(" - name: Copy artifacts\n") + if build_opts.is_linux(): + # Strip debug info from the binaries, but only on linux. + # While the `strip` utility is also available on macOS, + # attempting to strip there results in an error. + # The `strip` utility is not available on Windows. + strip = " --strip" + else: + strip = "" + + out.write( + f" run: {getdepscmd}{allow_sys_arg} fixup-dyn-deps{strip} " + f"--src-dir=. {manifest.name} _artifacts/{artifacts} {project_prefix} " + f"--final-install-prefix /usr/local\n" + ) + + out.write(" - uses: actions/upload-artifact@v2\n") + out.write(" with:\n") + out.write(" name: %s\n" % manifest.name) + out.write(" path: _artifacts\n") + + if ( + args.enable_tests + and manifest.get("github.actions", "run_tests", ctx=manifest_ctx) + != "off" + ): + out.write(" - name: Test %s\n" % manifest.name) + out.write( + f" run: {getdepscmd}{allow_sys_arg} test --src-dir=. 
{manifest.name} {project_prefix}\n" + ) + if build_opts.free_up_disk and not build_opts.is_windows(): + out.write(" - name: Show disk space at end\n") + out.write(" run: df -h\n") + + def setup_project_cmd_parser(self, parser): + parser.add_argument( + "--disallow-system-packages", + help="Disallow satisfying third party deps from installed system packages", + action="store_true", + default=False, + ) + parser.add_argument( + "--output-dir", help="The directory that will contain the yml files" + ) + parser.add_argument( + "--run-on-all-branches", + action="store_true", + help="Allow CI to fire on all branches - Handy for testing", + ) + parser.add_argument( + "--ubuntu-version", default="22.04", help="Version of Ubuntu to use" + ) + parser.add_argument( + "--cron", + help="Specify that the job runs on a cron schedule instead of on pushes", + ) + parser.add_argument( + "--main-branch", + default="main", + help="Main branch to trigger GitHub Action on", + ) + parser.add_argument( + "--os-type", + help="Filter to just this OS type to run", + choices=["linux", "darwin", "windows"], + action="append", + dest="os_types", + default=[], + ) + parser.add_argument( + "--job-file-prefix", + type=str, + help="add a prefix to all job file names", + default=None, + ) + parser.add_argument( + "--job-name-prefix", + type=str, + help="add a prefix to all job names", + default=None, + ) + parser.add_argument( + "--free-up-disk", + help="Remove unused tools and clean up intermediate files if possible to maximise space for the build", + action="store_true", + default=False, + ) + parser.add_argument( + "--build-type", + help="Set the build type explicitly. Cmake and cargo builders act on them. 
Only Debug and RelWithDebInfo widely supported.", + choices=["Debug", "Release", "RelWithDebInfo", "MinSizeRel"], + action="store", + default=None, + ) + + +def get_arg_var_name(args): + for arg in args: + if arg.startswith("--"): + return arg[2:].replace("-", "_") + + raise Exception("unable to determine argument variable name from %r" % (args,)) + + +def parse_args(): + # We want to allow common arguments to be specified either before or after + # the subcommand name. In order to do this we add them to the main parser + # and to subcommand parsers. In order for this to work, we need to tell + # argparse that the default value is SUPPRESS, so that the default values + # from the subparser arguments won't override values set by the user from + # the main parser. We maintain our own list of desired defaults in the + # common_defaults dictionary, and manually set those if the argument wasn't + # present at all. + common_args = argparse.ArgumentParser(add_help=False) + common_defaults = {} + + def add_common_arg(*args, **kwargs): + var_name = get_arg_var_name(args) + default_value = kwargs.pop("default", None) + common_defaults[var_name] = default_value + kwargs["default"] = argparse.SUPPRESS + common_args.add_argument(*args, **kwargs) + + add_common_arg("--scratch-path", help="Where to maintain checkouts and build dirs") + add_common_arg( + "--vcvars-path", default=None, help="Path to the vcvarsall.bat on Windows." + ) + add_common_arg( + "--install-prefix", + help=( + "Where the final build products will be installed " + "(default is [scratch-path]/installed)" + ), + ) + add_common_arg( + "--num-jobs", + type=int, + help=( + "Number of concurrent jobs to use while building. 
" + "(default=number of cpu cores)" + ), + ) + add_common_arg( + "--use-shipit", + help="use the real ShipIt instead of the simple shipit transformer", + action="store_true", + default=False, + ) + add_common_arg( + "--facebook-internal", + help="Setup the build context as an FB internal build", + action="store_true", + default=None, + ) + add_common_arg( + "--no-facebook-internal", + help="Perform a non-FB internal build, even when in an fbsource repository", + action="store_false", + dest="facebook_internal", + ) + add_common_arg( + "--shared-libs", + help="Build shared libraries if possible", + action="store_true", + default=False, + ) + add_common_arg( + "--extra-cmake-defines", + help=( + "Input json map that contains extra cmake defines to be used " + "when compiling the current project and all its deps. " + 'e.g: \'{"CMAKE_CXX_FLAGS": "--bla"}\'' + ), + ) + add_common_arg( + "--allow-system-packages", + help="Allow satisfying third party deps from installed system packages", + action="store_true", + default=False, + ) + add_common_arg( + "-v", + "--verbose", + help="Print more output", + action="store_true", + default=False, + ) + add_common_arg( + "-su", + "--skip-upload", + help="skip upload steps", + action="store_true", + default=False, + ) + add_common_arg( + "--lfs-path", + help="Provide a parent directory for lfs when fbsource is unavailable", + default=None, + ) + add_common_arg( + "--build-skip-lfs-download", + action="store_true", + default=False, + help=( + "Download from the URL, rather than LFS. This is useful " + "in cases where the upstream project has uploaded a new " + "version of the archive with a different hash" + ), + ) + + ap = argparse.ArgumentParser( + description="Get and build dependencies and projects", parents=[common_args] + ) + sub = ap.add_subparsers( + # metavar suppresses the long and ugly default list of subcommands on a + # single line. We still render the nicer list below where we would + # have shown the nasty one. 
+ metavar="", + title="Available commands", + help="", + ) + + add_subcommands(sub, common_args) + + args = ap.parse_args() + for var_name, default_value in common_defaults.items(): + if not hasattr(args, var_name): + setattr(args, var_name, default_value) + + return ap, args + + +def main(): + ap, args = parse_args() + if getattr(args, "func", None) is None: + ap.print_help() + return 0 + try: + return args.func(args) + except UsageError as exc: + ap.error(str(exc)) + return 1 + except TransientFailure as exc: + print("TransientFailure: %s" % str(exc)) + # This return code is treated as a retryable transient infrastructure + # error by Facebook's internal CI, rather than eg: a build or code + # related error that needs to be fixed before progress can be made. + return 128 + except subprocess.CalledProcessError as exc: + print("%s" % str(exc), file=sys.stderr) + print("!! Failed", file=sys.stderr) + return 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/build/fbcode_builder/getdeps/__init__.py b/build/fbcode_builder/getdeps/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/fbcode_builder/getdeps/builder.py b/build/fbcode_builder/getdeps/builder.py new file mode 100644 index 000000000..5a5ea2303 --- /dev/null +++ b/build/fbcode_builder/getdeps/builder.py @@ -0,0 +1,1326 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +# pyre-unsafe + +import glob +import json +import os +import pathlib +import shutil +import stat +import subprocess +import sys +import typing +from typing import Optional + +from .dyndeps import create_dyn_dep_munger +from .envfuncs import add_path_entry, Env, path_search +from .fetcher import copy_if_different +from .runcmd import run_cmd + +if typing.TYPE_CHECKING: + from .buildopts import BuildOptions + + +class BuilderBase(object): + def __init__( + self, + loader, + dep_manifests, # manifests of dependencies + build_opts: "BuildOptions", + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + env=None, + final_install_prefix=None, + ) -> None: + self.env = Env() + if env: + self.env.update(env) + + subdir = manifest.get("build", "subdir", ctx=ctx) + if subdir: + src_dir = os.path.join(src_dir, subdir) + + self.patchfile = manifest.get("build", "patchfile", ctx=ctx) + self.patchfile_opts = manifest.get("build", "patchfile_opts", ctx=ctx) or "" + self.ctx = ctx + self.src_dir = src_dir + self.build_dir = build_dir or src_dir + self.inst_dir = inst_dir + self.build_opts = build_opts + self.manifest = manifest + self.final_install_prefix = final_install_prefix + self.loader = loader + self.dep_manifests = dep_manifests + self.install_dirs = [loader.get_project_install_dir(m) for m in dep_manifests] + + def _get_cmd_prefix(self): + if self.build_opts.is_windows(): + vcvarsall = self.build_opts.get_vcvars_path() + if vcvarsall is not None: + # Since it sets rather a large number of variables we mildly abuse + # the cmd quoting rules to assemble a command that calls the script + # to prep the environment and then triggers the actual command that + # we wanted to run. + + # Due to changes in vscrsall.bat, it now reports an ERRORLEVEL of 1 + # even when succeeding. This occurs when an extension is not present. + # To continue, we must ignore the ERRORLEVEL returned. We do this by + # wrapping the call in a batch file that always succeeds. 
+ wrapper = os.path.join(self.build_dir, "succeed.bat") + with open(wrapper, "w") as f: + f.write("@echo off\n") + f.write(f'call "{vcvarsall}" amd64\n') + f.write("set ERRORLEVEL=0\n") + f.write("exit /b 0\n") + return [wrapper, "&&"] + return [] + + def _run_cmd( + self, + cmd, + cwd=None, + env=None, + use_cmd_prefix: bool = True, + allow_fail: bool = False, + ) -> int: + if env: + e = self.env.copy() + e.update(env) + env = e + else: + env = self.env + + if use_cmd_prefix: + cmd_prefix = self._get_cmd_prefix() + if cmd_prefix: + cmd = cmd_prefix + cmd + + log_file = os.path.join(self.build_dir, "getdeps_build.log") + return run_cmd( + cmd=cmd, + env=env, + cwd=cwd or self.build_dir, + log_file=log_file, + allow_fail=allow_fail, + ) + + def _reconfigure(self, reconfigure: bool) -> bool: + if self.build_dir is not None: + if not os.path.isdir(self.build_dir): + os.makedirs(self.build_dir) + reconfigure = True + return reconfigure + + def _apply_patchfile(self) -> None: + if self.patchfile is None: + return + patched_sentinel_file = pathlib.Path(self.src_dir + "/.getdeps_patched") + if patched_sentinel_file.exists(): + return + old_wd = os.getcwd() + os.chdir(self.src_dir) + print(f"Patching {self.manifest.name} with {self.patchfile} in {self.src_dir}") + patchfile = os.path.join( + self.build_opts.fbcode_builder_dir, "patches", self.patchfile + ) + patchcmd = ["git", "apply", "--ignore-space-change"] + if self.patchfile_opts: + patchcmd.append(self.patchfile_opts) + try: + subprocess.check_call(patchcmd + [patchfile]) + except subprocess.CalledProcessError: + raise ValueError(f"Failed to apply patch to {self.manifest.name}") + os.chdir(old_wd) + patched_sentinel_file.touch() + + def prepare(self, reconfigure: bool) -> None: + print("Preparing %s..." 
% self.manifest.name) + reconfigure = self._reconfigure(reconfigure) + self._apply_patchfile() + self._prepare(reconfigure=reconfigure) + + def debug(self, reconfigure: bool) -> None: + reconfigure = self._reconfigure(reconfigure) + self._apply_patchfile() + self._prepare(reconfigure=reconfigure) + env = self._compute_env() + print("Starting a shell in %s, ^D to exit..." % self.build_dir) + # TODO: print the command to run the build + shell = ["powershell.exe"] if sys.platform == "win32" else ["/bin/sh", "-i"] + self._run_cmd(shell, cwd=self.build_dir, env=env) + + def build(self, reconfigure: bool) -> None: + print("Building %s..." % self.manifest.name) + reconfigure = self._reconfigure(reconfigure) + self._apply_patchfile() + self._prepare(reconfigure=reconfigure) + self._build(reconfigure=reconfigure) + + if self.build_opts.free_up_disk: + # don't clean --src-dir=. case as user may want to build again or run tests on the build + if self.src_dir.startswith(self.build_opts.scratch_dir) and os.path.isdir( + self.build_dir + ): + if os.path.islink(self.build_dir): + os.remove(self.build_dir) + else: + shutil.rmtree(self.build_dir) + elif self.build_opts.is_windows(): + # On Windows, emit a wrapper script that can be used to run build artifacts + # directly from the build directory, without installing them. On Windows $PATH + # needs to be updated to include all of the directories containing the runtime + # library dependencies in order to run the binaries. + script_path = self.get_dev_run_script_path() + dep_munger = create_dyn_dep_munger( + self.build_opts, self._compute_env(), self.install_dirs + ) + dep_dirs = self.get_dev_run_extra_path_dirs(dep_munger) + # pyre-fixme[16]: Optional type has no attribute `emit_dev_run_script`. + dep_munger.emit_dev_run_script(script_path, dep_dirs) + + @property + def num_jobs(self) -> int: + # This is a hack, but we don't have a "defaults manifest" that we can + # customize per platform. 
+ # TODO: Introduce some sort of defaults config that can select by + # platform, just like manifest contexts. + if sys.platform.startswith("freebsd"): + # clang on FreeBSD is quite memory-efficient. + default_job_weight = 512 + else: + # 1.5 GiB is a lot to assume, but it's typical of Facebook-style C++. + # Some manifests are even heavier and should override. + default_job_weight = 1536 + return self.build_opts.get_num_jobs( + int( + self.manifest.get( + "build", "job_weight_mib", default_job_weight, ctx=self.ctx + ) + ) + ) + + def run_tests(self, schedule_type, owner, test_filter, retry, no_testpilot) -> None: + """Execute any tests that we know how to run. If they fail, + raise an exception.""" + pass + + def _prepare(self, reconfigure) -> None: + """Prepare the build. Useful when need to generate config, + but builder is not the primary build system. + e.g. cargo when called from cmake""" + pass + + def _build(self, reconfigure) -> None: + """Perform the build. + reconfigure will be set to true if the fetcher determined + that the sources have changed in such a way that the build + system needs to regenerate its rules.""" + pass + + def _compute_env(self): + # CMAKE_PREFIX_PATH is only respected when passed through the + # environment, so we construct an appropriate path to pass down + return self.build_opts.compute_env_for_install_dirs( + self.loader, + self.dep_manifests, + self.ctx, + env=self.env, + manifest=self.manifest, + ) + + def get_dev_run_script_path(self): + assert self.build_opts.is_windows() + return os.path.join(self.build_dir, "run.ps1") + + def get_dev_run_extra_path_dirs(self, dep_munger=None): + assert self.build_opts.is_windows() + if dep_munger is None: + dep_munger = create_dyn_dep_munger( + self.build_opts, self._compute_env(), self.install_dirs + ) + return dep_munger.compute_dependency_paths(self.build_dir) + + +class MakeBuilder(BuilderBase): + def __init__( + self, + loader, + dep_manifests, + build_opts, + ctx, + manifest, + 
src_dir, + build_dir, + inst_dir, + build_args, + install_args, + test_args, + ) -> None: + super(MakeBuilder, self).__init__( + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + ) + self.build_args = build_args or [] + self.install_args = install_args or [] + self.test_args = test_args + + @property + def _make_binary(self): + return self.manifest.get("build", "make_binary", "make", ctx=self.ctx) + + def _get_prefix(self): + return ["PREFIX=" + self.inst_dir, "prefix=" + self.inst_dir] + + def _build(self, reconfigure) -> None: + + env = self._compute_env() + + # Need to ensure that PREFIX is set prior to install because + # libbpf uses it when generating its pkg-config file. + # The lowercase prefix is used by some projects. + cmd = ( + [self._make_binary, "-j%s" % self.num_jobs] + + self.build_args + + self._get_prefix() + ) + self._run_cmd(cmd, env=env) + + install_cmd = [self._make_binary] + self.install_args + self._get_prefix() + self._run_cmd(install_cmd, env=env) + + # bz2's Makefile doesn't install its .so properly + if self.manifest and self.manifest.name == "bz2": + libdir = os.path.join(self.inst_dir, "lib") + srcpattern = os.path.join(self.src_dir, "lib*.so.*") + print(f"copying to {libdir} from {srcpattern}") + for file in glob.glob(srcpattern): + shutil.copy(file, libdir) + + def run_tests(self, schedule_type, owner, test_filter, retry, no_testpilot) -> None: + if not self.test_args: + return + + env = self._compute_env() + + cmd = [self._make_binary] + self.test_args + self._get_prefix() + self._run_cmd(cmd, env=env) + + +class CMakeBootStrapBuilder(MakeBuilder): + def _build(self, reconfigure) -> None: + self._run_cmd( + [ + "./bootstrap", + "--prefix=" + self.inst_dir, + f"--parallel={self.num_jobs}", + ] + ) + super(CMakeBootStrapBuilder, self)._build(reconfigure) + + +class AutoconfBuilder(BuilderBase): + def __init__( + self, + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + 
build_dir, + inst_dir, + args, + conf_env_args, + ) -> None: + super(AutoconfBuilder, self).__init__( + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + ) + self.args = args or [] + self.conf_env_args = conf_env_args or {} + + @property + def _make_binary(self): + return self.manifest.get("build", "make_binary", "make", ctx=self.ctx) + + def _build(self, reconfigure) -> None: + configure_path = os.path.join(self.src_dir, "configure") + autogen_path = os.path.join(self.src_dir, "autogen.sh") + + env = self._compute_env() + + # Some configure scripts need additional env values passed derived from cmds + for k, cmd_args in self.conf_env_args.items(): + out = ( + subprocess.check_output(cmd_args, env=dict(env.items())) + .decode("utf-8") + .strip() + ) + if out: + env.set(k, out) + + if not os.path.exists(configure_path): + print("%s doesn't exist, so reconfiguring" % configure_path) + # This libtoolize call is a bit gross; the issue is that + # `autoreconf` as invoked by libsodium's `autogen.sh` doesn't + # seem to realize that it should invoke libtoolize and then + # error out when the configure script references a libtool + # related symbol. + self._run_cmd(["libtoolize"], cwd=self.src_dir, env=env) + + # We generally prefer to call the `autogen.sh` script provided + # by the project on the basis that it may know more than plain + # autoreconf does. 
+ if os.path.exists(autogen_path): + self._run_cmd(["bash", autogen_path], cwd=self.src_dir, env=env) + else: + self._run_cmd(["autoreconf", "-ivf"], cwd=self.src_dir, env=env) + configure_cmd = [configure_path, "--prefix=" + self.inst_dir] + self.args + self._run_cmd(configure_cmd, env=env) + only_install = self.manifest.get("build", "only_install", "false", ctx=self.ctx) + if not only_install: + self._run_cmd([self._make_binary, "-j%s" % self.num_jobs], env=env) + self._run_cmd([self._make_binary, "install"], env=env) + + +class Iproute2Builder(BuilderBase): + # ./configure --prefix does not work for iproute2. + # Thus, explicitly copy sources from src_dir to build_dir, build, + # and then install to inst_dir using DESTDIR + # lastly, also copy include from build_dir to inst_dir + def __init__( + self, + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + ) -> None: + super(Iproute2Builder, self).__init__( + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + ) + + def _build(self, reconfigure) -> None: + configure_path = os.path.join(self.src_dir, "configure") + env = self.env.copy() + self._run_cmd([configure_path], env=env) + shutil.rmtree(self.build_dir) + shutil.copytree(self.src_dir, self.build_dir) + self._run_cmd(["make", "-j%s" % self.num_jobs], env=env) + install_cmd = ["make", "install", "DESTDIR=" + self.inst_dir] + + for d in ["include", "lib"]: + if not os.path.isdir(os.path.join(self.inst_dir, d)): + shutil.copytree( + os.path.join(self.build_dir, d), os.path.join(self.inst_dir, d) + ) + + self._run_cmd(install_cmd, env=env) + + +class CMakeBuilder(BuilderBase): + MANUAL_BUILD_SCRIPT = """\ +#!{sys.executable} + + +import argparse +import subprocess +import sys + +CMAKE = {cmake!r} +CTEST = {ctest!r} +SRC_DIR = {src_dir!r} +BUILD_DIR = {build_dir!r} +INSTALL_DIR = {install_dir!r} +CMD_PREFIX = {cmd_prefix!r} +CMAKE_ENV = {env_str} +CMAKE_DEFINE_ARGS = 
{define_args_str} + + +def get_jobs_argument(num_jobs_arg: int) -> str: + if num_jobs_arg > 0: + return "-j" + str(num_jobs_arg) + + import multiprocessing + num_jobs = multiprocessing.cpu_count() // 2 + return "-j" + str(num_jobs) + + +def main(): + ap = argparse.ArgumentParser() + ap.add_argument( + "cmake_args", + nargs=argparse.REMAINDER, + help='Any extra arguments after an "--" argument will be passed ' + "directly to CMake." + ) + ap.add_argument( + "--mode", + choices=["configure", "build", "install", "test"], + default="configure", + help="The mode to run: configure, build, or install. " + "Defaults to configure", + ) + ap.add_argument( + "--build", + action="store_const", + const="build", + dest="mode", + help="An alias for --mode=build", + ) + ap.add_argument( + "-j", + "--num-jobs", + action="store", + type=int, + default=0, + help="Run the build or tests with the specified number of parallel jobs", + ) + ap.add_argument( + "--install", + action="store_const", + const="install", + dest="mode", + help="An alias for --mode=install", + ) + ap.add_argument( + "--test", + action="store_const", + const="test", + dest="mode", + help="An alias for --mode=test", + ) + args = ap.parse_args() + + # Strip off a leading "--" from the additional CMake arguments + if args.cmake_args and args.cmake_args[0] == "--": + args.cmake_args = args.cmake_args[1:] + + env = CMAKE_ENV + + if args.mode == "configure": + full_cmd = CMD_PREFIX + [CMAKE, SRC_DIR] + CMAKE_DEFINE_ARGS + args.cmake_args + elif args.mode in ("build", "install"): + target = "all" if args.mode == "build" else "install" + full_cmd = CMD_PREFIX + [ + CMAKE, + "--build", + BUILD_DIR, + "--target", + target, + "--config", + "{build_type}", + get_jobs_argument(args.num_jobs), + ] + args.cmake_args + elif args.mode == "test": + full_cmd = CMD_PREFIX + [ + {dev_run_script}CTEST, + "--output-on-failure", + get_jobs_argument(args.num_jobs), + ] + args.cmake_args + else: + ap.error("unknown invocation mode: %s" % 
(args.mode,)) + + cmd_str = " ".join(full_cmd) + print("Running: %r" % (cmd_str,)) + proc = subprocess.run(full_cmd, env=env, cwd=BUILD_DIR) + sys.exit(proc.returncode) + + +if __name__ == "__main__": + main() +""" + + def __init__( + self, + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + defines, + final_install_prefix=None, + extra_cmake_defines=None, + cmake_target="install", + ) -> None: + super(CMakeBuilder, self).__init__( + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + final_install_prefix=final_install_prefix, + ) + self.defines = defines or {} + if extra_cmake_defines: + self.defines.update(extra_cmake_defines) + self.cmake_target = cmake_target + + try: + from .facebook.vcvarsall import extra_vc_cmake_defines + except ImportError: + pass + else: + self.defines.update(extra_vc_cmake_defines) + + self.loader = loader + if build_opts.shared_libs: + self.defines["BUILD_SHARED_LIBS"] = "ON" + + def _invalidate_cache(self) -> None: + for name in [ + "CMakeCache.txt", + "CMakeFiles/CMakeError.log", + "CMakeFiles/CMakeOutput.log", + ]: + name = os.path.join(self.build_dir, name) + if os.path.isdir(name): + shutil.rmtree(name) + elif os.path.exists(name): + os.unlink(name) + + def _needs_reconfigure(self) -> bool: + for name in ["CMakeCache.txt", "build.ninja"]: + name = os.path.join(self.build_dir, name) + if not os.path.exists(name): + return True + return False + + def _write_build_script(self, **kwargs) -> None: + env_lines = [" {!r}: {!r},".format(k, v) for k, v in kwargs["env"].items()] + kwargs["env_str"] = "\n".join(["{"] + env_lines + ["}"]) + + if self.build_opts.is_windows(): + kwargs["dev_run_script"] = '"powershell.exe", {!r}, '.format( + self.get_dev_run_script_path() + ) + else: + kwargs["dev_run_script"] = "" + + define_arg_lines = ["["] + for arg in kwargs["define_args"]: + # Replace the CMAKE_INSTALL_PREFIX argument to use the INSTALL_DIR + # 
variable that we define in the MANUAL_BUILD_SCRIPT code. + if arg.startswith("-DCMAKE_INSTALL_PREFIX="): + value = " {!r}.format(INSTALL_DIR),".format( + "-DCMAKE_INSTALL_PREFIX={}" + ) + else: + value = " {!r},".format(arg) + define_arg_lines.append(value) + define_arg_lines.append("]") + kwargs["define_args_str"] = "\n".join(define_arg_lines) + + # In order to make it easier for developers to manually run builds for + # CMake-based projects, write out some build scripts that can be used to invoke + # CMake manually. + build_script_path = os.path.join(self.build_dir, "run_cmake.py") + script_contents = self.MANUAL_BUILD_SCRIPT.format(**kwargs) + with open(build_script_path, "wb") as f: + f.write(script_contents.encode()) + os.chmod(build_script_path, 0o755) + + def _compute_cmake_define_args(self, env): + defines = { + "CMAKE_INSTALL_PREFIX": self.final_install_prefix or self.inst_dir, + "BUILD_SHARED_LIBS": "OFF", + # Some of the deps (rsocket) default to UBSAN enabled if left + # unspecified. Some of the deps fail to compile in release mode + # due to warning->error promotion. RelWithDebInfo is the happy + # medium. + "CMAKE_BUILD_TYPE": self.build_opts.build_type, + } + + if "SANDCASTLE" not in os.environ: + # We sometimes see intermittent ccache related breakages on some + # of the FB internal CI hosts, so we prefer to disable ccache + # when running in that environment. + ccache = path_search(env, "ccache") + if ccache: + defines["CMAKE_CXX_COMPILER_LAUNCHER"] = ccache + else: + # rocksdb does its own probing for ccache. + # Ensure that it is disabled on sandcastle + env["CCACHE_DISABLE"] = "1" + # Some sandcastle hosts have broken ccache related dirs, and + # even though we've asked for it to be disabled ccache is + # still invoked by rocksdb's cmake. + # Redirect its config directory to somewhere that is guaranteed + # fresh to us, and that won't have any ccache data inside. 
+ env["CCACHE_DIR"] = f"{self.build_opts.scratch_dir}/ccache" + + if "GITHUB_ACTIONS" in os.environ and self.build_opts.is_windows(): + # GitHub actions: the host has both gcc and msvc installed, and + # the default behavior of cmake is to prefer gcc. + # Instruct cmake that we want it to use cl.exe; this is important + # because Boost prefers cl.exe and the mismatch results in cmake + # with gcc not being able to find boost built with cl.exe. + defines["CMAKE_C_COMPILER"] = "cl.exe" + defines["CMAKE_CXX_COMPILER"] = "cl.exe" + + if self.build_opts.is_darwin(): + # Try to persuade cmake to set the rpath to match the lib + # dirs of the dependencies. This isn't automatic, and to + # make things more interesting, cmake uses `;` as the path + # separator, so translate the runtime path to something + # that cmake will parse + defines["CMAKE_INSTALL_RPATH"] = ";".join( + env.get("DYLD_LIBRARY_PATH", "").split(":") + ) + # Tell cmake that we want to set the rpath in the tree + # at build time. Without this the rpath is only set + # at the moment that the binaries are installed. That + # default is problematic for example when using the + # gtest integration in cmake which runs the built test + # executables during the build to discover the set of + # tests. 
+ defines["CMAKE_BUILD_WITH_INSTALL_RPATH"] = "ON" + + boost_169_is_required = False + if self.loader: + for m in self.loader.manifests_in_dependency_order(): + preinstalled = m.get_section_as_dict("preinstalled.env", self.ctx) + boost_169_is_required = "BOOST_ROOT_1_69_0" in preinstalled.keys() + if boost_169_is_required: + break + + if ( + boost_169_is_required + and self.build_opts.allow_system_packages + and self.build_opts.host_type.get_package_manager() + and self.build_opts.host_type.get_package_manager() == "rpm" + ): + # Boost 1.69 rpms don't install cmake config to the system, so to point to them explicitly + defines["BOOST_INCLUDEDIR"] = "/usr/include/boost169" + defines["BOOST_LIBRARYDIR"] = "/usr/lib64/boost169" + + defines.update(self.defines) + define_args = ["-D%s=%s" % (k, v) for (k, v) in defines.items()] + + # if self.build_opts.is_windows(): + # define_args += ["-G", "Visual Studio 15 2017 Win64"] + define_args += ["-G", "Ninja"] + + return define_args + + def _build(self, reconfigure: bool) -> None: + reconfigure = reconfigure or self._needs_reconfigure() + + env = self._compute_env() + if not self.build_opts.is_windows() and self.final_install_prefix: + env["DESTDIR"] = self.inst_dir + + # Resolve the cmake that we installed + cmake = path_search(env, "cmake") + if cmake is None: + raise Exception("Failed to find CMake") + + if reconfigure: + define_args = self._compute_cmake_define_args(env) + self._write_build_script( + cmd_prefix=self._get_cmd_prefix(), + cmake=cmake, + ctest=path_search(env, "ctest"), + env=env, + define_args=define_args, + src_dir=self.src_dir, + build_dir=self.build_dir, + install_dir=self.inst_dir, + sys=sys, + build_type=self.build_opts.build_type, + ) + + self._invalidate_cache() + self._run_cmd([cmake, self.src_dir] + define_args, env=env) + + self._run_cmd( + [ + cmake, + "--build", + self.build_dir, + "--target", + self.cmake_target, + "--config", + self.build_opts.build_type, + "-j", + str(self.num_jobs), + ], + 
env=env, + ) + + def run_tests( + self, schedule_type, owner, test_filter, retry: int, no_testpilot + ) -> None: + env = self._compute_env() + ctest = path_search(env, "ctest") + cmake = path_search(env, "cmake") + + def require_command(path: Optional[str], name: str) -> str: + if path is None: + raise RuntimeError("unable to find command `{}`".format(name)) + return path + + # On Windows, we also need to update $PATH to include the directories that + # contain runtime library dependencies. This is not needed on other platforms + # since CMake will emit RPATH properly in the binary so they can find these + # dependencies. + if self.build_opts.is_windows(): + path_entries = self.get_dev_run_extra_path_dirs() + path = env.get("PATH") + if path: + path_entries.insert(0, path) + env["PATH"] = ";".join(path_entries) + + # Don't use the cmd_prefix when running tests. This is vcvarsall.bat on + # Windows. vcvarsall.bat is only needed for the build, not tests. It + # unfortunately fails if invoked with a long PATH environment variable when + # running the tests. + use_cmd_prefix = False + + def get_property(test, propname, defval=None): + """extracts a named property from a cmake test info json blob. + The properties look like: + [{"name": "WORKING_DIRECTORY"}, + {"value": "something"}] + We assume that it is invalid for the same named property to be + listed more than once. + """ + props = test.get("properties", []) + for p in props: + if p.get("name", None) == propname: + return p.get("value", defval) + return defval + + def list_tests(): + output = subprocess.check_output( + [require_command(ctest, "ctest"), "--show-only=json-v1"], + env=env, + cwd=self.build_dir, + ) + try: + data = json.loads(output.decode("utf-8")) + except ValueError as exc: + raise Exception( + "Failed to decode cmake test info using %s: %s. 
Output was: %r" + % (ctest, str(exc), output) + ) + + tests = [] + machine_suffix = self.build_opts.host_type.as_tuple_string() + for test in data["tests"]: + working_dir = get_property(test, "WORKING_DIRECTORY") + labels = [] + machine_suffix = self.build_opts.host_type.as_tuple_string() + labels.append("tpx-fb-test-type=3") + labels.append("tpx_test_config::buildsystem=getdeps") + labels.append("tpx_test_config::platform={}".format(machine_suffix)) + + if get_property(test, "DISABLED"): + labels.append("disabled") + command = test["command"] + if working_dir: + command = [ + require_command(cmake, "cmake"), + "-E", + "chdir", + working_dir, + ] + command + + import os + + tests.append( + { + "type": "custom", + "target": "%s-%s-getdeps-%s" + % (self.manifest.name, test["name"], machine_suffix), + "command": command, + "labels": labels, + "env": {}, + "required_paths": [], + "contacts": [], + "cwd": os.getcwd(), + } + ) + return tests + + if schedule_type == "continuous" or schedule_type == "testwarden": + # for continuous and testwarden runs, disabling retry can give up + # better signals for flaky tests. 
+ retry = 0 + + tpx = path_search(env, "tpx") + if tpx and not no_testpilot: + buck_test_info = list_tests() + import os + + from .facebook.testinfra import start_run + + buck_test_info_name = os.path.join(self.build_dir, ".buck-test-info.json") + with open(buck_test_info_name, "w") as f: + json.dump(buck_test_info, f) + + env.set("http_proxy", "") + env.set("https_proxy", "") + runs = [] + + with start_run(env["FBSOURCE_HASH"]) as run_id: + testpilot_args = [ + tpx, + "--force-local-execution", + "--buck-test-info", + buck_test_info_name, + "--retry=%d" % retry, + "-j=%s" % str(self.num_jobs), + "--print-long-results", + ] + + if owner: + testpilot_args += ["--contacts", owner] + + if env: + testpilot_args.append("--env") + testpilot_args.extend(f"{key}={val}" for key, val in env.items()) + + if run_id is not None: + testpilot_args += ["--run-id", run_id] + + if test_filter: + testpilot_args += ["--", test_filter] + + if schedule_type == "diff": + runs.append(["--collection", "oss-diff", "--purpose", "diff"]) + elif schedule_type == "continuous": + runs.append( + [ + "--tag-new-tests", + "--collection", + "oss-continuous", + "--purpose", + "continuous", + ] + ) + elif schedule_type == "testwarden": + # One run to assess new tests + runs.append( + [ + "--tag-new-tests", + "--collection", + "oss-new-test-stress", + "--stress-runs", + "10", + "--purpose", + "stress-run-new-test", + ] + ) + # And another for existing tests + runs.append( + [ + "--tag-new-tests", + "--collection", + "oss-existing-test-stress", + "--stress-runs", + "10", + "--purpose", + "stress-run", + ] + ) + else: + runs.append([]) + + for run in runs: + self._run_cmd( + testpilot_args + run, + cwd=self.build_opts.fbcode_builder_dir, + env=env, + use_cmd_prefix=use_cmd_prefix, + ) + else: + args = [ + require_command(ctest, "ctest"), + "--output-on-failure", + "-j", + str(self.num_jobs), + ] + if test_filter: + args += ["-R", test_filter] + + count = 0 + while count <= retry: + retcode = 
self._run_cmd( + args, env=env, use_cmd_prefix=use_cmd_prefix, allow_fail=True + ) + + if retcode == 0: + break + if count == 0: + # Only add this option in the second run. + args += ["--rerun-failed"] + count += 1 + # pyre-fixme[61]: `retcode` is undefined, or not always defined. + if retcode != 0: + # Allow except clause in getdeps.main to catch and exit gracefully + # This allows non-testpilot runs to fail through the same logic as failed testpilot runs, which may become handy in case if post test processing is needed in the future + # pyre-fixme[61]: `retcode` is undefined, or not always defined. + raise subprocess.CalledProcessError(retcode, args) + + +class NinjaBootstrap(BuilderBase): + def __init__( + self, + loader, + dep_manifests, + build_opts, + ctx, + manifest, + build_dir, + src_dir, + inst_dir, + ) -> None: + super(NinjaBootstrap, self).__init__( + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + ) + + def _build(self, reconfigure) -> None: + self._run_cmd([sys.executable, "configure.py", "--bootstrap"], cwd=self.src_dir) + src_ninja = os.path.join(self.src_dir, "ninja") + dest_ninja = os.path.join(self.inst_dir, "bin/ninja") + bin_dir = os.path.dirname(dest_ninja) + if not os.path.exists(bin_dir): + os.makedirs(bin_dir) + shutil.copyfile(src_ninja, dest_ninja) + shutil.copymode(src_ninja, dest_ninja) + + +class OpenSSLBuilder(BuilderBase): + def __init__( + self, + loader, + dep_manifests, + build_opts, + ctx, + manifest, + build_dir, + src_dir, + inst_dir, + ) -> None: + super(OpenSSLBuilder, self).__init__( + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + ) + + def _build(self, reconfigure) -> None: + configure = os.path.join(self.src_dir, "Configure") + + # prefer to resolve the perl that we installed from + # our manifest on windows, but fall back to the system + # path on eg: darwin + env = self.env.copy() + for m in self.dep_manifests: + bindir = 
os.path.join(self.loader.get_project_install_dir(m), "bin") + add_path_entry(env, "PATH", bindir, append=False) + + perl = typing.cast(str, path_search(env, "perl", "perl")) + + make_j_args = [] + if self.build_opts.is_windows(): + make = "nmake.exe" + args = ["VC-WIN64A-masm", "-utf-8"] + elif self.build_opts.is_darwin(): + make = "make" + make_j_args = ["-j%s" % self.num_jobs] + args = ( + ["darwin64-x86_64-cc"] + if not self.build_opts.is_arm() + else ["darwin64-arm64-cc"] + ) + elif self.build_opts.is_linux(): + make = "make" + make_j_args = ["-j%s" % self.num_jobs] + args = ( + ["linux-x86_64"] if not self.build_opts.is_arm() else ["linux-aarch64"] + ) + else: + raise Exception("don't know how to build openssl for %r" % self.ctx) + + self._run_cmd( + [ + perl, + configure, + "--prefix=%s" % self.inst_dir, + "--openssldir=%s" % self.inst_dir, + ] + + args + + [ + "enable-static-engine", + "enable-capieng", + "no-makedepend", + "no-unit-test", + "no-tests", + ] + ) + make_build = [make] + make_j_args + self._run_cmd(make_build) + make_install = [make, "install_sw", "install_ssldirs"] + self._run_cmd(make_install) + + +class Boost(BuilderBase): + def __init__( + self, + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + b2_args, + ) -> None: + children = os.listdir(src_dir) + assert len(children) == 1, "expected a single directory entry: %r" % (children,) + boost_src = children[0] + assert boost_src.startswith("boost") + src_dir = os.path.join(src_dir, children[0]) + super(Boost, self).__init__( + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + ) + self.b2_args = b2_args + + def _build(self, reconfigure) -> None: + env = self._compute_env() + linkage = ["static"] + if self.build_opts.is_windows() or self.build_opts.shared_libs: + linkage.append("shared") + + args = [] + if self.build_opts.is_darwin(): + clang = subprocess.check_output(["xcrun", "--find", "clang"]) + 
user_config = os.path.join(self.build_dir, "project-config.jam") + with open(user_config, "w") as jamfile: + jamfile.write("using clang : : %s ;\n" % clang.decode().strip()) + args.append("--user-config=%s" % user_config) + + for link in linkage: + bootstrap_args = self.manifest.get_section_as_args( + "bootstrap.args", self.ctx + ) + if self.build_opts.is_windows(): + bootstrap = os.path.join(self.src_dir, "bootstrap.bat") + self._run_cmd([bootstrap] + bootstrap_args, cwd=self.src_dir, env=env) + args += ["address-model=64"] + else: + bootstrap = os.path.join(self.src_dir, "bootstrap.sh") + self._run_cmd( + [bootstrap, "--prefix=%s" % self.inst_dir] + bootstrap_args, + cwd=self.src_dir, + env=env, + ) + + b2 = os.path.join(self.src_dir, "b2") + self._run_cmd( + [ + b2, + "-j%s" % self.num_jobs, + "--prefix=%s" % self.inst_dir, + "--builddir=%s" % self.build_dir, + ] + + args + + self.b2_args + + [ + "link=%s" % link, + "runtime-link=shared", + "variant=release", + "threading=multi", + "debug-symbols=on", + "visibility=global", + "-d2", + "install", + ], + cwd=self.src_dir, + env=env, + ) + + +class NopBuilder(BuilderBase): + def __init__( + self, loader, dep_manifests, build_opts, ctx, manifest, src_dir, inst_dir + ) -> None: + super(NopBuilder, self).__init__( + loader, dep_manifests, build_opts, ctx, manifest, src_dir, None, inst_dir + ) + + def build(self, reconfigure: bool) -> None: + print("Installing %s -> %s" % (self.src_dir, self.inst_dir)) + parent = os.path.dirname(self.inst_dir) + if not os.path.exists(parent): + os.makedirs(parent) + + install_files = self.manifest.get_section_as_ordered_pairs( + "install.files", self.ctx + ) + if install_files: + for src_name, dest_name in self.manifest.get_section_as_ordered_pairs( + "install.files", self.ctx + ): + full_dest = os.path.join(self.inst_dir, dest_name) + full_src = os.path.join(self.src_dir, src_name) + + dest_parent = os.path.dirname(full_dest) + if not os.path.exists(dest_parent): + 
os.makedirs(dest_parent) + if os.path.isdir(full_src): + if not os.path.exists(full_dest): + shutil.copytree(full_src, full_dest) + else: + shutil.copyfile(full_src, full_dest) + shutil.copymode(full_src, full_dest) + # This is a bit gross, but the mac ninja.zip doesn't + # give ninja execute permissions, so force them on + # for things that look like they live in a bin dir + if os.path.dirname(dest_name) == "bin": + st = os.lstat(full_dest) + os.chmod(full_dest, st.st_mode | stat.S_IXUSR) + else: + if not os.path.exists(self.inst_dir): + shutil.copytree(self.src_dir, self.inst_dir) + + +class SqliteBuilder(BuilderBase): + def __init__( + self, + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + ) -> None: + super(SqliteBuilder, self).__init__( + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + ) + + def _build(self, reconfigure) -> None: + for f in ["sqlite3.c", "sqlite3.h", "sqlite3ext.h"]: + src = os.path.join(self.src_dir, f) + dest = os.path.join(self.build_dir, f) + copy_if_different(src, dest) + + cmake_lists = """ +cmake_minimum_required(VERSION 3.1.3 FATAL_ERROR) +project(sqlite3 C) +add_library(sqlite3 STATIC sqlite3.c) +# These options are taken from the defaults in Makefile.msc in +# the sqlite distribution +target_compile_definitions(sqlite3 PRIVATE + -DSQLITE_ENABLE_COLUMN_METADATA=1 + -DSQLITE_ENABLE_FTS3=1 + -DSQLITE_ENABLE_RTREE=1 + -DSQLITE_ENABLE_GEOPOLY=1 + -DSQLITE_ENABLE_JSON1=1 + -DSQLITE_ENABLE_STMTVTAB=1 + -DSQLITE_ENABLE_DBPAGE_VTAB=1 + -DSQLITE_ENABLE_DBSTAT_VTAB=1 + -DSQLITE_INTROSPECTION_PRAGMAS=1 + -DSQLITE_ENABLE_DESERIALIZE=1 +) +install(TARGETS sqlite3) +install(FILES sqlite3.h sqlite3ext.h DESTINATION include) + """ + + with open(os.path.join(self.build_dir, "CMakeLists.txt"), "w") as f: + f.write(cmake_lists) + + defines = { + "CMAKE_INSTALL_PREFIX": self.inst_dir, + "BUILD_SHARED_LIBS": "ON" if self.build_opts.shared_libs else "OFF", + 
"CMAKE_BUILD_TYPE": "RelWithDebInfo", + } + define_args = ["-D%s=%s" % (k, v) for (k, v) in defines.items()] + define_args += ["-G", "Ninja"] + + env = self._compute_env() + + # Resolve the cmake that we installed + cmake = path_search(env, "cmake") + + self._run_cmd([cmake, self.build_dir] + define_args, env=env) + self._run_cmd( + [ + cmake, + "--build", + self.build_dir, + "--target", + "install", + "--config", + self.build_opts.build_type, + "-j", + str(self.num_jobs), + ], + env=env, + ) diff --git a/build/fbcode_builder/getdeps/buildopts.py b/build/fbcode_builder/getdeps/buildopts.py new file mode 100644 index 000000000..47c49a15f --- /dev/null +++ b/build/fbcode_builder/getdeps/buildopts.py @@ -0,0 +1,670 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# pyre-unsafe + +import errno +import glob +import ntpath +import os +import subprocess +import sys +import tempfile +from typing import Mapping, Optional + +from .copytree import containing_repo_type +from .envfuncs import add_flag, add_path_entry, Env +from .fetcher import get_fbsource_repo_data, homebrew_package_prefix +from .manifest import ContextGenerator +from .platform import get_available_ram, HostType, is_windows + + +def detect_project(path): + repo_type, repo_root = containing_repo_type(path) + if repo_type is None: + return None, None + + # Look for a .projectid file. If it exists, read the project name from it. 
+ project_id_path = os.path.join(repo_root, ".projectid") + try: + with open(project_id_path, "r") as f: + project_name = f.read().strip() + return repo_root, project_name + except EnvironmentError as ex: + if ex.errno != errno.ENOENT: + raise + + return repo_root, None + + +class BuildOptions(object): + def __init__( + self, + fbcode_builder_dir, + scratch_dir, + host_type, + install_dir=None, + num_jobs: int = 0, + use_shipit: bool = False, + vcvars_path=None, + allow_system_packages: bool = False, + lfs_path=None, + shared_libs: bool = False, + facebook_internal=None, + free_up_disk: bool = False, + build_type: Optional[str] = None, + ) -> None: + """fbcode_builder_dir - the path to either the in-fbsource fbcode_builder dir, + or for shipit-transformed repos, the build dir that + has been mapped into that dir. + scratch_dir - a place where we can store repos and build bits. + This path should be stable across runs and ideally + should not be in the repo of the project being built, + but that is ultimately where we generally fall back + for builds outside of FB + install_dir - where the project will ultimately be installed + num_jobs - the level of concurrency to use while building + use_shipit - use real shipit instead of the simple shipit transformer + vcvars_path - Path to external VS toolchain's vsvarsall.bat + shared_libs - whether to build shared libraries + free_up_disk - take extra actions to save runner disk space + build_type - CMAKE_BUILD_TYPE, used by cmake and cargo builders + """ + + if not install_dir: + install_dir = os.path.join(scratch_dir, "installed") + + self.project_hashes = None + for p in ["../deps/github_hashes", "../project_hashes"]: + hashes = os.path.join(fbcode_builder_dir, p) + if os.path.exists(hashes): + self.project_hashes = hashes + break + + # Detect what repository and project we are being run from. 
+ self.repo_root, self.repo_project = detect_project(os.getcwd()) + + # If we are running from an fbsource repository, set self.fbsource_dir + # to allow the ShipIt-based fetchers to use it. + if self.repo_project == "fbsource": + self.fbsource_dir: Optional[str] = self.repo_root + else: + self.fbsource_dir = None + + if facebook_internal is None: + if self.fbsource_dir: + facebook_internal = True + else: + facebook_internal = False + + self.facebook_internal = facebook_internal + self.specified_num_jobs = num_jobs + self.scratch_dir = scratch_dir + self.install_dir = install_dir + self.fbcode_builder_dir = fbcode_builder_dir + self.host_type = host_type + self.use_shipit = use_shipit + self.allow_system_packages = allow_system_packages + self.lfs_path = lfs_path + self.shared_libs = shared_libs + self.free_up_disk = free_up_disk + + if build_type is None: + build_type = "RelWithDebInfo" + + self.build_type = build_type + + lib_path = None + if self.is_darwin(): + lib_path = "DYLD_LIBRARY_PATH" + elif self.is_linux(): + lib_path = "LD_LIBRARY_PATH" + elif self.is_windows(): + lib_path = "PATH" + else: + lib_path = None + self.lib_path = lib_path + + if vcvars_path is None and is_windows(): + + try: + # Allow a site-specific vcvarsall path. + from .facebook.vcvarsall import build_default_vcvarsall + except ImportError: + vcvarsall = [] + else: + vcvarsall = ( + build_default_vcvarsall(self.fbsource_dir) + if self.fbsource_dir is not None + else [] + ) + + # On Windows, the compiler is not available in the PATH by + # default so we need to run the vcvarsall script to populate the + # environment. We use a glob to find some version of this script + # as deployed with Visual Studio 2017. This logic can also + # locate Visual Studio 2019 but note that at the time of writing + # the version of boost in our manifest cannot be built with + # VS 2019, so we're effectively tied to VS 2017 until we upgrade + # the boost dependency. 
+ for year in ["2017", "2019"]: + vcvarsall += glob.glob( + os.path.join( + os.environ["ProgramFiles(x86)"], + "Microsoft Visual Studio", + year, + "*", + "VC", + "Auxiliary", + "Build", + "vcvarsall.bat", + ) + ) + vcvars_path = vcvarsall[0] + + self.vcvars_path = vcvars_path + + @property + def manifests_dir(self): + return os.path.join(self.fbcode_builder_dir, "manifests") + + def is_darwin(self): + return self.host_type.is_darwin() + + def is_windows(self): + return self.host_type.is_windows() + + def is_arm(self): + return self.host_type.is_arm() + + def get_vcvars_path(self): + return self.vcvars_path + + def is_linux(self): + return self.host_type.is_linux() + + def is_freebsd(self): + return self.host_type.is_freebsd() + + def get_num_jobs(self, job_weight: int) -> int: + """Given an estimated job_weight in MiB, compute a reasonable concurrency limit.""" + if self.specified_num_jobs: + return self.specified_num_jobs + + available_ram = get_available_ram() + + import multiprocessing + + return max(1, min(multiprocessing.cpu_count(), available_ram // job_weight)) + + def get_context_generator(self, host_tuple=None): + """Create a manifest ContextGenerator for the specified target platform.""" + if host_tuple is None: + host_type = self.host_type + elif isinstance(host_tuple, HostType): + host_type = host_tuple + else: + host_type = HostType.from_tuple_string(host_tuple) + + return ContextGenerator( + { + "os": host_type.ostype, + "distro": host_type.distro, + "distro_vers": host_type.distrovers, + "fb": "on" if self.facebook_internal else "off", + "fbsource": "on" if self.fbsource_dir else "off", + "test": "off", + "shared_libs": "on" if self.shared_libs else "off", + } + ) + + def compute_env_for_install_dirs( + self, loader, dep_manifests, ctx, env=None, manifest=None + ): # noqa: C901 + if env is not None: + env = env.copy() + else: + env = Env() + + env["GETDEPS_BUILD_DIR"] = os.path.join(self.scratch_dir, "build") + env["GETDEPS_INSTALL_DIR"] = 
self.install_dir + + # Python setuptools attempts to discover a local MSVC for + # building Python extensions. On Windows, getdeps already + # supports invoking a vcvarsall prior to compilation. + # + # Tell setuptools to bypass its own search. This fixes a bug + # where setuptools would fail when run from CMake on GitHub + # Actions with the inscrutable message 'error: Microsoft + # Visual C++ 14.0 is required. Get it with "Build Tools for + # Visual Studio"'. I suspect the actual error is that the + # environment or PATH is overflowing. + # + # For extra credit, someone could patch setuptools to + # propagate the actual error message from vcvarsall, because + # often it does not mean Visual C++ is not available. + # + # Related discussions: + # - https://github.com/pypa/setuptools/issues/2028 + # - https://github.com/pypa/setuptools/issues/2307 + # - https://developercommunity.visualstudio.com/t/error-microsoft-visual-c-140-is-required/409173 + # - https://github.com/OpenMS/OpenMS/pull/4779 + # - https://github.com/actions/virtual-environments/issues/1484 + + if self.is_windows() and self.get_vcvars_path(): + env["DISTUTILS_USE_SDK"] = "1" + + # On macOS we need to set `SDKROOT` when we use clang for system + # header files. 
+ if self.is_darwin() and "SDKROOT" not in env: + sdkroot = subprocess.check_output(["xcrun", "--show-sdk-path"]) + env["SDKROOT"] = sdkroot.decode().strip() + + if ( + self.is_darwin() + and self.allow_system_packages + and self.host_type.get_package_manager() == "homebrew" + and manifest + and manifest.resolved_system_packages + ): + # Homebrew packages may not be on the default PATHs + brew_packages = manifest.resolved_system_packages.get("homebrew", []) + for p in brew_packages: + found = self.add_homebrew_package_to_env(p, env) + # Try extra hard to find openssl, needed with homebrew on macOS + if found and p.startswith("openssl"): + candidate = homebrew_package_prefix("openssl@1.1") + if os.path.exists(candidate): + os.environ["OPENSSL_ROOT_DIR"] = candidate + env["OPENSSL_ROOT_DIR"] = os.environ["OPENSSL_ROOT_DIR"] + + if self.fbsource_dir: + env["YARN_YARN_OFFLINE_MIRROR"] = os.path.join( + self.fbsource_dir, "xplat/third-party/yarn/offline-mirror" + ) + yarn_exe = "yarn.bat" if self.is_windows() else "yarn" + env["YARN_PATH"] = os.path.join( + self.fbsource_dir, "xplat/third-party/yarn/", yarn_exe + ) + node_exe = "node-win-x64.exe" if self.is_windows() else "node" + env["NODE_BIN"] = os.path.join( + self.fbsource_dir, "xplat/third-party/node/bin/", node_exe + ) + env["RUST_VENDORED_CRATES_DIR"] = os.path.join( + self.fbsource_dir, "third-party/rust/vendor" + ) + hash_data = get_fbsource_repo_data(self) + env["FBSOURCE_HASH"] = hash_data.hash + env["FBSOURCE_DATE"] = hash_data.date + + # reverse as we are prepending to the PATHs + for m in reversed(dep_manifests): + is_direct_dep = ( + manifest is not None and m.name in manifest.get_dependencies(ctx) + ) + self.add_prefix_to_env( + loader.get_project_install_dir(m), + env, + append=False, + is_direct_dep=is_direct_dep, + ) + + # Linux is always system openssl + system_openssl = self.is_linux() + + # For other systems lets see if package is requested + if not system_openssl and manifest and 
manifest.resolved_system_packages: + for _pkg_type, pkgs in manifest.resolved_system_packages.items(): + for p in pkgs: + if p.startswith("openssl") or p.startswith("libssl"): + system_openssl = True + break + + # Let openssl know to pick up the system certs if present + if system_openssl or "OPENSSL_DIR" in env: + for system_ssl_cfg in ["/etc/pki/tls", "/etc/ssl"]: + if os.path.isdir(system_ssl_cfg): + cert_dir = system_ssl_cfg + "/certs" + if os.path.isdir(cert_dir): + env["SSL_CERT_DIR"] = cert_dir + cert_file = system_ssl_cfg + "/cert.pem" + if os.path.isfile(cert_file): + env["SSL_CERT_FILE"] = cert_file + + return env + + def add_homebrew_package_to_env(self, package, env) -> bool: + prefix = homebrew_package_prefix(package) + if prefix and os.path.exists(prefix): + return self.add_prefix_to_env( + prefix, env, append=False, add_library_path=True + ) + return False + + def add_prefix_to_env( + self, + d, + env, + append: bool = True, + add_library_path: bool = False, + is_direct_dep: bool = False, + ) -> bool: # noqa: C901 + bindir = os.path.join(d, "bin") + found = False + has_pkgconfig = False + pkgconfig = os.path.join(d, "lib", "pkgconfig") + if os.path.exists(pkgconfig): + found = True + has_pkgconfig = True + add_path_entry(env, "PKG_CONFIG_PATH", pkgconfig, append=append) + + pkgconfig = os.path.join(d, "lib64", "pkgconfig") + if os.path.exists(pkgconfig): + found = True + has_pkgconfig = True + add_path_entry(env, "PKG_CONFIG_PATH", pkgconfig, append=append) + + add_path_entry(env, "CMAKE_PREFIX_PATH", d, append=append) + + # Tell the thrift compiler about includes it needs to consider + thriftdir = os.path.join(d, "include", "thrift-files") + if os.path.exists(thriftdir): + found = True + add_path_entry(env, "THRIFT_INCLUDE_PATH", thriftdir, append=append) + + # module detection for python is old fashioned and needs flags + includedir = os.path.join(d, "include") + if os.path.exists(includedir): + found = True + ncursesincludedir = os.path.join(d, 
"include", "ncurses") + if os.path.exists(ncursesincludedir): + add_path_entry(env, "C_INCLUDE_PATH", ncursesincludedir, append=append) + add_flag(env, "CPPFLAGS", f"-I{includedir}", append=append) + add_flag(env, "CPPFLAGS", f"-I{ncursesincludedir}", append=append) + elif "/bz2-" in d: + add_flag(env, "CPPFLAGS", f"-I{includedir}", append=append) + # For non-pkgconfig projects Cabal has no way to find the includes or + # libraries, so we provide a set of extra Cabal flags in the env + if not has_pkgconfig and is_direct_dep: + add_flag( + env, + "GETDEPS_CABAL_FLAGS", + f"--extra-include-dirs={includedir}", + append=append, + ) + + # The thrift compiler's built-in includes are installed directly to the include dir + includethriftdir = os.path.join(d, "include", "thrift") + if os.path.exists(includethriftdir): + add_path_entry(env, "THRIFT_INCLUDE_PATH", includedir, append=append) + + # Map from FB python manifests to PYTHONPATH + pydir = os.path.join(d, "lib", "fb-py-libs") + if os.path.exists(pydir): + found = True + manifest_ext = ".manifest" + pymanifestfiles = [ + f + for f in os.listdir(pydir) + if f.endswith(manifest_ext) and os.path.isfile(os.path.join(pydir, f)) + ] + for f in pymanifestfiles: + subdir = f[: -len(manifest_ext)] + add_path_entry( + env, "PYTHONPATH", os.path.join(pydir, subdir), append=append + ) + + # Allow resolving shared objects built earlier (eg: zstd + # doesn't include the full path to the dylib in its linkage + # so we need to give it an assist) + if self.lib_path: + for lib in ["lib", "lib64"]: + libdir = os.path.join(d, lib) + if os.path.exists(libdir): + found = True + add_path_entry(env, self.lib_path, libdir, append=append) + # module detection for python is old fashioned and needs flags + if "/ncurses-" in d: + add_flag(env, "LDFLAGS", f"-L{libdir}", append=append) + elif "/bz2-" in d: + add_flag(env, "LDFLAGS", f"-L{libdir}", append=append) + if add_library_path: + add_path_entry(env, "LIBRARY_PATH", libdir, append=append) + 
if not has_pkgconfig and is_direct_dep: + add_flag( + env, + "GETDEPS_CABAL_FLAGS", + f"--extra-lib-dirs={libdir}", + append=append, + ) + + # Allow resolving binaries (eg: cmake, ninja) and dlls + # built by earlier steps + if os.path.exists(bindir): + found = True + add_path_entry(env, "PATH", bindir, append=append) + + fbthrift_bin = os.path.join(bindir, "thrift1") + if os.path.isfile(fbthrift_bin): + env["FBTHRIFT_BIN"] = bindir + + # If rustc is present in the `bin` directory, set RUSTC to prevent + # cargo uses the rustc installed in the system. + if self.is_windows(): + cargo_path = os.path.join(bindir, "cargo.exe") + rustc_path = os.path.join(bindir, "rustc.exe") + rustdoc_path = os.path.join(bindir, "rustdoc.exe") + else: + cargo_path = os.path.join(bindir, "cargo") + rustc_path = os.path.join(bindir, "rustc") + rustdoc_path = os.path.join(bindir, "rustdoc") + + if os.path.isfile(rustc_path): + env["CARGO_BIN"] = cargo_path + env["RUSTC"] = rustc_path + env["RUSTDOC"] = rustdoc_path + + openssl_include = os.path.join(d, "include", "openssl") + if os.path.isdir(openssl_include) and any( + os.path.isfile(os.path.join(d, "lib", libcrypto)) + for libcrypto in ("libcrypto.lib", "libcrypto.so", "libcrypto.a") + ): + # This must be the openssl library, let Rust know about it + env["OPENSSL_DIR"] = d + + return found + + +def list_win32_subst_letters(): + output = subprocess.check_output(["subst"]).decode("utf-8") + # The output is a set of lines like: `F:\: => C:\open\some\where` + lines = output.strip().split("\r\n") + mapping = {} + for line in lines: + fields = line.split(": => ") + if len(fields) != 2: + continue + letter = fields[0] + path = fields[1] + mapping[letter] = path + + return mapping + + +def find_existing_win32_subst_for_path( + path: str, + subst_mapping: Mapping[str, str], +) -> Optional[str]: + path = ntpath.normcase(ntpath.normpath(path)) + for letter, target in subst_mapping.items(): + if ntpath.normcase(target) == path: + return letter + 
return None + + +def find_unused_drive_letter(): + import ctypes + + buffer_len = 256 + blen = ctypes.c_uint(buffer_len) + rv = ctypes.c_uint() + bufs = ctypes.create_string_buffer(buffer_len) + rv = ctypes.windll.kernel32.GetLogicalDriveStringsA(blen, bufs) + if rv > buffer_len: + raise Exception("GetLogicalDriveStringsA result too large for buffer") + nul = "\x00".encode("ascii") + + used = [drive.decode("ascii")[0] for drive in bufs.raw.strip(nul).split(nul)] + possible = [c for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ"] + available = sorted(list(set(possible) - set(used))) + if len(available) == 0: + return None + # Prefer to assign later letters rather than earlier letters + return available[-1] + + +def create_subst_path(path: str) -> str: + for _attempt in range(0, 24): + drive = find_existing_win32_subst_for_path( + path, subst_mapping=list_win32_subst_letters() + ) + if drive: + return drive + available = find_unused_drive_letter() + if available is None: + raise Exception( + ( + "unable to make shorter subst mapping for %s; " + "no available drive letters" + ) + % path + ) + + # Try to set up a subst mapping; note that we may be racing with + # other processes on the same host, so this may not succeed. 
+ try: + subprocess.check_call(["subst", "%s:" % available, path]) + return "%s:\\" % available + except Exception: + print("Failed to map %s -> %s" % (available, path)) + + raise Exception("failed to set up a subst path for %s" % path) + + +def _check_host_type(args, host_type): + if host_type is None: + host_tuple_string = getattr(args, "host_type", None) + if host_tuple_string: + host_type = HostType.from_tuple_string(host_tuple_string) + else: + host_type = HostType() + + assert isinstance(host_type, HostType) + return host_type + + +def setup_build_options(args, host_type=None) -> BuildOptions: + """Create a BuildOptions object based on the arguments""" + + fbcode_builder_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + scratch_dir = args.scratch_path + if not scratch_dir: + # TODO: `mkscratch` doesn't currently know how best to place things on + # sandcastle, so whip up something reasonable-ish + if "SANDCASTLE" in os.environ: + if "DISK_TEMP" not in os.environ: + raise Exception( + ( + "I need DISK_TEMP to be set in the sandcastle environment " + "so that I can store build products somewhere sane" + ) + ) + scratch_dir = os.path.join( + os.environ["DISK_TEMP"], "fbcode_builder_getdeps" + ) + if not scratch_dir: + try: + scratch_dir = ( + subprocess.check_output( + ["mkscratch", "path", "--subdir", "fbcode_builder_getdeps"] + ) + .strip() + .decode("utf-8") + ) + except OSError as exc: + if exc.errno != errno.ENOENT: + # A legit failure; don't fall back, surface the error + raise + # This system doesn't have mkscratch so we fall back to + # something local. 
+ munged = fbcode_builder_dir.replace("Z", "zZ") + for s in ["/", "\\", ":"]: + munged = munged.replace(s, "Z") + + if is_windows() and os.path.isdir("c:/open"): + temp = "c:/open/scratch" + else: + temp = tempfile.gettempdir() + + scratch_dir = os.path.join(temp, "fbcode_builder_getdeps-%s" % munged) + if not is_windows() and os.geteuid() == 0: + # Running as root; in the case where someone runs + # sudo getdeps.py install-system-deps + # and then runs as build without privs, we want to avoid creating + # a scratch dir that the second stage cannot write to. + # So we generate a different path if we are root. + scratch_dir += "-root" + + if not os.path.exists(scratch_dir): + os.makedirs(scratch_dir) + + if is_windows(): + subst = create_subst_path(scratch_dir) + print( + "Mapping scratch dir %s -> %s" % (scratch_dir, subst), file=sys.stderr + ) + scratch_dir = subst + else: + if not os.path.exists(scratch_dir): + os.makedirs(scratch_dir) + + # Make sure we normalize the scratch path. This path is used as part of the hash + # computation for detecting if projects have been updated, so we need to always + # use the exact same string to refer to a given directory. + # But! realpath in some combinations of Windows/Python3 versions can expand the + # drive substitutions on Windows, so avoid that! + if not is_windows(): + scratch_dir = os.path.realpath(scratch_dir) + + # Save these args passed by the user in an env variable, so it + # can be used while hashing this build. 
+ os.environ["GETDEPS_CMAKE_DEFINES"] = getattr(args, "extra_cmake_defines", "") or "" + + host_type = _check_host_type(args, host_type) + + build_args = { + k: v + for (k, v) in vars(args).items() + if k + in { + "num_jobs", + "use_shipit", + "vcvars_path", + "allow_system_packages", + "lfs_path", + "shared_libs", + "free_up_disk", + "build_type", + } + } + + return BuildOptions( + fbcode_builder_dir, + scratch_dir, + host_type, + install_dir=args.install_prefix, + facebook_internal=args.facebook_internal, + **build_args, + ) diff --git a/build/fbcode_builder/getdeps/cache.py b/build/fbcode_builder/getdeps/cache.py new file mode 100644 index 000000000..ed0d45bfd --- /dev/null +++ b/build/fbcode_builder/getdeps/cache.py @@ -0,0 +1,39 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# pyre-unsafe + + +class ArtifactCache(object): + """The ArtifactCache is a small abstraction that allows caching + named things in some external storage mechanism. + The primary use case is for storing the build products on CI + systems to accelerate the build""" + + def download_to_file(self, name, dest_file_name) -> bool: + """If `name` exists in the cache, download it and place it + in the specified `dest_file_name` location on the filesystem. + If a transient issue was encountered a TransientFailure shall + be raised. + If `name` doesn't exist in the cache `False` shall be returned. + If `dest_file_name` was successfully updated `True` shall be + returned. + All other conditions shall raise an appropriate exception.""" + return False + + def upload_from_file(self, name, source_file_name) -> None: + """Causes `name` to be populated in the cache by uploading + the contents of `source_file_name` to the storage system. + If a transient issue was encountered a TransientFailure shall + be raised. 
+ If the upload failed for some other reason, an appropriate + exception shall be raised.""" + pass + + +def create_cache() -> None: + """This function is monkey patchable to provide an actual + implementation""" + return None diff --git a/build/fbcode_builder/getdeps/cargo.py b/build/fbcode_builder/getdeps/cargo.py new file mode 100644 index 000000000..cae8bf54c --- /dev/null +++ b/build/fbcode_builder/getdeps/cargo.py @@ -0,0 +1,467 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# pyre-unsafe + +import os +import re +import shutil +import typing + +from .builder import BuilderBase + +if typing.TYPE_CHECKING: + from .buildopts import BuildOptions + + +class CargoBuilder(BuilderBase): + def __init__( + self, + loader, + dep_manifests, # manifests of dependencies + build_opts: "BuildOptions", + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + build_doc, + workspace_dir, + manifests_to_build, + cargo_config_file, + ) -> None: + super(CargoBuilder, self).__init__( + loader, + dep_manifests, + build_opts, + ctx, + manifest, + src_dir, + build_dir, + inst_dir, + ) + self.build_doc = build_doc + self.ws_dir = workspace_dir + self.manifests_to_build = manifests_to_build and manifests_to_build.split(",") + self.loader = loader + self.cargo_config_file_subdir = cargo_config_file + + def run_cargo(self, install_dirs, operation, args=None) -> None: + args = args or [] + env = self._compute_env() + # Enable using nightly features with stable compiler + env["RUSTC_BOOTSTRAP"] = "1" + env["LIBZ_SYS_STATIC"] = "1" + cmd = [ + "cargo", + operation, + "--workspace", + "-j%s" % self.num_jobs, + ] + args + self._run_cmd(cmd, cwd=self.workspace_dir(), env=env) + + def build_source_dir(self): + return os.path.join(self.build_dir, "source") + + def workspace_dir(self): + return os.path.join(self.build_source_dir(), 
self.ws_dir or "") + + def manifest_dir(self, manifest): + return os.path.join(self.build_source_dir(), manifest) + + def recreate_dir(self, src, dst) -> None: + if os.path.isdir(dst): + if os.path.islink(dst): + os.remove(dst) + else: + shutil.rmtree(dst) + shutil.copytree(src, dst) + + def cargo_config_file(self): + build_source_dir = self.build_dir + if self.cargo_config_file_subdir: + return os.path.join(build_source_dir, self.cargo_config_file_subdir) + else: + return os.path.join(build_source_dir, ".cargo", "config") + + def _create_cargo_config(self): + cargo_config_file = self.cargo_config_file() + cargo_config_dir = os.path.dirname(cargo_config_file) + if not os.path.isdir(cargo_config_dir): + os.mkdir(cargo_config_dir) + + dep_to_git = self._resolve_dep_to_git() + + if os.path.isfile(cargo_config_file): + with open(cargo_config_file, "r") as f: + print(f"Reading {cargo_config_file}") + cargo_content = f.read() + else: + cargo_content = "" + + new_content = cargo_content + if "# Generated by getdeps.py" not in cargo_content: + new_content += """\ +# Generated by getdeps.py +[build] +target-dir = '''{}''' + +[profile.dev] +debug = false +incremental = false +""".format( + self.build_dir.replace("\\", "\\\\") + ) + + # Point to vendored sources from getdeps manifests + for _dep, git_conf in dep_to_git.items(): + if "cargo_vendored_sources" in git_conf: + vendored_dir = git_conf["cargo_vendored_sources"].replace("\\", "\\\\") + override = ( + f'[source."{git_conf["repo_url"]}"]\ndirectory = "{vendored_dir}"\n' + ) + if override not in cargo_content: + new_content += override + + if self.build_opts.fbsource_dir: + # Point to vendored crates.io if possible + try: + from .facebook.rust import vendored_crates + + new_content = vendored_crates( + self.build_opts.fbsource_dir, new_content + ) + except ImportError: + # This FB internal module isn't shippped to github, + # so just rely on cargo downloading crates on it's own + pass + + if new_content != 
cargo_content: + with open(cargo_config_file, "w") as f: + print( + f"Writing cargo config for {self.manifest.name} to {cargo_config_file}" + ) + f.write(new_content) + + return dep_to_git + + def _prepare(self, reconfigure) -> None: + build_source_dir = self.build_source_dir() + self.recreate_dir(self.src_dir, build_source_dir) + + dep_to_git = self._create_cargo_config() + + if self.ws_dir is not None: + self._patchup_workspace(dep_to_git) + + def _build(self, reconfigure) -> None: + # _prepare has been run already. Actually do the build + build_source_dir = self.build_source_dir() + + build_args = [ + "--out-dir", + os.path.join(self.inst_dir, "bin"), + "-Zunstable-options", + ] + + if self.build_opts.build_type != "Debug": + build_args.append("--release") + + if self.manifests_to_build is None: + self.run_cargo( + self.install_dirs, + "build", + build_args, + ) + else: + for manifest in self.manifests_to_build: + self.run_cargo( + self.install_dirs, + "build", + build_args + + [ + "--manifest-path", + self.manifest_dir(manifest), + ], + ) + + self.recreate_dir(build_source_dir, os.path.join(self.inst_dir, "source")) + + def run_tests(self, schedule_type, owner, test_filter, retry, no_testpilot) -> None: + if test_filter: + args = ["--", test_filter] + else: + args = [] + + if self.manifests_to_build is None: + self.run_cargo(self.install_dirs, "test", args) + if self.build_doc: + self.run_cargo(self.install_dirs, "doc", ["--no-deps"]) + else: + for manifest in self.manifests_to_build: + margs = ["--manifest-path", self.manifest_dir(manifest)] + self.run_cargo(self.install_dirs, "test", args + margs) + if self.build_doc: + self.run_cargo(self.install_dirs, "doc", ["--no-deps"] + margs) + + def _patchup_workspace(self, dep_to_git) -> None: + """ + This method makes some assumptions about the state of the project and + its cargo dependendies: + 1. Crates from cargo dependencies can be extracted from Cargo.toml files + using _extract_crates function. 
It is using a heuristic so check its + code to understand how it is done. + 2. The extracted cargo dependencies crates can be found in the + dependency's install dir using _resolve_crate_to_path function + which again is using a heuristic. + + Notice that many things might go wrong here. E.g. if someone depends + on another getdeps crate by writing in their Cargo.toml file: + + my-rename-of-crate = { package = "crate", git = "..." } + + they can count themselves lucky because the code will raise an + Exception. There might be more cases where the code will silently pass + producing bad results. + """ + workspace_dir = self.workspace_dir() + git_url_to_crates_and_paths = self._resolve_config(dep_to_git) + if git_url_to_crates_and_paths: + patch_cargo = os.path.join(workspace_dir, "Cargo.toml") + if os.path.isfile(patch_cargo): + with open(patch_cargo, "r") as f: + manifest_content = f.read() + else: + manifest_content = "" + + new_content = manifest_content + if "[package]" not in manifest_content: + # A fake manifest has to be crated to change the virtual + # manifest into a non-virtual. The virtual manifests are limited + # in many ways and the inability to define patches on them is + # one. Check https://github.com/rust-lang/cargo/issues/4934 to + # see if it is resolved. 
+ null_file = "/dev/null" + if self.build_opts.is_windows(): + null_file = "nul" + new_content += f""" +[package] +name = "fake_manifest_of_{self.manifest.name}" +version = "0.0.0" + +[lib] +path = "{null_file}" +""" + config = [] + for git_url, crates_to_patch_path in git_url_to_crates_and_paths.items(): + crates_patches = [ + '{} = {{ path = "{}" }}'.format( + crate, + crates_to_patch_path[crate].replace("\\", "\\\\"), + ) + for crate in sorted(crates_to_patch_path.keys()) + ] + patch_key = f'[patch."{git_url}"]' + if patch_key not in manifest_content: + config.append(f"\n{patch_key}\n" + "\n".join(crates_patches)) + new_content += "\n".join(config) + if new_content != manifest_content: + with open(patch_cargo, "w") as f: + print(f"writing patch to {patch_cargo}") + f.write(new_content) + + def _resolve_config(self, dep_to_git) -> typing.Dict[str, typing.Dict[str, str]]: + """ + Returns a configuration to be put inside root Cargo.toml file which + patches the dependencies git code with local getdeps versions. 
+ See https://doc.rust-lang.org/cargo/reference/manifest.html#the-patch-section + """ + dep_to_crates = self._resolve_dep_to_crates(self.build_source_dir(), dep_to_git) + + git_url_to_crates_and_paths = {} + for dep_name in sorted(dep_to_git.keys()): + git_conf = dep_to_git[dep_name] + req_crates = sorted(dep_to_crates.get(dep_name, [])) + if not req_crates: + continue # nothing to patch, move along + + git_url = git_conf.get("repo_url", None) + crate_source_map = git_conf["crate_source_map"] + if git_url and crate_source_map: + crates_to_patch_path = git_url_to_crates_and_paths.get(git_url, {}) + for c in req_crates: + if c in crate_source_map and c not in crates_to_patch_path: + crates_to_patch_path[c] = crate_source_map[c] + print( + f"{self.manifest.name}: Patching crate {c} via virtual manifest in {self.workspace_dir()}" + ) + if crates_to_patch_path: + git_url_to_crates_and_paths[git_url] = crates_to_patch_path + + return git_url_to_crates_and_paths + + def _resolve_dep_to_git(self): + """ + For each direct dependency of the currently build manifest check if it + is also cargo-builded and if yes then extract it's git configs and + install dir + """ + dependencies = self.manifest.get_dependencies(self.ctx) + if not dependencies: + return [] + + dep_to_git = {} + for dep in dependencies: + dep_manifest = self.loader.load_manifest(dep) + dep_builder = dep_manifest.get("build", "builder", ctx=self.ctx) + + dep_cargo_conf = dep_manifest.get_section_as_dict("cargo", self.ctx) + dep_crate_map = dep_manifest.get_section_as_dict("crate.pathmap", self.ctx) + + if ( + not (dep_crate_map or dep_cargo_conf) + and dep_builder not in ["cargo"] + or dep == "rust" + ): + # This dependency has no cargo rust content so ignore it. + # The "rust" dependency is an exception since it contains the + # toolchain. 
+ continue + + git_conf = dep_manifest.get_section_as_dict("git", self.ctx) + if dep != "rust" and "repo_url" not in git_conf: + raise Exception( + f"{dep}: A cargo dependency requires git.repo_url to be defined." + ) + + if dep_builder == "cargo": + dep_source_dir = self.loader.get_project_install_dir(dep_manifest) + dep_source_dir = os.path.join(dep_source_dir, "source") + else: + fetcher = self.loader.create_fetcher(dep_manifest) + dep_source_dir = fetcher.get_src_dir() + + crate_source_map = {} + if dep_crate_map: + for crate, subpath in dep_crate_map.items(): + if crate not in crate_source_map: + if self.build_opts.is_windows(): + subpath = subpath.replace("/", "\\") + crate_path = os.path.join(dep_source_dir, subpath) + print( + f"{self.manifest.name}: Mapped crate {crate} to dep {dep} dir {crate_path}" + ) + crate_source_map[crate] = crate_path + elif dep_cargo_conf: + # We don't know what crates are defined buy the dep, look for them + search_pattern = re.compile('\\[package\\]\nname = "(.*)"') + for crate_root, _, files in os.walk(dep_source_dir): + if "Cargo.toml" in files: + with open(os.path.join(crate_root, "Cargo.toml"), "r") as f: + content = f.read() + match = search_pattern.search(content) + if match: + crate = match.group(1) + if crate: + print( + f"{self.manifest.name}: Discovered crate {crate} in dep {dep} dir {crate_root}" + ) + crate_source_map[crate] = crate_root + + git_conf["crate_source_map"] = crate_source_map + + if not dep_crate_map and dep_cargo_conf: + dep_cargo_dir = self.loader.get_project_build_dir(dep_manifest) + dep_cargo_dir = os.path.join(dep_cargo_dir, "source") + dep_ws_dir = dep_cargo_conf.get("workspace_dir", None) + if dep_ws_dir: + dep_cargo_dir = os.path.join(dep_cargo_dir, dep_ws_dir) + git_conf["cargo_vendored_sources"] = dep_cargo_dir + + dep_to_git[dep] = git_conf + return dep_to_git + + def _resolve_dep_to_crates(self, build_source_dir, dep_to_git): + """ + This function traverse the build_source_dir in search of 
Cargo.toml + files, extracts the crate names from them using _extract_crates + function and returns a merged result containing crate names per + dependency name from all Cargo.toml files in the project. + """ + if not dep_to_git: + return {} # no deps, so don't waste time traversing files + + dep_to_crates = {} + + # First populate explicit crate paths from dependencies + for name, git_conf in dep_to_git.items(): + crates = git_conf["crate_source_map"].keys() + if crates: + dep_to_crates.setdefault(name, set()).update(crates) + + # Now find from Cargo.tomls + for root, _, files in os.walk(build_source_dir): + for f in files: + if f == "Cargo.toml": + more_dep_to_crates = CargoBuilder._extract_crates_used( + os.path.join(root, f), dep_to_git + ) + for dep_name, crates in more_dep_to_crates.items(): + existing_crates = dep_to_crates.get(dep_name, set()) + for c in crates: + if c not in existing_crates: + print( + f"Patch {self.manifest.name} uses {dep_name} crate {crates}" + ) + existing_crates.add(c) + dep_to_crates.setdefault(name, set()).update(existing_crates) + return dep_to_crates + + @staticmethod + def _extract_crates_used(cargo_toml_file, dep_to_git): + """ + This functions reads content of provided cargo toml file and extracts + crate names per each dependency. The extraction is done by a heuristic + so it might be incorrect. 
+ """ + deps_to_crates = {} + with open(cargo_toml_file, "r") as f: + for line in f.readlines(): + if line.startswith("#") or "git = " not in line: + continue # filter out commented lines and ones without git deps + for dep_name, conf in dep_to_git.items(): + # Only redirect deps that point to git URLS + if 'git = "{}"'.format(conf["repo_url"]) in line: + pkg_template = ' package = "' + if pkg_template in line: + crate_name, _, _ = line.partition(pkg_template)[ + 2 + ].partition('"') + else: + crate_name, _, _ = line.partition("=") + deps_to_crates.setdefault(dep_name, set()).add( + crate_name.strip() + ) + return deps_to_crates + + def _resolve_crate_to_path(self, crate, crate_source_map): + """ + Tries to find in source_dir by searching a [package] + keyword followed by name = "". + """ + search_pattern = '[package]\nname = "{}"'.format(crate) + + for _crate, crate_source_dir in crate_source_map.items(): + for crate_root, _, files in os.walk(crate_source_dir): + if "Cargo.toml" in files: + with open(os.path.join(crate_root, "Cargo.toml"), "r") as f: + content = f.read() + if search_pattern in content: + return crate_root + + raise Exception( + f"{self.manifest.name}: Failed to find dep crate {crate} in paths {crate_source_map}" + ) diff --git a/build/fbcode_builder/getdeps/copytree.py b/build/fbcode_builder/getdeps/copytree.py new file mode 100644 index 000000000..2297bd3aa --- /dev/null +++ b/build/fbcode_builder/getdeps/copytree.py @@ -0,0 +1,82 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+
+# pyre-unsafe
+
+import os
+import shutil
+import subprocess
+
+from .platform import is_windows
+
+
+# Source directories already prefetched by this process; consulted and
+# updated by prefetch_dir_if_eden() so each directory is only prefetched once.
+PREFETCHED_DIRS = set()
+
+
+def containing_repo_type(path):
+    """Walk upward from `path` looking for a `.git` or `.hg` entry.
+
+    Returns a tuple of ("git" or "hg", repo_root) for the first repository
+    marker found, or (None, None) if the filesystem root is reached without
+    finding one.
+    """
+    while True:
+        if os.path.exists(os.path.join(path, ".git")):
+            return ("git", path)
+        if os.path.exists(os.path.join(path, ".hg")):
+            return ("hg", path)
+
+        # os.path.dirname() returns its input unchanged once we hit the
+        # filesystem root, which is our termination condition.
+        parent = os.path.dirname(path)
+        if parent == path:
+            return None, None
+        path = parent
+
+
+def find_eden_root(dirpath):
+    """If the specified directory is inside an EdenFS checkout, returns
+    the canonical absolute path to the root of that checkout.
+
+    Returns None if the specified directory is not in an EdenFS checkout.
+    """
+    if is_windows():
+        # On Windows, detect an EdenFS checkout by the presence of a
+        # .eden/config file at the repository root.
+        repo_type, repo_root = containing_repo_type(dirpath)
+        if repo_root is not None:
+            if os.path.exists(os.path.join(repo_root, ".eden", "config")):
+                return repo_root
+        return None
+
+    # Elsewhere, every directory in a checkout has a .eden/root symlink
+    # pointing at the checkout root; readlink fails (OSError) outside one.
+    try:
+        return os.readlink(os.path.join(dirpath, ".eden", "root"))
+    except OSError:
+        return None
+
+
+def prefetch_dir_if_eden(dirpath) -> None:
+    """After an amend/rebase, Eden may need to fetch a large number
+    of trees from the servers. The simplistic single threaded walk
+    performed by copytree makes this more expensive than is desirable
+    so we help accelerate things by performing a prefetch on the
+    source directory"""
+    global PREFETCHED_DIRS
+    if dirpath in PREFETCHED_DIRS:
+        return
+    root = find_eden_root(dirpath)
+    if root is None:
+        return
+    # Build a recursive glob relative to the checkout root, normalized to
+    # forward slashes as edenfsctl expects.
+    glob = f"{os.path.relpath(dirpath, root).replace(os.sep, '/')}/**"
+    print(f"Prefetching {glob}")
+    # --background: fire and forget; the copy proceeds while data streams in.
+    subprocess.call(["edenfsctl", "prefetch", "--repo", root, glob, "--background"])
+    PREFETCHED_DIRS.add(dirpath)
+
+
+# pyre-fixme[9]: ignore has type `bool`; used as `None`.
+def copytree(src_dir, dest_dir, ignore: bool = None):
+    """Recursively copy the src_dir to the dest_dir, filtering
+    out entries using the ignore lambda. The behavior of the
+    ignore lambda must match that described by `shutil.copytree`. 
+ This `copytree` function knows how to prefetch data when + running in an eden repo. + TODO: I'd like to either extend this or add a variant that + uses watchman to mirror src_dir into dest_dir. + """ + prefetch_dir_if_eden(src_dir) + # pyre-fixme[6]: For 3rd param expected + # `Union[typing.Callable[[Union[PathLike[str], str], List[str]], Iterable[str]], + # typing.Callable[[str, List[str]], Iterable[str]], None]` but got `bool`. + return shutil.copytree(src_dir, dest_dir, ignore=ignore) diff --git a/build/fbcode_builder/getdeps/dyndeps.py b/build/fbcode_builder/getdeps/dyndeps.py new file mode 100644 index 000000000..25e15cd36 --- /dev/null +++ b/build/fbcode_builder/getdeps/dyndeps.py @@ -0,0 +1,461 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# pyre-unsafe + +import errno +import glob +import os +import re +import shlex +import shutil +import stat +import subprocess +import sys +from struct import unpack +from typing import List, Optional + +OBJECT_SUBDIRS = ("bin", "lib", "lib64") + + +def copyfile(src, dest) -> None: + shutil.copyfile(src, dest) + shutil.copymode(src, dest) + + +class DepBase(object): + def __init__(self, buildopts, env, install_dirs, strip) -> None: + self.buildopts = buildopts + self.env = env + self.install_dirs = install_dirs + self.strip = strip + + # Deduplicates dependency processing. Keyed on the library + # destination path. + self.processed_deps = set() + + def list_dynamic_deps(self, objfile): + raise RuntimeError("list_dynamic_deps not implemented") + + def interesting_dep(self, d) -> bool: + return True + + # final_install_prefix must be the equivalent path to `destdir` on the + # installed system. For example, if destdir is `/tmp/RANDOM/usr/local' which + # is intended to map to `/usr/local` in the install image, then + # final_install_prefix='/usr/local'. 
+ # If left unspecified, destdir will be used. + def process_deps(self, destdir, final_install_prefix=None) -> None: + if self.buildopts.is_windows(): + lib_dir = "bin" + else: + lib_dir = "lib" + # pyre-fixme[16]: `DepBase` has no attribute `munged_lib_dir`. + self.munged_lib_dir = os.path.join(destdir, lib_dir) + + final_lib_dir = os.path.join(final_install_prefix or destdir, lib_dir) + + if not os.path.isdir(self.munged_lib_dir): + os.makedirs(self.munged_lib_dir) + + # Look only at the things that got installed in the leaf package, + # which will be the last entry in the install dirs list + inst_dir = self.install_dirs[-1] + print("Process deps under %s" % inst_dir, file=sys.stderr) + + for dir in OBJECT_SUBDIRS: + src_dir = os.path.join(inst_dir, dir) + if not os.path.isdir(src_dir): + continue + dest_dir = os.path.join(destdir, dir) + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) + + for objfile in self.list_objs_in_dir(src_dir): + print("Consider %s/%s" % (dir, objfile)) + dest_obj = os.path.join(dest_dir, objfile) + copyfile(os.path.join(src_dir, objfile), dest_obj) + self.munge_in_place(dest_obj, final_lib_dir) + + def find_all_dependencies(self, build_dir): + all_deps = set() + for objfile in self.list_objs_in_dir( + build_dir, recurse=True, output_prefix=build_dir + ): + for d in self.list_dynamic_deps(objfile): + all_deps.add(d) + + interesting_deps = {d for d in all_deps if self.interesting_dep(d)} + dep_paths = [] + for dep in interesting_deps: + dep_path = self.resolve_loader_path(dep) + if dep_path: + dep_paths.append(dep_path) + + return dep_paths + + def munge_in_place(self, objfile, final_lib_dir) -> None: + print("Munging %s" % objfile) + for d in self.list_dynamic_deps(objfile): + if not self.interesting_dep(d): + continue + + # Resolve this dep: does it exist in any of our installation + # directories? 
If so, then it is a candidate for processing + dep = self.resolve_loader_path(d) + if dep: + # pyre-fixme[16]: `DepBase` has no attribute `munged_lib_dir`. + dest_dep = os.path.join(self.munged_lib_dir, os.path.basename(dep)) + print("dep: %s -> %s" % (d, dest_dep)) + if dest_dep in self.processed_deps: + # A previous dependency with the same name has already + # been installed at dest_dep, so there is no need to copy + # or munge the dependency again. + # TODO: audit that both source paths have the same inode number + pass + else: + self.processed_deps.add(dest_dep) + copyfile(dep, dest_dep) + self.munge_in_place(dest_dep, final_lib_dir) + + self.rewrite_dep(objfile, d, dep, dest_dep, final_lib_dir) + + if self.strip: + self.strip_debug_info(objfile) + + def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir): + raise RuntimeError("rewrite_dep not implemented") + + def resolve_loader_path(self, dep: str) -> Optional[str]: + if os.path.isabs(dep): + return dep + d = os.path.basename(dep) + for inst_dir in self.install_dirs: + for libdir in OBJECT_SUBDIRS: + candidate = os.path.join(inst_dir, libdir, d) + if os.path.exists(candidate): + return candidate + return None + + def list_objs_in_dir(self, dir, recurse: bool = False, output_prefix: str = ""): + for entry in os.listdir(dir): + entry_path = os.path.join(dir, entry) + st = os.lstat(entry_path) + if stat.S_ISREG(st.st_mode): + if self.is_objfile(entry_path): + relative_result = os.path.join(output_prefix, entry) + yield os.path.normcase(relative_result) + elif recurse and stat.S_ISDIR(st.st_mode): + child_prefix = os.path.join(output_prefix, entry) + for result in self.list_objs_in_dir( + entry_path, recurse=recurse, output_prefix=child_prefix + ): + yield result + + def is_objfile(self, objfile) -> bool: + return True + + def strip_debug_info(self, objfile) -> None: + """override this to define how to remove debug information + from an object file""" + pass + + def check_call_verbose(self, 
args: List[str]) -> None: + print(" ".join(map(shlex.quote, args))) + subprocess.check_call(args) + + +class WinDeps(DepBase): + def __init__(self, buildopts, env, install_dirs, strip) -> None: + super(WinDeps, self).__init__(buildopts, env, install_dirs, strip) + self.dumpbin = self.find_dumpbin() + + def find_dumpbin(self) -> str: + # Looking for dumpbin in the following hardcoded paths. + # The registry option to find the install dir doesn't work anymore. + globs = [ + ( + "C:/Program Files (x86)/" + "Microsoft Visual Studio/" + "*/*/VC/Tools/" + "MSVC/*/bin/Hostx64/x64/dumpbin.exe" + ), + ( + "C:/Program Files (x86)/" + "Common Files/" + "Microsoft/Visual C++ for Python/*/" + "VC/bin/dumpbin.exe" + ), + ("c:/Program Files (x86)/Microsoft Visual Studio */VC/bin/dumpbin.exe"), + ( + "C:/Program Files/Microsoft Visual Studio/*/Professional/VC/Tools/MSVC/*/bin/HostX64/x64/dumpbin.exe" + ), + ] + for pattern in globs: + for exe in glob.glob(pattern): + return exe + + raise RuntimeError("could not find dumpbin.exe") + + def list_dynamic_deps(self, exe): + deps = [] + print("Resolve deps for %s" % exe) + output = subprocess.check_output( + [self.dumpbin, "/nologo", "/dependents", exe] + ).decode("utf-8") + + lines = output.split("\n") + for line in lines: + m = re.match("\\s+(\\S+.dll)", line, re.IGNORECASE) + if m: + deps.append(m.group(1).lower()) + + return deps + + def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir) -> None: + # We can't rewrite on windows, but we will + # place the deps alongside the exe so that + # they end up in the search path + pass + + # These are the Windows system dll, which we don't want to copy while + # packaging. 
+ SYSTEM_DLLS = set( # noqa: C405 + [ + "advapi32.dll", + "dbghelp.dll", + "kernel32.dll", + "msvcp140.dll", + "vcruntime140.dll", + "ws2_32.dll", + "ntdll.dll", + "shlwapi.dll", + ] + ) + + def interesting_dep(self, d) -> bool: + if "api-ms-win-crt" in d: + return False + if d in self.SYSTEM_DLLS: + return False + return True + + def is_objfile(self, objfile) -> bool: + if not os.path.isfile(objfile): + return False + if objfile.lower().endswith(".exe"): + return True + return False + + def emit_dev_run_script(self, script_path, dep_dirs) -> None: + """Emit a script that can be used to run build artifacts directly from the + build directory, without installing them. + + The dep_dirs parameter should be a list of paths that need to be added to $PATH. + This can be computed by calling compute_dependency_paths() or + compute_dependency_paths_fast(). + + This is only necessary on Windows, which does not have RPATH, and instead + requires the $PATH environment variable be updated in order to find the proper + library dependencies. + """ + contents = self._get_dev_run_script_contents(dep_dirs) + with open(script_path, "w") as f: + f.write(contents) + + def compute_dependency_paths(self, build_dir): + """Return a list of all directories that need to be added to $PATH to ensure + that library dependencies can be found correctly. This is computed by scanning + binaries to determine exactly the right list of dependencies. + + The compute_dependency_paths_fast() is a alternative function that runs faster + but may return additional extraneous paths. + """ + dep_dirs = set() + # Find paths by scanning the binaries. 
+ for dep in self.find_all_dependencies(build_dir): + dep_dirs.add(os.path.dirname(dep)) + + dep_dirs.update(self.read_custom_dep_dirs(build_dir)) + return sorted(dep_dirs) + + def compute_dependency_paths_fast(self, build_dir): + """Similar to compute_dependency_paths(), but rather than actually scanning + binaries, just add all library paths from the specified installation + directories. This is much faster than scanning the binaries, but may result in + more paths being returned than actually necessary. + """ + dep_dirs = set() + for inst_dir in self.install_dirs: + for subdir in OBJECT_SUBDIRS: + path = os.path.join(inst_dir, subdir) + if os.path.exists(path): + dep_dirs.add(path) + + dep_dirs.update(self.read_custom_dep_dirs(build_dir)) + return sorted(dep_dirs) + + def read_custom_dep_dirs(self, build_dir): + # The build system may also have included libraries from other locations that + # we might not be able to find normally in find_all_dependencies(). + # To handle this situation we support reading additional library paths + # from a LIBRARY_DEP_DIRS.txt file that may have been generated in the build + # output directory. 
+ dep_dirs = set() + try: + explicit_dep_dirs_path = os.path.join(build_dir, "LIBRARY_DEP_DIRS.txt") + with open(explicit_dep_dirs_path, "r") as f: + for line in f.read().splitlines(): + dep_dirs.add(line) + except OSError as ex: + if ex.errno != errno.ENOENT: + raise + + return dep_dirs + + def _get_dev_run_script_contents(self, path_dirs) -> str: + path_entries = ["$env:PATH"] + path_dirs + path_str = ";".join(path_entries) + return """\ +$orig_env = $env:PATH +$env:PATH = "{path_str}" + +try {{ + $cmd_args = $args[1..$args.length] + & $args[0] @cmd_args +}} finally {{ + $env:PATH = $orig_env +}} +""".format( + path_str=path_str + ) + + +class ElfDeps(DepBase): + def __init__(self, buildopts, env, install_dirs, strip) -> None: + super(ElfDeps, self).__init__(buildopts, env, install_dirs, strip) + + # We need patchelf to rewrite deps, so ensure that it is built... + args = [sys.executable, sys.argv[0]] + if buildopts.allow_system_packages: + args.append("--allow-system-packages") + subprocess.check_call(args + ["build", "patchelf"]) + + # ... and that we know where it lives + patchelf_install = os.fsdecode( + subprocess.check_output(args + ["show-inst-dir", "patchelf"]).strip() + ) + if not patchelf_install: + # its a system package, so we assume it is in the path + patchelf_install = "patchelf" + else: + patchelf_install = os.path.join(patchelf_install, "bin", "patchelf") + self.patchelf = patchelf_install + + def list_dynamic_deps(self, objfile): + out = ( + subprocess.check_output( + [self.patchelf, "--print-needed", objfile], env=dict(self.env.items()) + ) + .decode("utf-8") + .strip() + ) + lines = out.split("\n") + return lines + + def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir) -> None: + final_dep = os.path.join( + final_lib_dir, + # pyre-fixme[16]: `ElfDeps` has no attribute `munged_lib_dir`. 
+ os.path.relpath(new_dep, self.munged_lib_dir), + ) + self.check_call_verbose( + [self.patchelf, "--replace-needed", depname, final_dep, objfile] + ) + + def is_objfile(self, objfile) -> bool: + if not os.path.isfile(objfile): + return False + with open(objfile, "rb") as f: + # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header + magic = f.read(4) + return magic == b"\x7fELF" + + def strip_debug_info(self, objfile) -> None: + self.check_call_verbose(["strip", objfile]) + + +# MACH-O magic number +MACH_MAGIC = 0xFEEDFACF + + +class MachDeps(DepBase): + def interesting_dep(self, d) -> bool: + if d.startswith("/usr/lib/") or d.startswith("/System/"): + return False + return True + + def is_objfile(self, objfile): + if not os.path.isfile(objfile): + return False + with open(objfile, "rb") as f: + # mach stores the magic number in native endianness, + # so unpack as native here and compare + header = f.read(4) + if len(header) != 4: + return False + magic = unpack("I", header)[0] + return magic == MACH_MAGIC + + def list_dynamic_deps(self, objfile): + if not self.interesting_dep(objfile): + return + out = ( + subprocess.check_output( + ["otool", "-L", objfile], env=dict(self.env.items()) + ) + .decode("utf-8") + .strip() + ) + lines = out.split("\n") + deps = [] + for line in lines: + m = re.match("\t(\\S+)\\s", line) + if m: + if os.path.basename(m.group(1)) != os.path.basename(objfile): + deps.append(os.path.normcase(m.group(1))) + return deps + + def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir) -> None: + if objfile.endswith(".dylib"): + # Erase the original location from the id of the shared + # object. It doesn't appear to hurt to retain it, but + # it does look weird, so let's rewrite it to be sure. + self.check_call_verbose( + ["install_name_tool", "-id", os.path.basename(objfile), objfile] + ) + final_dep = os.path.join( + final_lib_dir, + # pyre-fixme[16]: `MachDeps` has no attribute `munged_lib_dir`. 
+ os.path.relpath(new_dep, self.munged_lib_dir), + ) + + self.check_call_verbose( + ["install_name_tool", "-change", depname, final_dep, objfile] + ) + + +def create_dyn_dep_munger( + buildopts, env, install_dirs, strip: bool = False +) -> Optional[DepBase]: + if buildopts.is_linux(): + return ElfDeps(buildopts, env, install_dirs, strip) + if buildopts.is_darwin(): + return MachDeps(buildopts, env, install_dirs, strip) + if buildopts.is_windows(): + return WinDeps(buildopts, env, install_dirs, strip) + if buildopts.is_freebsd(): + return ElfDeps(buildopts, env, install_dirs, strip) + return None diff --git a/build/fbcode_builder/getdeps/envfuncs.py b/build/fbcode_builder/getdeps/envfuncs.py new file mode 100644 index 000000000..f32418c93 --- /dev/null +++ b/build/fbcode_builder/getdeps/envfuncs.py @@ -0,0 +1,198 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# pyre-unsafe + +import os +import shlex +import sys +from typing import Optional + + +class Env(object): + def __init__(self, src=None) -> None: + self._dict = {} + if src is None: + self.update(os.environ) + else: + self.update(src) + + def update(self, src) -> None: + for k, v in src.items(): + self.set(k, v) + + def copy(self) -> "Env": + return Env(self._dict) + + def _key(self, key): + # The `str` cast may not appear to be needed, but without it we run + # into issues when passing the environment to subprocess. The main + # issue is that in python2 `os.environ` (which is the initial source + # of data for the environment) uses byte based strings, but this + # project uses `unicode_literals`. `subprocess` will raise an error + # if the environment that it is passed has a mixture of byte and + # unicode strings. + # It is simplest to force everything to be `str` for the sake of + # consistency. 
+ key = str(key) + if sys.platform.startswith("win"): + # Windows env var names are case insensitive but case preserving. + # An implementation of PAR files on windows gets confused if + # the env block contains keys with conflicting case, so make a + # pass over the contents to remove any. + # While this O(n) scan is technically expensive and gross, it + # is practically not a problem because the volume of calls is + # relatively low and the cost of manipulating the env is dwarfed + # by the cost of spawning a process on windows. In addition, + # since the processes that we run are expensive anyway, this + # overhead is not the worst thing to worry about. + for k in list(self._dict.keys()): + if str(k).lower() == key.lower(): + return k + elif key in self._dict: + return key + return None + + def get(self, key, defval=None): + key = self._key(key) + if key is None: + return defval + return self._dict[key] + + def __getitem__(self, key): + val = self.get(key) + if key is None: + raise KeyError(key) + return val + + def unset(self, key) -> None: + if key is None: + raise KeyError("attempting to unset env[None]") + + key = self._key(key) + if key: + del self._dict[key] + + def __delitem__(self, key) -> None: + self.unset(key) + + def __repr__(self): + return repr(self._dict) + + def set(self, key, value) -> None: + if key is None: + raise KeyError("attempting to assign env[None] = %r" % value) + + if value is None: + raise ValueError("attempting to assign env[%s] = None" % key) + + # The `str` conversion is important to avoid triggering errors + # with subprocess if we pass in a unicode value; see commentary + # in the `_key` method. + key = str(key) + value = str(value) + + # The `unset` call is necessary on windows where the keys are + # case insensitive. Since this dict is case sensitive, simply + # assigning the value to the new key is not sufficient to remove + # the old value. The `unset` call knows how to match keys and + # remove any potential duplicates. 
+ self.unset(key) + self._dict[key] = value + + def __setitem__(self, key, value) -> None: + self.set(key, value) + + def __iter__(self): + return self._dict.__iter__() + + def __len__(self) -> int: + return len(self._dict) + + def keys(self): + return self._dict.keys() + + def values(self): + return self._dict.values() + + def items(self): + return self._dict.items() + + +def add_path_entry( + env, name, item, append: bool = True, separator: str = os.pathsep +) -> None: + """Cause `item` to be added to the path style env var named + `name` held in the `env` dict. `append` specifies whether + the item is added to the end (the default) or should be + prepended if `name` already exists.""" + val = env.get(name, "") + if len(val) > 0: + val = val.split(separator) + else: + val = [] + if append: + val.append(item) + else: + val.insert(0, item) + env.set(name, separator.join(val)) + + +def add_flag(env, name, flag: str, append: bool = True) -> None: + """Cause `flag` to be added to the CXXFLAGS-style env var named + `name` held in the `env` dict. `append` specifies whether the + flag is added to the end (the default) or should be prepended if + `name` already exists.""" + val = shlex.split(env.get(name, "")) + if append: + val.append(flag) + else: + val.insert(0, flag) + env.set(name, " ".join(val)) + + +_path_search_cache = {} +_not_found = object() + + +def tpx_path() -> str: + return "xplat/testinfra/tpx/ctp.tpx" + + +def path_search(env, exename: str, defval: Optional[str] = None) -> Optional[str]: + """Search for exename in the PATH specified in env. + exename is eg: `ninja` and this function knows to append a .exe + to the end on windows. + Returns the path to the exe if found, or None if either no + PATH is set in env or no executable is found.""" + + path = env.get("PATH", None) + if path is None: + return defval + + # The project hash computation code searches for C++ compilers (g++, clang, etc) + # repeatedly. 
Cache the result so we don't end up searching for these over and over + # again. + cache_key = (path, exename) + result = _path_search_cache.get(cache_key, _not_found) + if result is _not_found: + result = _perform_path_search(path, exename) + _path_search_cache[cache_key] = result + return result + + +def _perform_path_search(path, exename: str) -> Optional[str]: + is_win = sys.platform.startswith("win") + if is_win: + exename = "%s.exe" % exename + + for bindir in path.split(os.pathsep): + full_name = os.path.join(bindir, exename) + if os.path.exists(full_name) and os.path.isfile(full_name): + if not is_win and not os.access(full_name, os.X_OK): + continue + return full_name + + return None diff --git a/build/fbcode_builder/getdeps/errors.py b/build/fbcode_builder/getdeps/errors.py new file mode 100644 index 000000000..1d01ad0ec --- /dev/null +++ b/build/fbcode_builder/getdeps/errors.py @@ -0,0 +1,19 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# pyre-unsafe + + +class TransientFailure(Exception): + """Raising this error causes getdeps to return with an error code + that Sandcastle will consider to be a retryable transient + infrastructure error""" + + pass + + +class ManifestNotFound(Exception): + def __init__(self, manifest_name) -> None: + super(Exception, self).__init__("Unable to find manifest '%s'" % manifest_name) diff --git a/build/fbcode_builder/getdeps/expr.py b/build/fbcode_builder/getdeps/expr.py new file mode 100644 index 000000000..0f51521d6 --- /dev/null +++ b/build/fbcode_builder/getdeps/expr.py @@ -0,0 +1,186 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +# pyre-unsafe + +import re +import shlex + + +def parse_expr(expr_text, valid_variables): + """parses the simple criteria expression syntax used in + dependency specifications. + Returns an ExprNode instance that can be evaluated like this: + + ``` + expr = parse_expr("os=windows") + ok = expr.eval({ + "os": "windows" + }) + ``` + + Whitespace is allowed between tokens. The following terms + are recognized: + + KEY = VALUE # Evaluates to True if ctx[KEY] == VALUE + not(EXPR) # Evaluates to True if EXPR evaluates to False + # and vice versa + all(EXPR1, EXPR2, ...) # Evaluates True if all of the supplied + # EXPR's also evaluate True + any(EXPR1, EXPR2, ...) # Evaluates True if any of the supplied + # EXPR's also evaluate True, False if + # none of them evaluated true. + """ + + p = Parser(expr_text, valid_variables) + return p.parse() + + +class ExprNode(object): + def eval(self, ctx) -> bool: + return False + + +class TrueExpr(ExprNode): + def eval(self, ctx) -> bool: + return True + + def __str__(self) -> str: + return "true" + + +class NotExpr(ExprNode): + def __init__(self, node) -> None: + self._node = node + + def eval(self, ctx) -> bool: + return not self._node.eval(ctx) + + def __str__(self) -> str: + return "not(%s)" % self._node + + +class AllExpr(ExprNode): + def __init__(self, nodes) -> None: + self._nodes = nodes + + def eval(self, ctx) -> bool: + for node in self._nodes: + if not node.eval(ctx): + return False + return True + + def __str__(self) -> str: + items = [] + for node in self._nodes: + items.append(str(node)) + return "all(%s)" % ",".join(items) + + +class AnyExpr(ExprNode): + def __init__(self, nodes) -> None: + self._nodes = nodes + + def eval(self, ctx) -> bool: + for node in self._nodes: + if node.eval(ctx): + return True + return False + + def __str__(self) -> str: + items = [] + for node in self._nodes: + items.append(str(node)) + return "any(%s)" % ",".join(items) + + +class EqualExpr(ExprNode): + def __init__(self, key, value) -> 
None: + self._key = key + self._value = value + + def eval(self, ctx): + return ctx.get(self._key) == self._value + + def __str__(self) -> str: + return "%s=%s" % (self._key, self._value) + + +class Parser(object): + def __init__(self, text, valid_variables) -> None: + self.text = text + self.lex = shlex.shlex(text) + self.valid_variables = valid_variables + + def parse(self): + expr = self.top() + garbage = self.lex.get_token() + if garbage != "": + raise Exception( + "Unexpected token %s after EqualExpr in %s" % (garbage, self.text) + ) + return expr + + def top(self): + name = self.ident() + op = self.lex.get_token() + + if op == "(": + parsers = { + "not": self.parse_not, + "any": self.parse_any, + "all": self.parse_all, + } + func = parsers.get(name) + if not func: + raise Exception("invalid term %s in %s" % (name, self.text)) + return func() + + if op == "=": + if name not in self.valid_variables: + raise Exception("unknown variable %r in expression" % (name,)) + # remove shell quote from value so can test things with period in them, e.g "18.04" + unquoted = " ".join(shlex.split(self.lex.get_token())) + return EqualExpr(name, unquoted) + + raise Exception( + "Unexpected token sequence '%s %s' in %s" % (name, op, self.text) + ) + + def ident(self) -> str: + ident = self.lex.get_token() + if not re.match("[a-zA-Z]+", ident): + raise Exception("expected identifier found %s" % ident) + return ident + + def parse_not(self) -> NotExpr: + node = self.top() + expr = NotExpr(node) + tok = self.lex.get_token() + if tok != ")": + raise Exception("expected ')' found %s" % tok) + return expr + + def parse_any(self) -> AnyExpr: + nodes = [] + while True: + nodes.append(self.top()) + tok = self.lex.get_token() + if tok == ")": + break + if tok != ",": + raise Exception("expected ',' or ')' but found %s" % tok) + return AnyExpr(nodes) + + def parse_all(self) -> AllExpr: + nodes = [] + while True: + nodes.append(self.top()) + tok = self.lex.get_token() + if tok == ")": + 
break + if tok != ",": + raise Exception("expected ',' or ')' but found %s" % tok) + return AllExpr(nodes) diff --git a/build/fbcode_builder/getdeps/fetcher.py b/build/fbcode_builder/getdeps/fetcher.py new file mode 100644 index 000000000..30cff5b7d --- /dev/null +++ b/build/fbcode_builder/getdeps/fetcher.py @@ -0,0 +1,911 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# pyre-unsafe + +import errno +import hashlib +import os +import re +import shutil +import stat +import subprocess +import sys +import tarfile +import time +import zipfile +from datetime import datetime +from typing import Dict, NamedTuple +from urllib.parse import urlparse +from urllib.request import Request, urlopen + +from .copytree import prefetch_dir_if_eden +from .envfuncs import Env +from .errors import TransientFailure +from .platform import is_windows +from .runcmd import run_cmd + + +def file_name_is_cmake_file(file_name): + file_name = file_name.lower() + base = os.path.basename(file_name) + return ( + base.endswith(".cmake") + or base.endswith(".cmake.in") + or base == "cmakelists.txt" + ) + + +class ChangeStatus(object): + """Indicates the nature of changes that happened while updating + the source directory. There are two broad uses: + * When extracting archives for third party software we want to + know that we did something (eg: we either extracted code or + we didn't do anything) + * For 1st party code where we use shipit to transform the code, + we want to know if we changed anything so that we can perform + a build, but we generally want to be a little more nuanced + and be able to distinguish between just changing a source file + and whether we might need to reconfigure the build system. + """ + + def __init__(self, all_changed: bool = False) -> None: + """Construct a ChangeStatus object. 
+        The default is to create
+        a status that indicates no changes, but passing all_changed=True
+        will create one that indicates that everything changed"""
+        if all_changed:
+            self.source_files = 1
+            self.make_files = 1
+        else:
+            self.source_files = 0
+            self.make_files = 0
+
+    def record_change(self, file_name) -> None:
+        """Used by the shipit fetcher to record changes as it updates
+        files in the destination. If the file name might be one used
+        in the cmake build system that we use for 1st party code, then
+        record that as a "make file" change. We could broaden this
+        to match any file used by various build systems, but it is
+        only really useful for our internal cmake stuff at this time.
+        If the file isn't a build file and is under the `fbcode_builder`
+        dir then we don't class that as an interesting change that we
+        might need to rebuild, so we ignore it.
+        Otherwise we record the file as a source file change."""
+
+        file_name = file_name.lower()
+        if file_name_is_cmake_file(file_name):
+            self.make_files += 1
+        elif "/fbcode_builder/cmake" in file_name:
+            self.source_files += 1
+        elif "/fbcode_builder/" not in file_name:
+            self.source_files += 1
+
+    def sources_changed(self) -> bool:
+        """Returns true if any source files were changed during
+        an update operation. This will typically be used to decide
+        that the build system is to be run on the source dir in an
+        incremental mode"""
+        return self.source_files > 0
+
+    def build_changed(self) -> bool:
+        """Returns true if any build files were changed during
+        an update operation. This will typically be used to decide
+        that the build system should be reconfigured and re-run
+        as a full build"""
+        return self.make_files > 0
+
+
+class Fetcher(object):
+    """The Fetcher is responsible for fetching and extracting the
+    sources for project. 
The Fetcher instance defines where the + extracted data resides and reports this to the consumer via + its `get_src_dir` method.""" + + def update(self) -> ChangeStatus: + """Brings the src dir up to date, ideally minimizing + changes so that a subsequent build doesn't over-build. + Returns a ChangeStatus object that helps the caller to + understand the nature of the changes required during + the update.""" + return ChangeStatus() + + def clean(self) -> None: + """Reverts any changes that might have been made to + the src dir""" + pass + + def hash(self) -> None: + """Returns a hash that identifies the version of the code in the + working copy. For a git repo this is commit hash for the working + copy. For other Fetchers this should relate to the version of + the code in the src dir. The intent is that if a manifest + changes the version/rev of a project that the hash be different. + Importantly, this should be computable without actually fetching + the code, as we want this to factor into a hash used to download + a pre-built version of the code, without having to first download + and extract its sources (eg: boost on windows is pretty painful). + """ + pass + + def get_src_dir(self) -> None: + """Returns the source directory that the project was + extracted into""" + pass + + +class LocalDirFetcher(object): + """This class exists to override the normal fetching behavior, and + use an explicit user-specified directory for the project sources. + + This fetcher cannot update or track changes. 
It always reports that the + project has changed, forcing it to always be built.""" + + def __init__(self, path) -> None: + self.path = os.path.realpath(path) + + def update(self) -> ChangeStatus: + return ChangeStatus(all_changed=True) + + def hash(self) -> str: + return "0" * 40 + + def get_src_dir(self): + return self.path + + +class SystemPackageFetcher(object): + def __init__(self, build_options, packages) -> None: + self.manager = build_options.host_type.get_package_manager() + self.packages = packages.get(self.manager) + self.host_type = build_options.host_type + if self.packages: + self.installed = None + else: + self.installed = False + + def packages_are_installed(self): + if self.installed is not None: + return self.installed + + cmd = None + if self.manager == "rpm": + cmd = ["rpm", "-q"] + sorted(self.packages) + elif self.manager == "deb": + cmd = ["dpkg", "-s"] + sorted(self.packages) + elif self.manager == "homebrew": + cmd = ["brew", "ls", "--versions"] + sorted(self.packages) + + if cmd: + proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + if proc.returncode == 0: + # captured as binary as we will hash this later + self.installed = proc.stdout + else: + # Need all packages to be present to consider us installed + self.installed = False + + else: + self.installed = False + + return bool(self.installed) + + def update(self) -> ChangeStatus: + assert self.installed + return ChangeStatus(all_changed=False) + + def hash(self) -> str: + if self.packages_are_installed(): + return hashlib.sha256(self.installed).hexdigest() + else: + return "0" * 40 + + def get_src_dir(self) -> None: + return None + + +class PreinstalledNopFetcher(SystemPackageFetcher): + def __init__(self) -> None: + self.installed = True + + +class GitFetcher(Fetcher): + DEFAULT_DEPTH = 1 + + def __init__(self, build_options, manifest, repo_url, rev, depth) -> None: + # Extract the host/path portions of the URL and generate a flattened + # directory name. 
eg: + # github.com/facebook/folly.git -> github.com-facebook-folly.git + url = urlparse(repo_url) + directory = "%s%s" % (url.netloc, url.path) + for s in ["/", "\\", ":"]: + directory = directory.replace(s, "-") + + # Place it in a repos dir in the scratch space + repos_dir = os.path.join(build_options.scratch_dir, "repos") + if not os.path.exists(repos_dir): + os.makedirs(repos_dir) + self.repo_dir = os.path.join(repos_dir, directory) + + if not rev and build_options.project_hashes: + hash_file = os.path.join( + build_options.project_hashes, + re.sub("\\.git$", "-rev.txt", url.path[1:]), + ) + if os.path.exists(hash_file): + with open(hash_file, "r") as f: + data = f.read() + m = re.match("Subproject commit ([a-fA-F0-9]{40})", data) + if not m: + raise Exception("Failed to parse rev from %s" % hash_file) + rev = m.group(1) + print("Using pinned rev %s for %s" % (rev, repo_url)) + + self.rev = rev or "main" + self.origin_repo = repo_url + self.manifest = manifest + self.depth = depth if depth else GitFetcher.DEFAULT_DEPTH + + def _update(self) -> ChangeStatus: + current_hash = ( + subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=self.repo_dir) + .strip() + .decode("utf-8") + ) + target_hash = ( + subprocess.check_output(["git", "rev-parse", self.rev], cwd=self.repo_dir) + .strip() + .decode("utf-8") + ) + if target_hash == current_hash: + # It's up to date, so there are no changes. This doesn't detect eg: + # if origin/main moved and rev='main', but that's ok for our purposes; + # we should be using explicit hashes or eg: a stable branch for the cases + # that we care about, and it isn't unreasonable to require that the user + # explicitly perform a clean build if those have moved. For the most + # part we prefer that folks build using a release tarball from github + # rather than use the git protocol, as it is generally a bit quicker + # to fetch and easier to hash and verify tarball downloads. 
+ return ChangeStatus() + + print("Updating %s -> %s" % (self.repo_dir, self.rev)) + run_cmd(["git", "fetch", "origin", self.rev], cwd=self.repo_dir) + run_cmd(["git", "checkout", self.rev], cwd=self.repo_dir) + run_cmd(["git", "submodule", "update", "--init"], cwd=self.repo_dir) + + return ChangeStatus(True) + + def update(self) -> ChangeStatus: + if os.path.exists(self.repo_dir): + return self._update() + self._clone() + return ChangeStatus(True) + + def _clone(self) -> None: + print("Cloning %s..." % self.origin_repo) + # The basename/dirname stuff allows us to dance around issues where + # eg: this python process is native win32, but the git.exe is cygwin + # or msys and doesn't like the absolute windows path that we'd otherwise + # pass to it. Careful use of cwd helps avoid headaches with cygpath. + run_cmd( + [ + "git", + "clone", + "--depth=" + str(self.depth), + "--", + self.origin_repo, + os.path.basename(self.repo_dir), + ], + cwd=os.path.dirname(self.repo_dir), + ) + self._update() + + def clean(self) -> None: + if os.path.exists(self.repo_dir): + run_cmd(["git", "clean", "-fxd"], cwd=self.repo_dir) + + def hash(self): + return self.rev + + def get_src_dir(self): + return self.repo_dir + + +def does_file_need_update(src_name, src_st, dest_name): + try: + target_st = os.lstat(dest_name) + except OSError as exc: + if exc.errno != errno.ENOENT: + raise + return True + + if src_st.st_size != target_st.st_size: + return True + + if stat.S_IFMT(src_st.st_mode) != stat.S_IFMT(target_st.st_mode): + return True + if stat.S_ISLNK(src_st.st_mode): + return os.readlink(src_name) != os.readlink(dest_name) + if not stat.S_ISREG(src_st.st_mode): + return True + + # They might have the same content; compare. 
+ with open(src_name, "rb") as sf, open(dest_name, "rb") as df: + chunk_size = 8192 + while True: + src_data = sf.read(chunk_size) + dest_data = df.read(chunk_size) + if src_data != dest_data: + return True + if len(src_data) < chunk_size: + # EOF + break + return False + + +def copy_if_different(src_name, dest_name) -> bool: + """Copy src_name -> dest_name, but only touch dest_name + if src_name is different from dest_name, making this a + more build system friendly way to copy.""" + src_st = os.lstat(src_name) + if not does_file_need_update(src_name, src_st, dest_name): + return False + + dest_parent = os.path.dirname(dest_name) + if not os.path.exists(dest_parent): + os.makedirs(dest_parent) + if stat.S_ISLNK(src_st.st_mode): + try: + os.unlink(dest_name) + except OSError as exc: + if exc.errno != errno.ENOENT: + raise + target = os.readlink(src_name) + print("Symlinking %s -> %s" % (dest_name, target)) + os.symlink(target, dest_name) + else: + print("Copying %s -> %s" % (src_name, dest_name)) + shutil.copy2(src_name, dest_name) + + return True + + +def list_files_under_dir_newer_than_timestamp(dir_to_scan, ts): + for root, _dirs, files in os.walk(dir_to_scan): + for src_file in files: + full_name = os.path.join(root, src_file) + st = os.lstat(full_name) + if st.st_mtime > ts: + yield full_name + + +class ShipitPathMap(object): + def __init__(self) -> None: + self.roots = [] + self.mapping = [] + self.exclusion = [] + + def add_mapping(self, fbsource_dir, target_dir) -> None: + """Add a posix path or pattern. We cannot normpath the input + here because that would change the paths from posix to windows + form and break the logic throughout this class.""" + self.roots.append(fbsource_dir) + self.mapping.append((fbsource_dir, target_dir)) + + def add_exclusion(self, pattern) -> None: + self.exclusion.append(re.compile(pattern)) + + def _minimize_roots(self) -> None: + """compute the de-duplicated set of roots within fbsource. 
+ We take the shortest common directory prefix to make this + determination""" + self.roots.sort(key=len) + minimized = [] + + for r in self.roots: + add_this_entry = True + for existing in minimized: + if r.startswith(existing + "/"): + add_this_entry = False + break + if add_this_entry: + minimized.append(r) + + self.roots = minimized + + def _sort_mapping(self) -> None: + self.mapping.sort(reverse=True, key=lambda x: len(x[0])) + + def _map_name(self, norm_name, dest_root): + if norm_name.endswith(".pyc") or norm_name.endswith(".swp"): + # Ignore some incidental garbage while iterating + return None + + for excl in self.exclusion: + if excl.match(norm_name): + return None + + for src_name, dest_name in self.mapping: + if norm_name == src_name or norm_name.startswith(src_name + "/"): + rel_name = os.path.relpath(norm_name, src_name) + # We can have "." as a component of some paths, depending + # on the contents of the shipit transformation section. + # normpath doesn't always remove `.` as the final component + # of the path, which be problematic when we later mkdir + # the dirname of the path that we return. Take care to avoid + # returning a path with a `.` in it. + rel_name = os.path.normpath(rel_name) + if dest_name == ".": + return os.path.normpath(os.path.join(dest_root, rel_name)) + dest_name = os.path.normpath(dest_name) + return os.path.normpath(os.path.join(dest_root, dest_name, rel_name)) + + raise Exception("%s did not match any rules" % norm_name) + + def mirror(self, fbsource_root, dest_root) -> ChangeStatus: + self._minimize_roots() + self._sort_mapping() + + change_status = ChangeStatus() + + # Record the full set of files that should be in the tree + full_file_list = set() + + if sys.platform == "win32": + # Let's not assume st_dev has a consistent value on Windows. 
+ def st_dev(path): + return 1 + + else: + + def st_dev(path): + return os.lstat(path).st_dev + + for fbsource_subdir in self.roots: + dir_to_mirror = os.path.join(fbsource_root, fbsource_subdir) + root_dev = st_dev(dir_to_mirror) + prefetch_dir_if_eden(dir_to_mirror) + if not os.path.exists(dir_to_mirror): + raise Exception( + "%s doesn't exist; check your sparse profile!" % dir_to_mirror + ) + + for root, dirs, files in os.walk(dir_to_mirror): + dirs[:] = [d for d in dirs if root_dev == st_dev(os.path.join(root, d))] + + for src_file in files: + full_name = os.path.join(root, src_file) + rel_name = os.path.relpath(full_name, fbsource_root) + norm_name = rel_name.replace("\\", "/") + + target_name = self._map_name(norm_name, dest_root) + if target_name: + full_file_list.add(target_name) + if copy_if_different(full_name, target_name): + change_status.record_change(target_name) + + # Compare the list of previously shipped files; if a file is + # in the old list but not the new list then it has been + # removed from the source and should be removed from the + # destination. + # Why don't we simply create this list by walking dest_root? 
+ # Some builds currently have to be in-source builds and + # may legitimately need to keep some state in the source tree :-/ + installed_name = os.path.join(dest_root, ".shipit_shipped") + if os.path.exists(installed_name): + with open(installed_name, "rb") as f: + for name in f.read().decode("utf-8").splitlines(): + name = name.strip() + if name not in full_file_list: + print("Remove %s" % name) + os.unlink(name) + change_status.record_change(name) + + with open(installed_name, "wb") as f: + for name in sorted(list(full_file_list)): + f.write(("%s\n" % name).encode("utf-8")) + + return change_status + + +class FbsourceRepoData(NamedTuple): + hash: str + date: str + + +FBSOURCE_REPO_DATA: Dict[str, FbsourceRepoData] = {} + + +def get_fbsource_repo_data(build_options) -> FbsourceRepoData: + """Returns the commit metadata for the fbsource repo. + Since we may have multiple first party projects to + hash, and because we don't mutate the repo, we cache + this hash in a global.""" + cached_data = FBSOURCE_REPO_DATA.get(build_options.fbsource_dir) + if cached_data: + return cached_data + + if "GETDEPS_HG_REPO_DATA" in os.environ: + log_data = os.environ["GETDEPS_HG_REPO_DATA"] + else: + cmd = ["hg", "log", "-r.", "-T{node}\n{date|hgdate}"] + env = Env() + env.set("HGPLAIN", "1") + log_data = subprocess.check_output( + cmd, cwd=build_options.fbsource_dir, env=dict(env.items()) + ).decode("ascii") + + (hash, datestr) = log_data.split("\n") + + # datestr is like "seconds fractionalseconds" + # We want "20200324.113140" + (unixtime, _fractional) = datestr.split(" ") + date = datetime.fromtimestamp(int(unixtime)).strftime("%Y%m%d.%H%M%S") + cached_data = FbsourceRepoData(hash=hash, date=date) + + FBSOURCE_REPO_DATA[build_options.fbsource_dir] = cached_data + + return cached_data + + +class SimpleShipitTransformerFetcher(Fetcher): + def __init__(self, build_options, manifest, ctx) -> None: + self.build_options = build_options + self.manifest = manifest + self.repo_dir = 
os.path.join(build_options.scratch_dir, "shipit", manifest.name) + self.ctx = ctx + + def clean(self) -> None: + if os.path.exists(self.repo_dir): + shutil.rmtree(self.repo_dir) + + def update(self) -> ChangeStatus: + mapping = ShipitPathMap() + for src, dest in self.manifest.get_section_as_ordered_pairs( + "shipit.pathmap", self.ctx + ): + mapping.add_mapping(src, dest) + if self.manifest.shipit_fbcode_builder: + mapping.add_mapping( + "fbcode/opensource/fbcode_builder", "build/fbcode_builder" + ) + for pattern in self.manifest.get_section_as_args("shipit.strip", self.ctx): + mapping.add_exclusion(pattern) + + return mapping.mirror(self.build_options.fbsource_dir, self.repo_dir) + + # pyre-fixme[15]: `hash` overrides method defined in `Fetcher` inconsistently. + def hash(self) -> str: + # We return a fixed non-hash string for in-fbsource builds. + # We're relying on the `update` logic to correctly invalidate + # the build in the case that files have changed. + return "fbsource" + + def get_src_dir(self): + return self.repo_dir + + +class SubFetcher(Fetcher): + """Fetcher for a project with subprojects""" + + def __init__(self, base, subs) -> None: + self.base = base + self.subs = subs + + def update(self) -> ChangeStatus: + base = self.base.update() + changed = base.build_changed() or base.sources_changed() + for fetcher, dir in self.subs: + stat = fetcher.update() + if stat.build_changed() or stat.sources_changed(): + changed = True + link = self.base.get_src_dir() + "/" + dir + if not os.path.exists(link): + os.symlink(fetcher.get_src_dir(), link) + return ChangeStatus(changed) + + def clean(self) -> None: + self.base.clean() + for fetcher, _ in self.subs: + fetcher.clean() + + def hash(self) -> None: + hash = self.base.hash() + for fetcher, _ in self.subs: + hash += fetcher.hash() + + def get_src_dir(self): + return self.base.get_src_dir() + + +class ShipitTransformerFetcher(Fetcher): + @classmethod + def _shipit_paths(cls, build_options): + www_path = 
["/var/www/scripts/opensource/codesync"] + if build_options.fbsource_dir: + fbcode_path = [ + os.path.join( + build_options.fbsource_dir, + "fbcode/opensource/codesync/codesync-cli/codesync", + ) + ] + else: + fbcode_path = [] + return www_path + fbcode_path + + def __init__(self, build_options, project_name) -> None: + self.build_options = build_options + self.project_name = project_name + self.repo_dir = os.path.join(build_options.scratch_dir, "shipit", project_name) + self.shipit = None + for path in ShipitTransformerFetcher._shipit_paths(build_options): + if os.path.exists(path): + self.shipit = path + break + + def update(self) -> ChangeStatus: + if os.path.exists(self.repo_dir): + return ChangeStatus() + self.run_shipit() + return ChangeStatus(True) + + def clean(self) -> None: + if os.path.exists(self.repo_dir): + shutil.rmtree(self.repo_dir) + + @classmethod + def available(cls, build_options): + return any( + os.path.exists(path) + for path in ShipitTransformerFetcher._shipit_paths(build_options) + ) + + def run_shipit(self) -> None: + tmp_path = self.repo_dir + ".new" + try: + if os.path.exists(tmp_path): + shutil.rmtree(tmp_path) + os.makedirs(os.path.dirname(tmp_path), exist_ok=True) + + # Run shipit + run_cmd( + [ + self.shipit, + "shipit", + "--project=" + self.project_name, + "--create-new-repo", + "--source-repo-dir=" + self.build_options.fbsource_dir, + "--source-branch=.", + "--skip-source-init", + "--skip-source-pull", + "--skip-source-clean", + "--skip-push", + "--destination-use-anonymous-https", + "--create-new-repo-output-path=" + tmp_path, + ] + ) + + # Remove the .git directory from the repository it generated. + # There is no need to commit this. 
def download_url_to_file_with_progress(url: str, file_name) -> None:
    """Download `url` into `file_name`, printing progress as it goes.

    The transport is selected by environment variables: GETDEPS_USE_WGET
    forks wget (with optional GETDEPS_WGET_ARGS), GETDEPS_USE_LIBCURL uses
    pycurl, otherwise urllib is used.  Raises TransientFailure when the
    download fails.
    """
    print("Download with %s -> %s ..." % (url, file_name))

    class Progress(object):
        # Timestamp of the last non-tty status line, used to rate-limit output
        last_report = 0

        def write_update(self, total, amount):
            if total == -1:
                total = "(Unknown)"

            if sys.stdout.isatty():
                sys.stdout.write("\r downloading %s of %s " % (amount, total))
            else:
                # When logging to CI logs, avoid spamming the logs and print
                # status every few seconds
                now = time.time()
                if now - self.last_report > 5:
                    sys.stdout.write(".. %s of %s " % (amount, total))
                    self.last_report = now
            sys.stdout.flush()

        def progress_pycurl(self, total, amount, _uploadtotal, _uploadamount):
            self.write_update(total, amount)

    progress = Progress()
    start = time.time()
    try:
        if os.environ.get("GETDEPS_USE_WGET") is not None:
            procargs = (
                [
                    "wget",
                ]
                + os.environ.get("GETDEPS_WGET_ARGS", "").split()
                + [
                    "-O",
                    file_name,
                    url,
                ]
            )
            proc = subprocess.run(procargs, capture_output=True)
            # BUG FIX: wget's exit status was previously ignored, so a
            # failed download fell through silently and later stages saw
            # a missing or truncated file.  Surface the failure here.
            if proc.returncode != 0:
                raise TransientFailure(
                    "Failed to download %s to %s: wget exited %d: %s"
                    % (
                        url,
                        file_name,
                        proc.returncode,
                        proc.stderr.decode(errors="replace"),
                    )
                )
            headers = None

        elif os.environ.get("GETDEPS_USE_LIBCURL") is not None:
            import pycurl

            with open(file_name, "wb") as f:
                c = pycurl.Curl()
                c.setopt(pycurl.URL, url)
                c.setopt(pycurl.WRITEDATA, f)
                # display progress
                c.setopt(pycurl.NOPROGRESS, False)
                c.setopt(pycurl.XFERINFOFUNCTION, progress.progress_pycurl)
                c.perform()
                c.close()
            headers = None
        else:
            req_header = {"Accept": "application/*"}
            res = urlopen(Request(url, None, req_header))
            chunk_size = 8192  # urlretrieve uses this value
            headers = res.headers
            content_length = res.headers.get("Content-Length")
            total = int(content_length.strip()) if content_length else -1
            amount = 0
            with open(file_name, "wb") as f:
                chunk = res.read(chunk_size)
                while chunk:
                    f.write(chunk)
                    amount += len(chunk)
                    progress.write_update(total, amount)
                    chunk = res.read(chunk_size)
    except (OSError, IOError) as exc:  # noqa: B014
        raise TransientFailure(
            "Failed to download %s to %s: %s" % (url, file_name, str(exc))
        )

    end = time.time()
    sys.stdout.write(" [Complete in %f seconds]\n" % (end - start))
    sys.stdout.flush()
    if headers is not None:
        print(f"{headers}")
"downloads", basename) + self.src_dir = os.path.join(build_options.scratch_dir, "extracted", basename) + self.hash_file = self.src_dir + ".hash" + + def _verify_hash(self) -> None: + h = hashlib.sha256() + with open(self.file_name, "rb") as f: + while True: + block = f.read(8192) + if not block: + break + h.update(block) + digest = h.hexdigest() + if digest != self.sha256: + os.unlink(self.file_name) + raise Exception( + "%s: expected sha256 %s but got %s" % (self.url, self.sha256, digest) + ) + + def _download_dir(self): + """returns the download dir, creating it if it doesn't already exist""" + download_dir = os.path.dirname(self.file_name) + if not os.path.exists(download_dir): + os.makedirs(download_dir) + return download_dir + + def _download(self) -> None: + self._download_dir() + download_url_to_file_with_progress(self.url, self.file_name) + self._verify_hash() + + def clean(self) -> None: + if os.path.exists(self.src_dir): + shutil.rmtree(self.src_dir) + + def update(self) -> ChangeStatus: + try: + with open(self.hash_file, "r") as f: + saved_hash = f.read().strip() + if saved_hash == self.sha256 and os.path.exists(self.src_dir): + # Everything is up to date + return ChangeStatus() + print( + "saved hash %s doesn't match expected hash %s, re-validating" + % (saved_hash, self.sha256) + ) + os.unlink(self.hash_file) + except EnvironmentError: + pass + + # If we got here we know the contents of src_dir are either missing + # or wrong, so blow away whatever happened to be there first. 
+ if os.path.exists(self.src_dir): + shutil.rmtree(self.src_dir) + + # If we already have a file here, make sure it looks legit before + # proceeding: any errors and we just remove it and re-download + if os.path.exists(self.file_name): + try: + self._verify_hash() + except Exception: + if os.path.exists(self.file_name): + os.unlink(self.file_name) + + if not os.path.exists(self.file_name): + self._download() + + if tarfile.is_tarfile(self.file_name): + opener = tarfile.open + elif zipfile.is_zipfile(self.file_name): + opener = zipfile.ZipFile + else: + raise Exception("don't know how to extract %s" % self.file_name) + os.makedirs(self.src_dir) + print("Extract %s -> %s" % (self.file_name, self.src_dir)) + t = opener(self.file_name) + if is_windows(): + # Ensure that we don't fall over when dealing with long paths + # on windows + src = r"\\?\%s" % os.path.normpath(self.src_dir) + else: + src = self.src_dir + # The `str` here is necessary to ensure that we don't pass a unicode + # object down to tarfile.extractall on python2. When extracting + # the boost tarball it makes some assumptions and tries to convert + # a non-ascii path to ascii and throws. + src = str(src) + t.extractall(src) + + with open(self.hash_file, "w") as f: + f.write(self.sha256) + + return ChangeStatus(True) + + def hash(self): + return self.sha256 + + def get_src_dir(self): + return self.src_dir + + +def homebrew_package_prefix(package): + cmd = ["brew", "--prefix", package] + try: + proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except FileNotFoundError: + return + + if proc.returncode == 0: + return proc.stdout.decode("utf-8").rstrip() diff --git a/build/fbcode_builder/getdeps/load.py b/build/fbcode_builder/getdeps/load.py new file mode 100644 index 000000000..85a79a973 --- /dev/null +++ b/build/fbcode_builder/getdeps/load.py @@ -0,0 +1,366 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
class Loader(object):
    """The loader allows our tests to patch the load operation"""

    def _list_manifests(self, build_opts):
        """Yield the full path of every manifest file under the
        manifests directory, recursively, skipping hidden files."""
        for dirpath, _dirnames, filenames in os.walk(build_opts.manifests_dir):
            for filename in filenames:
                # skip hidden files
                if filename.startswith("."):
                    continue
                yield os.path.join(dirpath, filename)

    def _load_manifest(self, path):
        return ManifestParser(path)

    def load_project(self, build_opts, project_name):
        """Return the ManifestParser for `project_name`, which may also be
        an explicit path to a manifest file."""
        if "/" in project_name or "\\" in project_name:
            # A path separator means the caller handed us a manifest path
            return ManifestParser(project_name)

        for candidate in self._list_manifests(build_opts):
            if os.path.basename(candidate) == project_name:
                return ManifestParser(candidate)

        raise ManifestNotFound(project_name)

    def load_all(self, build_opts):
        """Load every available manifest, keyed by project name."""
        loaded = {}
        for path in self._list_manifests(build_opts):
            manifest = self._load_manifest(path)
            if manifest.name in loaded:
                raise Exception("found duplicate manifest '%s'" % manifest.name)
            loaded[manifest.name] = manifest
        return loaded
class ResourceLoader(Loader):
    """Loader that reads manifests bundled as package resources rather than
    from the filesystem (used when getdeps ships inside a package)."""

    def __init__(self, namespace, manifests_dir) -> None:
        self.namespace = namespace
        self.manifests_dir = manifests_dir

    def _list_manifests(self, _build_opts=None):
        """Yield resource paths of all bundled manifests.

        BUG FIX: _find_manifest() calls this without an argument, but the
        Loader interface declares a build_opts parameter; it is unused
        here, so give it a default rather than raising TypeError."""
        import pkg_resources

        dirs = [self.manifests_dir]

        while dirs:
            current = dirs.pop(0)
            for name in pkg_resources.resource_listdir(self.namespace, current):
                path = "%s/%s" % (current, name)

                if pkg_resources.resource_isdir(self.namespace, path):
                    dirs.append(path)
                else:
                    yield "%s/%s" % (current, name)

    def _find_manifest(self, project_name):
        # Resolve a bare project name to its bundled resource path.
        for name in self._list_manifests():
            if name.endswith("/%s" % project_name):
                return name

        raise ManifestNotFound(project_name)

    def _load_manifest(self, path: str):
        import pkg_resources

        contents = pkg_resources.resource_string(self.namespace, path).decode("utf8")
        return ManifestParser(file_name=path, fp=contents)

    def load_project(self, build_opts, project_name):
        # BUG FIX: this previously called the nonexistent
        # self._load_resource_manifest(); _load_manifest() is the method
        # actually defined on this class.
        path = self._find_manifest(project_name)
        return self._load_manifest(path)
+ """ + + def __init__(self, build_opts, ctx_gen=None) -> None: + self._loader = LOADER + self.build_opts = build_opts + if ctx_gen is None: + self.ctx_gen = self.build_opts.get_context_generator() + else: + self.ctx_gen = ctx_gen + + self.manifests_by_name = {} + self._loaded_all = False + self._project_hashes = {} + self._fetcher_overrides = {} + self._build_dir_overrides = {} + self._install_dir_overrides = {} + self._install_prefix_overrides = {} + + def load_manifest(self, name): + manifest = self.manifests_by_name.get(name) + if manifest is None: + manifest = self._loader.load_project(self.build_opts, name) + self.manifests_by_name[name] = manifest + return manifest + + def load_all_manifests(self): + if not self._loaded_all: + all_manifests_by_name = self._loader.load_all(self.build_opts) + if self.manifests_by_name: + # To help ensure that we only ever have a single manifest object for a + # given project, and that it can't change once we have loaded it, + # only update our mapping for projects that weren't already loaded. + for name, manifest in all_manifests_by_name.items(): + self.manifests_by_name.setdefault(name, manifest) + else: + self.manifests_by_name = all_manifests_by_name + self._loaded_all = True + + return self.manifests_by_name + + def dependencies_of(self, manifest): + """Returns the dependencies of the given project, not including the project itself, in topological order.""" + return [ + dep + for dep in self.manifests_in_dependency_order(manifest) + if dep != manifest + ] + + def manifests_in_dependency_order(self, manifest=None): + """Compute all dependencies of the specified project. Returns a list of the + dependencies plus the project itself, in topologically sorted order. + + Each entry in the returned list only depends on projects that appear before it + in the list. + + If the input manifest is None, the dependencies for all currently loaded + projects will be computed. 
    def manifests_in_dependency_order(self, manifest=None):
        """Compute all dependencies of the specified project. Returns a list of the
        dependencies plus the project itself, in topologically sorted order.

        Each entry in the returned list only depends on projects that appear before it
        in the list.

        If the input manifest is None, the dependencies for all currently loaded
        projects will be computed. i.e., if you call load_all_manifests() followed by
        manifests_in_dependency_order() this will return a global dependency ordering of
        all projects."""
        # The list of deps that have been fully processed
        seen = set()
        # The list of deps which have yet to be evaluated. This
        # can potentially contain duplicates.
        if manifest is None:
            deps = list(self.manifests_by_name.values())
        else:
            assert manifest.name in self.manifests_by_name
            deps = [manifest]
        # The list of manifests in dependency order
        dep_order = []
        # Accumulated system packages of everything emitted so far; each
        # manifest gets a snapshot of this in resolved_system_packages.
        system_packages = {}

        while len(deps) > 0:
            m = deps.pop(0)
            if m.name in seen:
                continue

            # Consider its deps, if any.
            # We sort them for increased determinism; we'll produce
            # a correct order even if they aren't sorted, but we prefer
            # to produce the same order regardless of how they are listed
            # in the project manifest files.
            ctx = self.ctx_gen.get_context(m.name)
            dep_list = m.get_dependencies(ctx)

            dep_count = 0
            for dep_name in dep_list:
                # If we're not sure whether it is done, queue it up
                if dep_name not in seen:
                    dep = self.manifests_by_name.get(dep_name)
                    if dep is None:
                        # Lazily load deps that weren't explicitly loaded yet
                        dep = self._loader.load_project(self.build_opts, dep_name)
                        self.manifests_by_name[dep.name] = dep

                    deps.append(dep)
                    dep_count += 1

            if dep_count > 0:
                # If we queued anything, re-queue this item, as it depends
                # those new item(s) and their transitive deps.
                deps.append(m)
                continue

            # Its deps are done, so we can emit it
            seen.add(m.name)
            # Capture system packages as we may need to set PATHs to then later
            if (
                self.build_opts.allow_system_packages
                and self.build_opts.host_type.get_package_manager()
            ):
                packages = m.get_required_system_packages(ctx)
                for pkg_type, v in packages.items():
                    merged = system_packages.get(pkg_type, [])
                    # NOTE(review): `v` is a list, so `v not in merged` tests
                    # whether the whole list appears as a single element of
                    # `merged`; it looks like per-package deduplication was
                    # intended here — confirm.
                    if v not in merged:
                        merged += v
                    system_packages[pkg_type] = merged
            # A manifest depends on all system packages in it dependencies as well
            m.resolved_system_packages = copy.copy(system_packages)
            dep_order.append(m)

        return dep_order
    def _compute_project_hash(self, manifest) -> str:
        """This recursive function computes a hash for a given manifest.
        The hash takes into account some environmental factors on the
        host machine and includes the hashes of its dependencies.
        No caching of the computation is performed, which is theoretically
        wasteful but the computation is fast enough that it is not required
        to cache across multiple invocations."""
        ctx = self.ctx_gen.get_context(manifest.name)

        hasher = hashlib.sha256()
        # Some environmental and configuration things matter
        env = {}
        env["install_dir"] = self.build_opts.install_dir
        env["scratch_dir"] = self.build_opts.scratch_dir
        env["vcvars_path"] = self.build_opts.vcvars_path
        env["os"] = self.build_opts.host_type.ostype
        env["distro"] = self.build_opts.host_type.distro
        env["distro_vers"] = self.build_opts.host_type.distrovers
        env["shared_libs"] = str(self.build_opts.shared_libs)
        # Toolchain-affecting env vars feed the hash so that changing
        # compilers or flags invalidates previously built artifacts.
        for name in [
            "CXXFLAGS",
            "CPPFLAGS",
            "LDFLAGS",
            "CXX",
            "CC",
            "GETDEPS_CMAKE_DEFINES",
        ]:
            env[name] = os.environ.get(name)
        # The resolved location of each toolchain binary matters too.
        for tool in ["cc", "c++", "gcc", "g++", "clang", "clang++"]:
            env["tool-%s" % tool] = path_search(os.environ, tool)
        # Plus any env vars this manifest explicitly declares as inputs.
        for name in manifest.get_section_as_args("depends.environment", ctx):
            env[name] = os.environ.get(name)

        fetcher = self.create_fetcher(manifest)
        env["fetcher.hash"] = fetcher.hash()

        # Sorted iteration keeps the digest independent of insertion order.
        for name in sorted(env.keys()):
            hasher.update(name.encode("utf-8"))
            value = env.get(name)
            if value is not None:
                try:
                    hasher.update(value.encode("utf-8"))
                except AttributeError as exc:
                    raise AttributeError("name=%r, value=%r: %s" % (name, value, exc))

        manifest.update_hash(hasher, ctx)

        # Recursively fold in the hash of every dependency.
        dep_list = manifest.get_dependencies(ctx)
        for dep in dep_list:
            dep_manifest = self.load_manifest(dep)
            dep_hash = self.get_project_hash(dep_manifest)
            hasher.update(dep_hash.encode("utf-8"))

        # Use base64 to represent the hash, rather than the simple hex digest,
        # so that the string is shorter. Use the URL-safe encoding so that
        # the hash can also be safely used as a filename component.
        h = base64.urlsafe_b64encode(hasher.digest()).decode("ascii")
        # ... and because cmd.exe is troublesome with `=` signs, nerf those.
        # They tend to be padding characters at the end anyway, so we can
        # safely discard them.
        h = h.replace("=", "")

        return h
+ # They tend to be padding characters at the end anyway, so we can + # safely discard them. + h = h.replace("=", "") + + return h + + def _get_project_dir_name(self, manifest): + if manifest.is_first_party_project(): + return manifest.name + else: + project_hash = self.get_project_hash(manifest) + return "%s-%s" % (manifest.name, project_hash) + + def get_project_install_dir(self, manifest): + override = self._install_dir_overrides.get(manifest.name) + if override: + return override + + project_dir_name = self._get_project_dir_name(manifest) + return os.path.join(self.build_opts.install_dir, project_dir_name) + + def get_project_build_dir(self, manifest): + override = self._build_dir_overrides.get(manifest.name) + if override: + return override + + project_dir_name = self._get_project_dir_name(manifest) + return os.path.join(self.build_opts.scratch_dir, "build", project_dir_name) + + def get_project_install_prefix(self, manifest): + return self._install_prefix_overrides.get(manifest.name) + + def get_project_install_dir_respecting_install_prefix(self, manifest): + inst_dir = self.get_project_install_dir(manifest) + prefix = self.get_project_install_prefix(manifest) + if prefix: + return inst_dir + prefix + return inst_dir diff --git a/build/fbcode_builder/getdeps/manifest.py b/build/fbcode_builder/getdeps/manifest.py new file mode 100644 index 000000000..ae06f4b26 --- /dev/null +++ b/build/fbcode_builder/getdeps/manifest.py @@ -0,0 +1,783 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +# pyre-unsafe + +import configparser +import io +import os +from typing import List + +from .builder import ( + AutoconfBuilder, + Boost, + CMakeBootStrapBuilder, + CMakeBuilder, + Iproute2Builder, + MakeBuilder, + NinjaBootstrap, + NopBuilder, + OpenSSLBuilder, + SqliteBuilder, +) +from .cargo import CargoBuilder +from .expr import parse_expr +from .fetcher import ( + ArchiveFetcher, + GitFetcher, + PreinstalledNopFetcher, + ShipitTransformerFetcher, + SimpleShipitTransformerFetcher, + SubFetcher, + SystemPackageFetcher, +) +from .py_wheel_builder import PythonWheelBuilder + +REQUIRED = "REQUIRED" +OPTIONAL = "OPTIONAL" + +SCHEMA = { + "manifest": { + "optional_section": False, + "fields": { + "name": REQUIRED, + "fbsource_path": OPTIONAL, + "shipit_project": OPTIONAL, + "shipit_fbcode_builder": OPTIONAL, + "use_shipit": OPTIONAL, + }, + }, + "dependencies": {"optional_section": True, "allow_values": False}, + "depends.environment": {"optional_section": True}, + "git": { + "optional_section": True, + "fields": {"repo_url": REQUIRED, "rev": OPTIONAL, "depth": OPTIONAL}, + }, + "download": { + "optional_section": True, + "fields": {"url": REQUIRED, "sha256": REQUIRED}, + }, + "build": { + "optional_section": True, + "fields": { + "builder": REQUIRED, + "subdir": OPTIONAL, + "make_binary": OPTIONAL, + "build_in_src_dir": OPTIONAL, + "only_install": OPTIONAL, + "job_weight_mib": OPTIONAL, + "patchfile": OPTIONAL, + "patchfile_opts": OPTIONAL, + }, + }, + "msbuild": {"optional_section": True, "fields": {"project": REQUIRED}}, + "cargo": { + "optional_section": True, + "fields": { + "build_doc": OPTIONAL, + "workspace_dir": OPTIONAL, + "manifests_to_build": OPTIONAL, + # Where to write cargo config (defaults to build_dir/.cargo/config.toml) + "cargo_config_file": OPTIONAL, + }, + }, + "github.actions": { + "optional_section": True, + "fields": { + "run_tests": OPTIONAL, + }, + }, + "crate.pathmap": {"optional_section": True}, + "cmake.defines": {"optional_section": 
True}, + "autoconf.args": {"optional_section": True}, + "autoconf.envcmd.LDFLAGS": {"optional_section": True}, + "rpms": {"optional_section": True}, + "debs": {"optional_section": True}, + "homebrew": {"optional_section": True}, + "pps": {"optional_section": True}, + "preinstalled.env": {"optional_section": True}, + "bootstrap.args": {"optional_section": True}, + "b2.args": {"optional_section": True}, + "make.build_args": {"optional_section": True}, + "make.install_args": {"optional_section": True}, + "make.test_args": {"optional_section": True}, + "header-only": {"optional_section": True, "fields": {"includedir": REQUIRED}}, + "shipit.pathmap": {"optional_section": True}, + "shipit.strip": {"optional_section": True}, + "install.files": {"optional_section": True}, + "subprojects": {"optional_section": True}, + # fb-only + "sandcastle": {"optional_section": True, "fields": {"run_tests": OPTIONAL}}, +} + +# These sections are allowed to vary for different platforms +# using the expression syntax to enable/disable sections +ALLOWED_EXPR_SECTIONS = [ + "autoconf.args", + "autoconf.envcmd.LDFLAGS", + "build", + "cmake.defines", + "dependencies", + "make.build_args", + "make.install_args", + "bootstrap.args", + "b2.args", + "download", + "git", + "install.files", + "rpms", + "debs", + "shipit.pathmap", + "shipit.strip", + "homebrew", + "github.actions", + "pps", +] + + +def parse_conditional_section_name(name, section_def): + expr = name[len(section_def) + 1 :] + return parse_expr(expr, ManifestContext.ALLOWED_VARIABLES) + + +def validate_allowed_fields(file_name, section, config, allowed_fields): + for field in config.options(section): + if not allowed_fields.get(field): + raise Exception( + ("manifest file %s section '%s' contains " "unknown field '%s'") + % (file_name, section, field) + ) + + for field in allowed_fields: + if allowed_fields[field] == REQUIRED and not config.has_option(section, field): + raise Exception( + ("manifest file %s section '%s' is missing " 
"required field '%s'") + % (file_name, section, field) + ) + + +def validate_allow_values(file_name, section, config): + for field in config.options(section): + value = config.get(section, field) + if value is not None: + raise Exception( + ( + "manifest file %s section '%s' has '%s = %s' but " + "this section doesn't allow specifying values " + "for its entries" + ) + % (file_name, section, field, value) + ) + + +def validate_section(file_name, section, config): + section_def = SCHEMA.get(section) + if not section_def: + for name in ALLOWED_EXPR_SECTIONS: + if section.startswith(name + "."): + # Verify that the conditional parses, but discard it + try: + parse_conditional_section_name(section, name) + except Exception as exc: + raise Exception( + ("manifest file %s section '%s' has invalid " "conditional: %s") + % (file_name, section, str(exc)) + ) + section_def = SCHEMA.get(name) + canonical_section_name = name + break + if not section_def: + raise Exception( + "manifest file %s contains unknown section '%s'" % (file_name, section) + ) + else: + canonical_section_name = section + + allowed_fields = section_def.get("fields") + if allowed_fields: + validate_allowed_fields(file_name, section, config, allowed_fields) + elif not section_def.get("allow_values", True): + validate_allow_values(file_name, section, config) + return canonical_section_name + + +class ManifestParser(object): + def __init__(self, file_name, fp=None): + # allow_no_value enables listing parameters in the + # autoconf.args section one per line + config = configparser.RawConfigParser(allow_no_value=True) + config.optionxform = str # make it case sensitive + if fp is None: + with open(file_name, "r") as fp: + config.read_file(fp) + elif isinstance(fp, type("")): + # For testing purposes, parse from a string (str + # or unicode) + config.read_file(io.StringIO(fp)) + else: + config.read_file(fp) + + # validate against the schema + seen_sections = set() + + for section in config.sections(): + 
seen_sections.add(validate_section(file_name, section, config)) + + for section in SCHEMA.keys(): + section_def = SCHEMA[section] + if ( + not section_def.get("optional_section", False) + and section not in seen_sections + ): + raise Exception( + "manifest file %s is missing required section %s" + % (file_name, section) + ) + + self._config = config + self.name = config.get("manifest", "name") + self.fbsource_path = self.get("manifest", "fbsource_path") + self.shipit_project = self.get("manifest", "shipit_project") + self.shipit_fbcode_builder = self.get("manifest", "shipit_fbcode_builder") + self.resolved_system_packages = {} + + if self.name != os.path.basename(file_name): + raise Exception( + "filename of the manifest '%s' does not match the manifest name '%s'" + % (file_name, self.name) + ) + + def get(self, section, key, defval=None, ctx=None): + ctx = ctx or {} + + for s in self._config.sections(): + if s == section: + if self._config.has_option(s, key): + return self._config.get(s, key) + return defval + + if s.startswith(section + "."): + expr = parse_conditional_section_name(s, section) + if not expr.eval(ctx): + continue + + if self._config.has_option(s, key): + return self._config.get(s, key) + + return defval + + def get_dependencies(self, ctx): + dep_list = list(self.get_section_as_dict("dependencies", ctx).keys()) + dep_list.sort() + builder = self.get("build", "builder", ctx=ctx) + if builder in ("cmake", "python-wheel"): + dep_list.insert(0, "cmake") + elif builder == "autoconf" and self.name not in ( + "autoconf", + "libtool", + "automake", + ): + # they need libtool and its deps (automake, autoconf) so add + # those as deps (but obviously not if we're building those + # projects themselves) + dep_list.insert(0, "libtool") + + return dep_list + + def get_section_as_args(self, section, ctx=None) -> List[str]: + """Intended for use with the make.[build_args/install_args] and + autoconf.args sections, this method collects the entries and returns an + 
array of strings. + If the manifest contains conditional sections, ctx is used to + evaluate the condition and merge in the values. + """ + args = [] + ctx = ctx or {} + + for s in self._config.sections(): + if s != section: + if not s.startswith(section + "."): + continue + expr = parse_conditional_section_name(s, section) + if not expr.eval(ctx): + continue + for field in self._config.options(s): + value = self._config.get(s, field) + if value is None: + args.append(field) + else: + args.append("%s=%s" % (field, value)) + return args + + def get_section_as_ordered_pairs(self, section, ctx=None): + """Used for eg: shipit.pathmap which has strong + ordering requirements""" + res = [] + ctx = ctx or {} + + for s in self._config.sections(): + if s != section: + if not s.startswith(section + "."): + continue + expr = parse_conditional_section_name(s, section) + if not expr.eval(ctx): + continue + + for key in self._config.options(s): + value = self._config.get(s, key) + res.append((key, value)) + return res + + def get_section_as_dict(self, section, ctx): + d = {} + + for s in self._config.sections(): + if s != section: + if not s.startswith(section + "."): + continue + expr = parse_conditional_section_name(s, section) + if not expr.eval(ctx): + continue + for field in self._config.options(s): + value = self._config.get(s, field) + d[field] = value + return d + + def update_hash(self, hasher, ctx): + """Compute a hash over the configuration for the given + context. The goal is for the hash to change if the config + for that context changes, but not if a change is made to + the config only for a different platform than that expressed + by ctx. The hash is intended to be used to help invalidate + a future cache for the third party build products. 
+ The hasher argument is a hash object returned from hashlib.""" + for section in sorted(SCHEMA.keys()): + hasher.update(section.encode("utf-8")) + + # Note: at the time of writing, nothing in the implementation + # relies on keys in any config section being ordered. + # In theory we could have conflicting flags in different + # config sections and later flags override earlier flags. + # For the purposes of computing a hash we're not super + # concerned about this: manifest changes should be rare + # enough and we'd rather that this trigger an invalidation + # than strive for a cache hit at this time. + pairs = self.get_section_as_ordered_pairs(section, ctx) + pairs.sort(key=lambda pair: pair[0]) + for key, value in pairs: + hasher.update(key.encode("utf-8")) + if value is not None: + hasher.update(value.encode("utf-8")) + + def is_first_party_project(self): + """returns true if this is an FB first-party project""" + return self.shipit_project is not None + + def get_required_system_packages(self, ctx): + """Returns dictionary of packager system -> list of packages""" + return { + "rpm": self.get_section_as_args("rpms", ctx), + "deb": self.get_section_as_args("debs", ctx), + "homebrew": self.get_section_as_args("homebrew", ctx), + "pacman-package": self.get_section_as_args("pps", ctx), + } + + def _is_satisfied_by_preinstalled_environment(self, ctx): + envs = self.get_section_as_args("preinstalled.env", ctx) + if not envs: + return False + for key in envs: + val = os.environ.get(key, None) + print(f"Testing ENV[{key}]: {repr(val)}") + if val is None: + return False + if len(val) == 0: + return False + + return True + + def get_repo_url(self, ctx): + return self.get("git", "repo_url", ctx=ctx) + + def _create_fetcher(self, build_options, ctx): + real_shipit_available = ShipitTransformerFetcher.available(build_options) + use_real_shipit = real_shipit_available and ( + build_options.use_shipit + or self.get("manifest", "use_shipit", defval="false", ctx=ctx) == "true" 
    def _create_fetcher(self, build_options, ctx):
        """Select the fetcher for this manifest, in priority order:
        simple in-fbsource shipit mirror, real ShipIt export, preinstalled
        environment / system packages (when allowed), git checkout, then
        archive download.  Raises KeyError when nothing matches."""
        real_shipit_available = ShipitTransformerFetcher.available(build_options)
        use_real_shipit = real_shipit_available and (
            build_options.use_shipit
            or self.get("manifest", "use_shipit", defval="false", ctx=ctx) == "true"
        )
        if (
            not use_real_shipit
            and self.fbsource_path
            and build_options.fbsource_dir
            and self.shipit_project
        ):
            return SimpleShipitTransformerFetcher(build_options, self, ctx)

        if (
            self.fbsource_path
            and build_options.fbsource_dir
            and self.shipit_project
            and real_shipit_available
        ):
            # We can use the code from fbsource
            return ShipitTransformerFetcher(build_options, self.shipit_project)

        # Can we satisfy this dep with system packages?
        if build_options.allow_system_packages:
            if self._is_satisfied_by_preinstalled_environment(ctx):
                return PreinstalledNopFetcher()

            packages = self.get_required_system_packages(ctx)
            package_fetcher = SystemPackageFetcher(build_options, packages)
            if package_fetcher.packages_are_installed():
                return package_fetcher

        repo_url = self.get_repo_url(ctx)
        if repo_url:
            # NOTE(review): rev/depth are read without ctx, unlike the other
            # lookups in this method — confirm whether conditional
            # [git.<expr>] sections are expected to control them.
            rev = self.get("git", "rev")
            depth = self.get("git", "depth")
            return GitFetcher(build_options, self, repo_url, rev, depth)

        url = self.get("download", "url", ctx=ctx)
        if url:
            # We need to defer this import until now to avoid triggering
            # a cycle when the facebook/__init__.py is loaded.
            try:
                from .facebook.lfs import LFSCachingArchiveFetcher

                return LFSCachingArchiveFetcher(
                    build_options, self, url, self.get("download", "sha256", ctx=ctx)
                )
            except ImportError:
                # This FB internal module isn't shippped to github,
                # so just use its base class
                return ArchiveFetcher(
                    build_options, self, url, self.get("download", "sha256", ctx=ctx)
                )

        raise KeyError(
            "project %s has no fetcher configuration matching %s" % (self.name, ctx)
        )
+ build_dir = src_dir + subdir = self.get("build", "subdir", None, ctx=ctx) + if subdir is not None: + build_dir = os.path.join(build_dir, subdir) + print("build_dir is %s" % build_dir) # just to quiet lint + + if builder == "make" or builder == "cmakebootstrap": + build_args = self.get_section_as_args("make.build_args", ctx) + install_args = self.get_section_as_args("make.install_args", ctx) + test_args = self.get_section_as_args("make.test_args", ctx) + if builder == "cmakebootstrap": + return CMakeBootStrapBuilder( + loader, + dep_manifests, + build_options, + ctx, + self, + src_dir, + None, + inst_dir, + build_args, + install_args, + test_args, + ) + else: + return MakeBuilder( + loader, + dep_manifests, + build_options, + ctx, + self, + src_dir, + None, + inst_dir, + build_args, + install_args, + test_args, + ) + + if builder == "autoconf": + args = self.get_section_as_args("autoconf.args", ctx) + conf_env_args = {} + ldflags_cmd = self.get_section_as_args("autoconf.envcmd.LDFLAGS", ctx) + if ldflags_cmd: + conf_env_args["LDFLAGS"] = ldflags_cmd + return AutoconfBuilder( + loader, + dep_manifests, + build_options, + ctx, + self, + src_dir, + build_dir, + inst_dir, + args, + conf_env_args, + ) + + if builder == "boost": + args = self.get_section_as_args("b2.args", ctx) + if extra_b2_args is not None: + args += extra_b2_args + return Boost( + loader, + dep_manifests, + build_options, + ctx, + self, + src_dir, + build_dir, + inst_dir, + args, + ) + + if builder == "cmake": + defines = self.get_section_as_dict("cmake.defines", ctx) + return CMakeBuilder( + loader, + dep_manifests, + build_options, + ctx, + self, + src_dir, + build_dir, + inst_dir, + defines, + final_install_prefix, + extra_cmake_defines, + cmake_target, + ) + + if builder == "python-wheel": + return PythonWheelBuilder( + loader, + dep_manifests, + build_options, + ctx, + self, + src_dir, + build_dir, + inst_dir, + ) + + if builder == "sqlite": + return SqliteBuilder( + loader, + dep_manifests, + 
build_options, + ctx, + self, + src_dir, + build_dir, + inst_dir, + ) + + if builder == "ninja_bootstrap": + return NinjaBootstrap( + loader, + dep_manifests, + build_options, + ctx, + self, + build_dir, + src_dir, + inst_dir, + ) + + if builder == "nop": + return NopBuilder( + loader, dep_manifests, build_options, ctx, self, src_dir, inst_dir + ) + + if builder == "openssl": + return OpenSSLBuilder( + loader, + dep_manifests, + build_options, + ctx, + self, + build_dir, + src_dir, + inst_dir, + ) + + if builder == "iproute2": + return Iproute2Builder( + loader, + dep_manifests, + build_options, + ctx, + self, + src_dir, + build_dir, + inst_dir, + ) + + if builder == "cargo": + return self.create_cargo_builder( + loader, + dep_manifests, + build_options, + ctx, + src_dir, + build_dir, + inst_dir, + ) + + raise KeyError("project %s has no known builder" % (self.name)) + + def create_prepare_builders( + self, + build_options, + ctx, + src_dir, + build_dir, + inst_dir, + loader, + dep_manifests, + ): + """Create builders that have a prepare step run, e.g. 
class ManifestContext(object):
    """Holds the variable bindings used when evaluating boolean expressions
    in a project manifest.

    Instances are passed as the `ctx` parameter in ManifestParser.get() calls.
    """

    # The complete, closed set of variable names an expression may reference.
    ALLOWED_VARIABLES = {
        "os",
        "distro",
        "distro_vers",
        "fb",
        "fbsource",
        "test",
        "shared_libs",
    }

    def __init__(self, ctx_dict):
        # Require exactly the allowed variables -- no extras, none missing.
        assert set(ctx_dict.keys()) == self.ALLOWED_VARIABLES
        self.ctx_dict = ctx_dict

    def get(self, key):
        """Return the value bound to `key` (KeyError when absent)."""
        return self.ctx_dict[key]

    def set(self, key, value):
        """Bind `value` to `key`; `key` must be one of ALLOWED_VARIABLES."""
        assert key in self.ALLOWED_VARIABLES
        self.ctx_dict[key] = value

    def copy(self):
        """Return an independent ManifestContext with the same bindings."""
        return ManifestContext(dict(self.ctx_dict))

    def __str__(self):
        pairs = sorted(self.ctx_dict.items())
        body = ", ".join(f"{key}={value}" for key, value in pairs)
        return "{" + body + "}"


class ContextGenerator(object):
    """ContextGenerator allows creating ManifestContext objects on a per-project
    basis, so different projects can be evaluated with slightly different
    contexts.

    For instance, this can be used to only enable tests for some projects."""

    def __init__(self, default_ctx):
        self.default_ctx = ManifestContext(default_ctx)
        self.ctx_by_project = {}

    def set_value_for_project(self, project_name, key, value):
        """Override `key` for one project, lazily cloning the default context."""
        if project_name not in self.ctx_by_project:
            self.ctx_by_project[project_name] = self.default_ctx.copy()
        self.ctx_by_project[project_name].set(key, value)

    def set_value_for_all_projects(self, key, value):
        """Set `key` on the default context and on every per-project override."""
        self.default_ctx.set(key, value)
        for project_ctx in self.ctx_by_project.values():
            project_ctx.set(key, value)

    def get_context(self, project_name):
        """Return the project's context, or the shared default when it has none."""
        return self.ctx_by_project.get(project_name, self.default_ctx)
# pyre-unsafe

import os

import platform
import re
import shlex
import sys
from typing import Optional, Tuple


def is_windows() -> bool:
    """Returns true if the system we are currently running on
    is a Windows system"""
    return sys.platform.startswith("win")


def get_linux_type() -> Tuple[Optional[str], Optional[str], Optional[str]]:
    """Parse /etc/os-release and return (ostype, distro_name, version_id).

    Returns (None, None, None) when the file cannot be read.
    """
    try:
        with open("/etc/os-release") as f:
            data = f.read()
    except EnvironmentError:
        return (None, None, None)

    os_vars = {}
    for line in data.splitlines():
        parts = line.split("=", 1)
        if len(parts) != 2:
            continue
        key = parts[0].strip()
        # Values may be shell-quoted; keep the first shlex token only.
        value_parts = shlex.split(parts[1].strip())
        if not value_parts:
            value = ""
        else:
            value = value_parts[0]
        os_vars[key] = value

    # Normalize e.g. "Arch Linux" -> "arch", "Pop!_OS" -> "pop!_os".
    name = os_vars.get("NAME")
    if name:
        name = name.lower()
        name = re.sub("linux", "", name)
        name = name.strip().replace(" ", "_")

    version_id = os_vars.get("VERSION_ID")
    if version_id:
        version_id = version_id.lower()

    return "linux", name, version_id


# Ideally we'd use a common library like `psutil` to read system information,
# but getdeps can't take third-party dependencies.


def _get_available_ram_linux() -> int:
    """Return MemAvailable from /proc/meminfo, converted to MiB.

    Falls back to an 8 GiB guess (noted on stderr) when /proc/meminfo is
    unreadable or contains no parseable MemAvailable entry.
    """
    # TODO: Ideally, this function would inspect the current cgroup for any
    # limits, rather than solely relying on system RAM.
    meminfo_path = "/proc/meminfo"
    try:
        with open(meminfo_path) as f:
            for line in f:
                try:
                    key, value = line.split(":", 1)
                except ValueError:
                    continue
                suffix = " kB\n"
                if key == "MemAvailable" and value.endswith(suffix):
                    value = value[: -len(suffix)]
                    try:
                        # /proc/meminfo reports kB; convert to MiB.
                        return int(value) // 1024
                    except ValueError:
                        continue
    except OSError:
        print("error opening {}".format(meminfo_path), end="", file=sys.stderr)
    else:
        print(
            "{} had no valid MemAvailable".format(meminfo_path), end="", file=sys.stderr
        )

    guess = 8
    print(", guessing {} GiB".format(guess), file=sys.stderr)
    return guess * 1024


def _get_available_ram_macos() -> int:
    """Return total RAM in MiB via the hw.memsize sysctl."""
    import ctypes.util

    libc = ctypes.CDLL(ctypes.util.find_library("libc"), use_errno=True)
    sysctlbyname = libc.sysctlbyname
    sysctlbyname.restype = ctypes.c_int
    sysctlbyname.argtypes = [
        ctypes.c_char_p,
        ctypes.c_void_p,
        ctypes.POINTER(ctypes.c_size_t),
        ctypes.c_void_p,
        ctypes.c_size_t,
    ]
    # TODO: There may be some way to approximate an availability
    # metric, but just use total RAM for now.
    memsize = ctypes.c_int64()
    memsizesize = ctypes.c_size_t(8)
    res = sysctlbyname(
        b"hw.memsize", ctypes.byref(memsize), ctypes.byref(memsizesize), None, 0
    )
    if res != 0:
        raise NotImplementedError(
            f"failed to retrieve hw.memsize sysctl: {ctypes.get_errno()}"
        )
    return memsize.value // (1024 * 1024)


def _get_available_ram_windows() -> int:
    """Return an available-RAM estimate in MiB via GlobalMemoryStatusEx."""
    import ctypes

    DWORD = ctypes.c_uint32
    QWORD = ctypes.c_uint64

    class MEMORYSTATUSEX(ctypes.Structure):
        _fields_ = [
            ("dwLength", DWORD),
            ("dwMemoryLoad", DWORD),
            ("ullTotalPhys", QWORD),
            ("ullAvailPhys", QWORD),
            ("ullTotalPageFile", QWORD),
            ("ullAvailPageFile", QWORD),
            ("ullTotalVirtual", QWORD),
            ("ullAvailVirtual", QWORD),
            ("ullExtendedVirtual", QWORD),
        ]

    ms = MEMORYSTATUSEX()
    ms.dwLength = ctypes.sizeof(ms)
    # pyre-ignore[16]
    res = ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(ms))
    if res == 0:
        raise NotImplementedError("error calling GlobalMemoryStatusEx")

    # This is fuzzy, but AvailPhys is too conservative, and AvailTotal is too
    # aggressive, so average the two. It's okay for builds to use some swap.
    return (ms.ullAvailPhys + ms.ullTotalPhys) // (2 * 1024 * 1024)


def _get_available_ram_freebsd() -> int:
    """Return user memory in MiB via the hw.usermem sysctl."""
    import ctypes.util

    libc = ctypes.CDLL(ctypes.util.find_library("libc"), use_errno=True)
    sysctlbyname = libc.sysctlbyname
    sysctlbyname.restype = ctypes.c_int
    sysctlbyname.argtypes = [
        ctypes.c_char_p,
        ctypes.c_void_p,
        ctypes.POINTER(ctypes.c_size_t),
        ctypes.c_void_p,
        ctypes.c_size_t,
    ]
    # hw.usermem is pretty close to what we want.
    memsize = ctypes.c_int64()
    memsizesize = ctypes.c_size_t(8)
    res = sysctlbyname(
        b"hw.usermem", ctypes.byref(memsize), ctypes.byref(memsizesize), None, 0
    )
    if res != 0:
        # BUGFIX: this message previously said "hw.memsize" (copy-pasted
        # from the macOS implementation) although hw.usermem is queried here.
        raise NotImplementedError(
            f"failed to retrieve hw.usermem sysctl: {ctypes.get_errno()}"
        )
    return memsize.value // (1024 * 1024)


def get_available_ram() -> int:
    """
    Returns a platform-appropriate available RAM metric in MiB.

    Raises NotImplementedError on platforms with no implementation.
    """
    if sys.platform == "linux":
        return _get_available_ram_linux()
    elif sys.platform == "darwin":
        return _get_available_ram_macos()
    elif sys.platform == "win32":
        return _get_available_ram_windows()
    elif sys.platform.startswith("freebsd"):
        return _get_available_ram_freebsd()
    else:
        raise NotImplementedError(
            f"platform {sys.platform} does not have an implementation of get_available_ram"
        )


def is_current_host_arm() -> bool:
    """Return True when the machine we are running on uses an ARM CPU
    (including Apple Silicon Macs)."""
    if sys.platform.startswith("darwin"):
        # platform.machine() can be fooled by rosetta for python < 3.9.2
        return "ARM64" in os.uname().version
    else:
        machine = platform.machine().lower()
        return "arm" in machine or "aarch" in machine
+ distrovers = str(sys.getwindowsversion().major) + elif sys.platform.startswith("freebsd"): + ostype = "freebsd" + else: + ostype = sys.platform + + isarm = is_current_host_arm() + + # The operating system type + self.ostype = ostype + # The distribution, if applicable + self.distro = distro + # The OS/distro version if known + self.distrovers = distrovers + # Does the CPU use an ARM architecture? ARM includes Apple Silicon + # Macs as well as other ARM systems that might be running Linux or + # something. + self.isarm = isarm + + def is_windows(self): + return self.ostype == "windows" + + # is_arm is kinda half implemented at the moment. This method is only + # intended to be used when HostType represents information about the + # current machine we are running on. + # When HostType is being used to enumerate platform types (represent + # information about machine types that we may or may not be running on) + # the result could be nonsense (under the current implementation its always + # false.) 
+ def is_arm(self): + return self.isarm + + def is_darwin(self): + return self.ostype == "darwin" + + def is_linux(self): + return self.ostype == "linux" + + def is_freebsd(self): + return self.ostype == "freebsd" + + def as_tuple_string(self) -> str: + return "%s-%s-%s" % ( + self.ostype, + self.distro or "none", + self.distrovers or "none", + ) + + def get_package_manager(self): + if not self.is_linux() and not self.is_darwin(): + return None + if self.is_darwin(): + return "homebrew" + if self.distro in ("fedora", "centos", "centos_stream", "rocky"): + return "rpm" + if self.distro.startswith(("debian", "ubuntu", "pop!_os", "mint")): + return "deb" + if self.distro == "arch": + return "pacman-package" + return None + + @staticmethod + def from_tuple_string(s) -> "HostType": + ostype, distro, distrovers = s.split("-") + return HostType(ostype=ostype, distro=distro, distrovers=distrovers) + + def __eq__(self, b): + return ( + self.ostype == b.ostype + and self.distro == b.distro + and self.distrovers == b.distrovers + ) diff --git a/build/fbcode_builder/getdeps/py_wheel_builder.py b/build/fbcode_builder/getdeps/py_wheel_builder.py new file mode 100644 index 000000000..335d74afd --- /dev/null +++ b/build/fbcode_builder/getdeps/py_wheel_builder.py @@ -0,0 +1,288 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +# pyre-unsafe + +import codecs +import collections +import email +import os +import re +import stat +from typing import Dict, List + +from .builder import BuilderBase, CMakeBuilder + + +WheelNameInfo = collections.namedtuple( + "WheelNameInfo", ("distribution", "version", "build", "python", "abi", "platform") +) + +CMAKE_HEADER = """ +cmake_minimum_required(VERSION 3.8) + +project("{manifest_name}" LANGUAGES C) + +set(CMAKE_MODULE_PATH + "{cmake_dir}" + ${{CMAKE_MODULE_PATH}} +) +include(FBPythonBinary) + +set(CMAKE_INSTALL_DIR lib/cmake/{manifest_name} CACHE STRING + "The subdirectory where CMake package config files should be installed") +""" + +CMAKE_FOOTER = """ +install_fb_python_library({lib_name} EXPORT all) +install( + EXPORT all + FILE {manifest_name}-targets.cmake + NAMESPACE {namespace}:: + DESTINATION ${{CMAKE_INSTALL_DIR}} +) + +include(CMakePackageConfigHelpers) +configure_package_config_file( + ${{CMAKE_BINARY_DIR}}/{manifest_name}-config.cmake.in + {manifest_name}-config.cmake + INSTALL_DESTINATION ${{CMAKE_INSTALL_DIR}} + PATH_VARS + CMAKE_INSTALL_DIR +) +install( + FILES ${{CMAKE_CURRENT_BINARY_DIR}}/{manifest_name}-config.cmake + DESTINATION ${{CMAKE_INSTALL_DIR}} +) +""" + +CMAKE_CONFIG_FILE = """ +@PACKAGE_INIT@ + +include(CMakeFindDependencyMacro) + +set_and_check({upper_name}_CMAKE_DIR "@PACKAGE_CMAKE_INSTALL_DIR@") + +if (NOT TARGET {namespace}::{lib_name}) + include("${{{upper_name}_CMAKE_DIR}}/{manifest_name}-targets.cmake") +endif() + +set({upper_name}_LIBRARIES {namespace}::{lib_name}) + +{find_dependency_lines} + +if (NOT {manifest_name}_FIND_QUIETLY) + message(STATUS "Found {manifest_name}: ${{PACKAGE_PREFIX_DIR}}") +endif() +""" + + +# Note: for now we are manually manipulating the wheel packet contents. 
+# The wheel format is documented here: +# https://www.python.org/dev/peps/pep-0491/#file-format +# +# We currently aren't particularly smart about correctly handling the full wheel +# functionality, but this is good enough to handle simple pure-python wheels, +# which is the main thing we care about right now. +# +# We could potentially use pip to install the wheel to a temporary location and +# then copy its "installed" files, but this has its own set of complications. +# This would require pip to already be installed and available, and we would +# need to correctly find the right version of pip or pip3 to use. +# If we did ever want to go down that path, we would probably want to use +# something like the following pip3 command: +# pip3 --isolated install --no-cache-dir --no-index --system \ +# --target +# pyre-fixme[13] fields initialized in _build +class PythonWheelBuilder(BuilderBase): + """This Builder can take Python wheel archives and install them as python libraries + that can be used by add_fb_python_library()/add_fb_python_executable() CMake rules. + """ + + dist_info_dir: str + template_format_dict: Dict[str, str] + + def _build(self, reconfigure: bool) -> None: + # When we are invoked, self.src_dir contains the unpacked wheel contents. + # + # Since a wheel file is just a zip file, the Fetcher code recognizes it as such + # and goes ahead and unpacks it. (We could disable that Fetcher behavior in the + # future if we ever wanted to, say if we wanted to call pip here.) + wheel_name = self._parse_wheel_name() + name_version_prefix = "-".join((wheel_name.distribution, wheel_name.version)) + dist_info_name = name_version_prefix + ".dist-info" + data_dir_name = name_version_prefix + ".data" + self.dist_info_dir = os.path.join(self.src_dir, dist_info_name) + wheel_metadata = self._read_wheel_metadata(wheel_name) + + # Check that we can understand the wheel version. 
+ # We don't really care about wheel_metadata["Root-Is-Purelib"] since + # we are generating our own standalone python archives rather than installing + # into site-packages. + version = wheel_metadata["Wheel-Version"] + if not version.startswith("1."): + raise Exception("unsupported wheel version %s" % (version,)) + + # Add a find_dependency() call for each of our dependencies. + # The dependencies are also listed in the wheel METADATA file, but it is simpler + # to pull this directly from the getdeps manifest. + dep_list = sorted( + self.manifest.get_section_as_dict("dependencies", self.ctx).keys() + ) + find_dependency_lines = ["find_dependency({})".format(dep) for dep in dep_list] + + getdeps_cmake_dir = os.path.join( + os.path.dirname(os.path.dirname(__file__)), "CMake" + ) + self.template_format_dict = { + # Note that CMake files always uses forward slash separators in path names, + # even on Windows. Therefore replace path separators here. + "cmake_dir": _to_cmake_path(getdeps_cmake_dir), + "lib_name": self.manifest.name, + "manifest_name": self.manifest.name, + "namespace": self.manifest.name, + "upper_name": self.manifest.name.upper().replace("-", "_"), + "find_dependency_lines": "\n".join(find_dependency_lines), + } + + # Find sources from the root directory + path_mapping = {} + for entry in os.listdir(self.src_dir): + if entry in (dist_info_name, data_dir_name): + continue + self._add_sources(path_mapping, os.path.join(self.src_dir, entry), entry) + + # Files under the .data directory also need to be installed in the correct + # locations + if os.path.exists(data_dir_name): + # TODO: process the subdirectories of data_dir_name + # This isn't implemented yet since for now we have only needed dependencies + # on some simple pure Python wheels, so I haven't tested against wheels with + # additional files in the .data directory. 
+ raise Exception( + "handling of the subdirectories inside %s is not implemented yet" + % data_dir_name + ) + + # Emit CMake files + self._write_cmakelists(path_mapping, dep_list) + self._write_cmake_config_template() + + # Run the build + self._run_cmake_build(reconfigure) + + def _run_cmake_build(self, reconfigure: bool) -> None: + cmake_builder = CMakeBuilder( + loader=self.loader, + dep_manifests=self.dep_manifests, + build_opts=self.build_opts, + ctx=self.ctx, + manifest=self.manifest, + # Note that we intentionally supply src_dir=build_dir, + # since we wrote out our generated CMakeLists.txt in the build directory + src_dir=self.build_dir, + build_dir=self.build_dir, + inst_dir=self.inst_dir, + defines={}, + final_install_prefix=None, + ) + cmake_builder.build(reconfigure=reconfigure) + + def _write_cmakelists(self, path_mapping: Dict[str, str], dependencies) -> None: + cmake_path = os.path.join(self.build_dir, "CMakeLists.txt") + with open(cmake_path, "w") as f: + f.write(CMAKE_HEADER.format(**self.template_format_dict)) + for dep in dependencies: + f.write("find_package({0} REQUIRED)\n".format(dep)) + + f.write( + "add_fb_python_library({lib_name}\n".format(**self.template_format_dict) + ) + f.write(' BASE_DIR "%s"\n' % _to_cmake_path(self.src_dir)) + f.write(" SOURCES\n") + for src_path, install_path in path_mapping.items(): + f.write( + ' "%s=%s"\n' + % (_to_cmake_path(src_path), _to_cmake_path(install_path)) + ) + if dependencies: + f.write(" DEPENDS\n") + for dep in dependencies: + f.write(' "{0}::{0}"\n'.format(dep)) + f.write(")\n") + + f.write(CMAKE_FOOTER.format(**self.template_format_dict)) + + def _write_cmake_config_template(self) -> None: + config_path_name = self.manifest.name + "-config.cmake.in" + output_path = os.path.join(self.build_dir, config_path_name) + + with open(output_path, "w") as f: + f.write(CMAKE_CONFIG_FILE.format(**self.template_format_dict)) + + def _add_sources( + self, path_mapping: Dict[str, str], src_path: str, 
install_path: str + ) -> None: + s = os.lstat(src_path) + if not stat.S_ISDIR(s.st_mode): + path_mapping[src_path] = install_path + return + + for entry in os.listdir(src_path): + self._add_sources( + path_mapping, + os.path.join(src_path, entry), + os.path.join(install_path, entry), + ) + + def _parse_wheel_name(self) -> WheelNameInfo: + # The ArchiveFetcher prepends "manifest_name-", so strip that off first. + wheel_name = os.path.basename(self.src_dir) + prefix = self.manifest.name + "-" + if not wheel_name.startswith(prefix): + raise Exception( + "expected wheel source directory to be of the form %s-NAME.whl" + % (prefix,) + ) + wheel_name = wheel_name[len(prefix) :] + + wheel_name_re = re.compile( + r"(?P[^-]+)" + r"-(?P\d+[^-]*)" + r"(-(?P\d+[^-]*))?" + r"-(?P\w+\d+(\.\w+\d+)*)" + r"-(?P\w+)" + r"-(?P\w+(\.\w+)*)" + r"\.whl" + ) + match = wheel_name_re.match(wheel_name) + if not match: + raise Exception( + "bad python wheel name %s: expected to have the form " + "DISTRIBUTION-VERSION-[-BUILD]-PYTAG-ABI-PLATFORM" + ) + + return WheelNameInfo( + distribution=match.group("distribution"), + version=match.group("version"), + build=match.group("build"), + python=match.group("python"), + abi=match.group("abi"), + platform=match.group("platform"), + ) + + def _read_wheel_metadata(self, wheel_name): + metadata_path = os.path.join(self.dist_info_dir, "WHEEL") + with codecs.open(metadata_path, "r", encoding="utf-8") as f: + return email.message_from_file(f) + + +def _to_cmake_path(path): + # CMake always uses forward slashes to separate paths in CMakeLists.txt files, + # even on Windows. It treats backslashes as character escapes, so using + # backslashes in the path will cause problems. Therefore replace all path + # separators with forward slashes to make sure the paths are correct on Windows. + # e.g. 
try:
    from shlex import quote as shellquote
except ImportError:
    from pipes import quote as shellquote


class RunCommandError(Exception):
    """Raised when a command cannot be quoted or started at all."""

    pass


def _print_env_diff(env, log_fn) -> None:
    """Log shell-style lines describing how `env` differs from os.environ."""
    current = set(os.environ.keys())
    wanted = set(env.keys())

    # Variables present now but absent from the wanted environment.
    for key in sorted(current.difference(wanted)):
        log_fn("+ unset %s\n" % key)

    # Variables that are new, plus variables whose value changes.
    changed = {k for k in wanted.intersection(current) if os.environ[k] != env[k]}
    for key in sorted(wanted.difference(current) | changed):
        value = env[key]
        if ("PATH" in key) and (os.pathsep in value):
            # Spread PATH-like values one element per line for readability.
            log_fn("+ %s=\\\n" % key)
            for element in value.split(os.pathsep):
                log_fn("+ %s%s\\\n" % (shellquote(element), os.pathsep))
        else:
            log_fn("+ %s=%s \\\n" % (key, shellquote(value)))


def run_cmd(cmd, env=None, cwd=None, allow_fail: bool = False, log_file=None) -> int:
    """Run `cmd`, echoing its output to stdout (and appending to `log_file`
    when one is given).

    Returns the exit code; raises CalledProcessError on a non-zero exit
    unless allow_fail is True.
    """

    def emit_stdout(msg):
        sys.stdout.buffer.write(msg.encode(errors="surrogateescape"))

    if log_file is None:
        return _run_cmd(
            cmd, env=env, cwd=cwd, allow_fail=allow_fail, log_fn=emit_stdout
        )

    with open(log_file, "a", encoding="utf-8", errors="surrogateescape") as log:

        def emit_both(msg):
            log.write(msg)
            emit_stdout(msg)

        return _run_cmd(cmd, env=env, cwd=cwd, allow_fail=allow_fail, log_fn=emit_both)


def _run_cmd(cmd, env, cwd, allow_fail, log_fn) -> int:
    """Shared implementation behind run_cmd(); logs everything via log_fn."""
    log_fn("---\n")
    try:
        cmd_str = " \\\n+ ".join(shellquote(arg) for arg in cmd)
    except TypeError:
        # eg: one of the elements is None
        raise RunCommandError("problem quoting cmd: %r" % cmd)

    if env:
        assert isinstance(env, Env)
        _print_env_diff(env, log_fn)

        # Convert from our Env type to a regular dict.
        # This is needed because python3 looks up b'PATH' and 'PATH'
        # and emits an error if both are present. In our Env type
        # we'll return the same value for both requests, but we don't
        # have duplicate potentially conflicting values which is the
        # spirit of the check.
        env = dict(env.items())

    if cwd:
        log_fn("+ cd %s && \\\n" % shellquote(cwd))
        # Our long path escape sequence may confuse cmd.exe, so if the cwd
        # is short enough, strip that off.
        if is_windows() and (len(cwd) < 250) and cwd.startswith("\\\\?\\"):
            cwd = cwd[4:]

    log_fn("+ %s\n" % cmd_str)

    interactive = os.isatty(sys.stdout.fileno())
    if interactive:
        stdout = None
        sys.stdout.buffer.flush()
    else:
        stdout = subprocess.PIPE

    try:
        proc = subprocess.Popen(
            cmd, env=env, cwd=cwd, stdout=stdout, stderr=subprocess.STDOUT
        )
    except (TypeError, ValueError, OSError) as exc:
        log_fn("error running `%s`: %s" % (cmd_str, exc))
        raise RunCommandError(
            "%s while running `%s` with env=%r\nos.environ=%r"
            % (str(exc), cmd_str, env, os.environ)
        )

    if not interactive:
        _pipe_output(proc, log_fn)

    proc.wait()
    if proc.returncode != 0 and not allow_fail:
        raise subprocess.CalledProcessError(proc.returncode, cmd)

    return proc.returncode
poll.poll() + data = p.stdout.read(buffer_size) + if not data: + break + # log_fn() accepts arguments as str (binary in Python 2, unicode in + # Python 3). In Python 3 the subprocess output will be plain bytes, + # and need to be decoded. + if not isinstance(data, str): + data = data.decode("utf-8", errors="surrogateescape") + log_fn(data) + +else: + + def _pipe_output(p, log_fn): + """Read output from p.stdout and call log_fn() with each chunk of data as it + becomes available.""" + # Perform blocking reads. Use a smaller buffer size to avoid blocking + # for very long when data is available. + buffer_size = 64 + while True: + data = p.stdout.read(buffer_size) + if not data: + break + # log_fn() accepts arguments as str (binary in Python 2, unicode in + # Python 3). In Python 3 the subprocess output will be plain bytes, + # and need to be decoded. + if not isinstance(data, str): + data = data.decode("utf-8", errors="surrogateescape") + log_fn(data) diff --git a/build/fbcode_builder/getdeps/subcmd.py b/build/fbcode_builder/getdeps/subcmd.py new file mode 100644 index 000000000..acbeb93f1 --- /dev/null +++ b/build/fbcode_builder/getdeps/subcmd.py @@ -0,0 +1,58 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# pyre-unsafe + + +class SubCmd(object): + NAME = None + HELP = None + + def run(self, args) -> int: + """perform the command""" + return 0 + + def setup_parser(self, parser) -> None: + # Subclasses should override setup_parser() if they have any + # command line options or arguments. 
# Global registry of SubCmd classes decorated with @cmd().
CmdTable = []


def add_subcommands(parser, common_args, cmd_table=CmdTable) -> None:
    """Register parsers for the defined commands with the provided parser"""
    for cmd_cls in cmd_table:
        instance = cmd_cls()
        sub_parser = parser.add_parser(
            instance.NAME, help=instance.HELP, parents=[common_args]
        )
        instance.setup_parser(sub_parser)
        # Dispatch: parsed args carry the command's run() as args.func.
        sub_parser.set_defaults(func=instance.run)


def cmd(name, help=None, cmd_table=CmdTable):
    """
    @cmd() is a decorator that can be used to help define Subcmd instances

    Example usage:

    @subcmd('list', 'Show the result list')
    class ListCmd(Subcmd):
        def run(self, args):
            # Perform the command actions here...
            pass
    """

    def wrapper(cls):
        # Subclass rather than mutate, so the decorated class itself is
        # left untouched and NAME/HELP are attached to the registered copy.
        class SubclassedCmd(cls):
            NAME = name
            HELP = help

        cmd_table.append(SubclassedCmd)
        return SubclassedCmd

    return wrapper
# pyre-unsafe


import unittest

from ..expr import parse_expr


class ExprTest(unittest.TestCase):
    """Unit tests for the boolean expression mini-language evaluated by
    ..expr.parse_expr (used for conditional sections in manifest files)."""

    def test_equal(self) -> None:
        # var=value matches only a context that binds that exact value;
        # a context missing the variable does not match either.
        valid_variables = {"foo", "some_var", "another_var"}
        e = parse_expr("foo=bar", valid_variables)
        self.assertTrue(e.eval({"foo": "bar"}))
        self.assertFalse(e.eval({"foo": "not-bar"}))
        self.assertFalse(e.eval({"not-foo": "bar"}))

    def test_not_equal(self) -> None:
        # not(...) inverts the wrapped comparison.
        valid_variables = {"foo"}
        e = parse_expr("not(foo=bar)", valid_variables)
        self.assertFalse(e.eval({"foo": "bar"}))
        self.assertTrue(e.eval({"foo": "not-bar"}))

    def test_bad_not(self) -> None:
        # not(...) appearing on the value side of '=' is rejected at parse time.
        valid_variables = {"foo"}
        with self.assertRaises(Exception):
            parse_expr("foo=not(bar)", valid_variables)

    def test_bad_variable(self) -> None:
        # Referencing a variable outside valid_variables fails to parse.
        valid_variables = {"bar"}
        with self.assertRaises(Exception):
            parse_expr("foo=bar", valid_variables)

    def test_all(self) -> None:
        # all(...) requires every clause to match.
        valid_variables = {"foo", "baz"}
        e = parse_expr("all(foo = bar, baz = qux)", valid_variables)
        self.assertTrue(e.eval({"foo": "bar", "baz": "qux"}))
        self.assertFalse(e.eval({"foo": "bar", "baz": "nope"}))
        self.assertFalse(e.eval({"foo": "nope", "baz": "nope"}))

    def test_any(self) -> None:
        # any(...) requires at least one clause to match.
        valid_variables = {"foo", "baz"}
        e = parse_expr("any(foo = bar, baz = qux)", valid_variables)
        self.assertTrue(e.eval({"foo": "bar", "baz": "qux"}))
        self.assertTrue(e.eval({"foo": "bar", "baz": "nope"}))
        self.assertFalse(e.eval({"foo": "nope", "baz": "nope"}))
class ManifestTest(unittest.TestCase):
    """Tests for ManifestParser validation and section/field handling.

    Manifest bodies are passed inline as triple-quoted strings; the error
    messages asserted here pin the parser's diagnostics verbatim.
    """

    def test_missing_section(self) -> None:
        # Every manifest must contain a [manifest] section.
        with self.assertRaisesRegex(
            Exception, "manifest file test is missing required section manifest"
        ):
            ManifestParser("test", "")

    def test_missing_name(self) -> None:
        # The [manifest] section must define a name.
        with self.assertRaisesRegex(
            Exception,
            "manifest file test section 'manifest' is missing required field 'name'",
        ):
            ManifestParser(
                "test",
                """
[manifest]
""",
            )

    def test_minimal(self) -> None:
        p = ManifestParser(
            "test",
            """
[manifest]
name = test
""",
        )
        self.assertEqual(p.name, "test")
        self.assertEqual(p.fbsource_path, None)

    def test_minimal_with_fbsource_path(self) -> None:
        p = ManifestParser(
            "test",
            """
[manifest]
name = test
fbsource_path = fbcode/wat
""",
        )
        self.assertEqual(p.name, "test")
        self.assertEqual(p.fbsource_path, "fbcode/wat")

    def test_unknown_field(self) -> None:
        # Unrecognized fields in a known section are rejected.
        with self.assertRaisesRegex(
            Exception,
            (
                "manifest file test section 'manifest' contains "
                "unknown field 'invalid.field'"
            ),
        ):
            ManifestParser(
                "test",
                """
[manifest]
name = test
invalid.field = woot
""",
            )

    def test_invalid_section_name(self) -> None:
        # Unrecognized section names are rejected.
        with self.assertRaisesRegex(
            Exception, "manifest file test contains unknown section 'invalid.section'"
        ):
            ManifestParser(
                "test",
                """
[manifest]
name = test

[invalid.section]
foo = bar
""",
            )

    def test_value_in_dependencies_section(self) -> None:
        # [dependencies] entries are bare names; key = value is not allowed.
        with self.assertRaisesRegex(
            Exception,
            (
                "manifest file test section 'dependencies' has "
                "'foo = bar' but this section doesn't allow "
                "specifying values for its entries"
            ),
        ):
            ManifestParser(
                "test",
                """
[manifest]
name = test

[dependencies]
foo = bar
""",
            )

    def test_invalid_conditional_section_name(self) -> None:
        # A conditional suffix (section.<expr>) must be a parseable expression.
        with self.assertRaisesRegex(
            Exception,
            (
                "manifest file test section 'dependencies.=' "
                "has invalid conditional: expected "
                "identifier found ="
            ),
        ):
            ManifestParser(
                "test",
                """
[manifest]
name = test

[dependencies.=]
""",
            )

    def test_section_as_args(self) -> None:
        # Conditional sections only contribute entries when their
        # condition evaluates true against the supplied context.
        p = ManifestParser(
            "test",
            """
[manifest]
name = test

[dependencies]
a
b
c

[dependencies.test=on]
foo
""",
        )
        self.assertEqual(p.get_section_as_args("dependencies"), ["a", "b", "c"])
        self.assertEqual(
            p.get_section_as_args("dependencies", {"test": "off"}), ["a", "b", "c"]
        )
        self.assertEqual(
            p.get_section_as_args("dependencies", {"test": "on"}),
            ["a", "b", "c", "foo"],
        )

        p2 = ManifestParser(
            "test",
            """
[manifest]
name = test

[autoconf.args]
--prefix=/foo
--with-woot
""",
        )
        self.assertEqual(
            p2.get_section_as_args("autoconf.args"), ["--prefix=/foo", "--with-woot"]
        )

    def test_section_as_dict(self) -> None:
        p = ManifestParser(
            "test",
            """
[manifest]
name = test

[cmake.defines]
foo = bar

[cmake.defines.test=on]
foo = baz
""",
        )
        self.assertEqual(p.get_section_as_dict("cmake.defines", {}), {"foo": "bar"})
        self.assertEqual(
            p.get_section_as_dict("cmake.defines", {"test": "on"}), {"foo": "baz"}
        )

        p2 = ManifestParser(
            "test",
            """
[manifest]
name = test

[cmake.defines.test=on]
foo = baz

[cmake.defines]
foo = bar
""",
        )
        self.assertEqual(
            p2.get_section_as_dict("cmake.defines", {"test": "on"}),
            {"foo": "bar"},
            msg="sections cascade in the order they appear in the manifest",
        )

    def test_parse_common_manifests(self) -> None:
        # Smoke-test that every manifest shipped in this repo parses.
        patch_loader(__name__)
        manifests = load_all_manifests(None)
        self.assertNotEqual(0, len(manifests), msg="parsed some number of manifests")

    def test_mismatch_name(self) -> None:
        # The manifest's name field must match its filename.
        with self.assertRaisesRegex(
            Exception,
            "filename of the manifest 'foo' does not match the manifest name 'bar'",
        ):
            ManifestParser(
                "foo",
                """
[manifest]
name = bar
""",
            )

    def test_duplicate_manifest(self) -> None:
        # Two manifest files declaring the same name must be diagnosed.
        patch_loader(__name__, "fixtures/duplicate")

        with self.assertRaisesRegex(Exception, "found duplicate manifest 'foo'"):
            load_all_manifests(None)
class PlatformTest(unittest.TestCase):
    """Tests for HostType probing, serialization, and OS predicates."""

    def test_create(self) -> None:
        host = HostType()
        self.assertNotEqual(host.ostype, None, msg="probed and returned something")

        # The tuple-string encoding must round-trip losslessly.
        encoded = host.as_tuple_string()
        decoded = HostType.from_tuple_string(encoded)
        self.assertEqual(decoded, host)

    def test_rendering_of_none(self) -> None:
        # Unspecified components render as the literal string "none".
        host = HostType(ostype="foo")
        self.assertEqual(host.as_tuple_string(), "foo-none-none")

    def test_is_methods(self) -> None:
        # Each ostype satisfies exactly its own predicate.
        windows_host = HostType(ostype="windows")
        self.assertTrue(windows_host.is_windows())
        self.assertFalse(windows_host.is_darwin())
        self.assertFalse(windows_host.is_linux())

        darwin_host = HostType(ostype="darwin")
        self.assertFalse(darwin_host.is_windows())
        self.assertTrue(darwin_host.is_darwin())
        self.assertFalse(darwin_host.is_linux())

        linux_host = HostType(ostype="linux")
        self.assertFalse(linux_host.is_windows())
        self.assertFalse(linux_host.is_darwin())
        self.assertTrue(linux_host.is_linux())
class Win32SubstTest(unittest.TestCase):
    """Tests for matching a path against existing win32 `subst` mappings."""

    def _find(self, path, mapping):
        # Thin wrapper so the assertions below stay compact.
        return find_existing_win32_subst_for_path(path, subst_mapping=mapping)

    def test_no_existing_subst(self) -> None:
        # No mapping at all, and a mapping to an unrelated path, both miss.
        self.assertIsNone(
            self._find(
                r"C:\users\alice\appdata\local\temp\fbcode_builder_getdeps", {}
            )
        )
        self.assertIsNone(
            self._find(
                r"C:\users\alice\appdata\local\temp\fbcode_builder_getdeps",
                {"X:\\": r"C:\users\alice\appdata\local\temp\other"},
            )
        )

    def test_exact_match_returns_drive_path(self) -> None:
        # Both backslash and forward-slash spellings of the same path match.
        self.assertEqual(
            self._find(
                r"C:\temp\fbcode_builder_getdeps",
                {"X:\\": r"C:\temp\fbcode_builder_getdeps"},
            ),
            "X:\\",
        )
        self.assertEqual(
            self._find(
                r"C:/temp/fbcode_builder_getdeps",
                {"X:\\": r"C:/temp/fbcode_builder_getdeps"},
            ),
            "X:\\",
        )

    def test_multiple_exact_matches_returns_arbitrary_drive_path(self) -> None:
        # With several equivalent mappings, any one of them is acceptable.
        self.assertIn(
            self._find(
                r"C:\temp\fbcode_builder_getdeps",
                {
                    "X:\\": r"C:\temp\fbcode_builder_getdeps",
                    "Y:\\": r"C:\temp\fbcode_builder_getdeps",
                    "Z:\\": r"C:\temp\fbcode_builder_getdeps",
                },
            ),
            ("X:\\", "Y:\\", "Z:\\"),
        )

    def test_drive_letter_is_case_insensitive(self) -> None:
        self.assertEqual(
            self._find(
                r"C:\temp\fbcode_builder_getdeps",
                {"X:\\": r"c:\temp\fbcode_builder_getdeps"},
            ),
            "X:\\",
        )

    def test_path_components_are_case_insensitive(self) -> None:
        # Case differences in either the query or the mapping are ignored.
        self.assertEqual(
            self._find(
                r"C:\TEMP\FBCODE_builder_getdeps",
                {"X:\\": r"C:\temp\fbcode_builder_getdeps"},
            ),
            "X:\\",
        )
        self.assertEqual(
            self._find(
                r"C:\temp\fbcode_builder_getdeps",
                {"X:\\": r"C:\TEMP\FBCODE_builder_getdeps"},
            ),
            "X:\\",
        )
a/build/fbcode_builder/manifests/CLI11 b/build/fbcode_builder/manifests/CLI11 new file mode 100644 index 000000000..14cb2332a --- /dev/null +++ b/build/fbcode_builder/manifests/CLI11 @@ -0,0 +1,14 @@ +[manifest] +name = CLI11 + +[download] +url = https://github.com/CLIUtils/CLI11/archive/v2.0.0.tar.gz +sha256 = 2c672f17bf56e8e6223a3bfb74055a946fa7b1ff376510371902adb9cb0ab6a3 + +[build] +builder = cmake +subdir = CLI11-2.0.0 + +[cmake.defines] +CLI11_BUILD_TESTS = OFF +CLI11_BUILD_EXAMPLES = OFF diff --git a/build/fbcode_builder/manifests/autoconf b/build/fbcode_builder/manifests/autoconf new file mode 100644 index 000000000..60cff9c50 --- /dev/null +++ b/build/fbcode_builder/manifests/autoconf @@ -0,0 +1,22 @@ +[manifest] +name = autoconf + +[debs] +autoconf + +[homebrew] +autoconf + +[rpms] +autoconf + +[pps] +autoconf + +[download] +url = http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz +sha256 = 954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd4c2969 + +[build] +builder = autoconf +subdir = autoconf-2.69 diff --git a/build/fbcode_builder/manifests/automake b/build/fbcode_builder/manifests/automake new file mode 100644 index 000000000..b098ac2df --- /dev/null +++ b/build/fbcode_builder/manifests/automake @@ -0,0 +1,25 @@ +[manifest] +name = automake + +[homebrew] +automake + +[debs] +automake + +[rpms] +automake + +[pps] +automake + +[download] +url = http://ftp.gnu.org/gnu/automake/automake-1.16.1.tar.gz +sha256 = 608a97523f97db32f1f5d5615c98ca69326ced2054c9f82e65bade7fc4c9dea8 + +[build] +builder = autoconf +subdir = automake-1.16.1 + +[dependencies] +autoconf diff --git a/build/fbcode_builder/manifests/benchmark b/build/fbcode_builder/manifests/benchmark new file mode 100644 index 000000000..25d621184 --- /dev/null +++ b/build/fbcode_builder/manifests/benchmark @@ -0,0 +1,13 @@ +[manifest] +name = benchmark + +[download] +url = https://github.com/google/benchmark/archive/refs/tags/v1.8.0.tar.gz +sha256 = 
ea2e94c24ddf6594d15c711c06ccd4486434d9cf3eca954e2af8a20c88f9f172 + +[build] +builder = cmake +subdir = benchmark-1.8.0/ + +[cmake.defines] +BENCHMARK_ENABLE_TESTING=OFF diff --git a/build/fbcode_builder/manifests/blake3 b/build/fbcode_builder/manifests/blake3 new file mode 100644 index 000000000..12ee6518f --- /dev/null +++ b/build/fbcode_builder/manifests/blake3 @@ -0,0 +1,10 @@ +[manifest] +name = blake3 + +[download] +url = https://github.com/BLAKE3-team/BLAKE3/archive/refs/tags/1.5.1.tar.gz +sha256 = 822cd37f70152e5985433d2c50c8f6b2ec83aaf11aa31be9fe71486a91744f37 + +[build] +builder = cmake +subdir = BLAKE3-1.5.1/c diff --git a/build/fbcode_builder/manifests/boost b/build/fbcode_builder/manifests/boost new file mode 100644 index 000000000..923e52314 --- /dev/null +++ b/build/fbcode_builder/manifests/boost @@ -0,0 +1,116 @@ +[manifest] +name = boost + +[download.not(os=windows)] +url = https://archives.boost.io/release/1.83.0/source/boost_1_83_0.tar.gz +sha256 = c0685b68dd44cc46574cce86c4e17c0f611b15e195be9848dfd0769a0a207628 + +[download.os=windows] +url = https://archives.boost.io/release/1.83.0/source/boost_1_83_0.zip +sha256 = c86bd9d9eef795b4b0d3802279419fde5221922805b073b9bd822edecb1ca28e + +[preinstalled.env] +# Here we list the acceptable versions that cmake needs a hint to find +BOOST_ROOT_1_69_0 +BOOST_ROOT_1_83_0 + +[debs] +libboost-all-dev + +[homebrew] +boost +# Boost cmake detection on homebrew adds this as requirement: https://github.com/Homebrew/homebrew-core/issues/67427#issuecomment-754187345 +icu4c + +[pps] +boost + +[rpms.all(distro=centos_stream,distro_vers=8)] +boost169 +boost169-math +boost169-test +boost169-fiber +boost169-graph +boost169-log +boost169-openmpi +boost169-timer +boost169-chrono +boost169-locale +boost169-thread +boost169-atomic +boost169-random +boost169-static +boost169-contract +boost169-date-time +boost169-iostreams +boost169-container +boost169-coroutine +boost169-filesystem +boost169-system +boost169-stacktrace 
+boost169-regex +boost169-devel +boost169-context +boost169-python3-devel +boost169-type_erasure +boost169-wave +boost169-python3 +boost169-serialization +boost169-program-options + +[rpms.distro=fedora] +boost-devel +boost-static + +[build] +builder = boost +job_weight_mib = 512 +patchfile = boost_comparator_operator_fix.patch + +[b2.args] +--with-atomic +--with-chrono +--with-container +--with-context +--with-contract +--with-coroutine +--with-date_time +--with-exception +--with-fiber +--with-filesystem +--with-graph +--with-graph_parallel +--with-iostreams +--with-locale +--with-log +--with-math +--with-mpi +--with-program_options +--with-python +--with-random +--with-regex +--with-serialization +--with-stacktrace +--with-system +--with-test +--with-thread +--with-timer +--with-type_erasure + +[bootstrap.args.os=darwin] +# Not really gcc, but CI puts a broken clang in the PATH, and saying gcc +# here selects the correct one from Xcode. +--with-toolset=gcc + +[b2.args.os=linux] +# RHEL hardened gcc is not compatible with PCH +# https://bugzilla.redhat.com/show_bug.cgi?id=1806545 +pch=off + +[b2.args.os=darwin] +toolset=clang +# Since Xcode 15.3 std::piecewise_construct is only visible in C++17 and later modes +cxxflags="-DBOOST_UNORDERED_HAVE_PIECEWISE_CONSTRUCT=0" + +[b2.args.all(os=windows,fb=on)] +toolset=msvc-14.2 diff --git a/build/fbcode_builder/manifests/bz2 b/build/fbcode_builder/manifests/bz2 new file mode 100644 index 000000000..af2f357d5 --- /dev/null +++ b/build/fbcode_builder/manifests/bz2 @@ -0,0 +1,30 @@ +[manifest] +name = bz2 + +[debs] +libbz2-dev + +[homebrew] +bzip2 + +[rpms] +bzip2-devel + +[download] +url = https://sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz +sha256 = ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269 + +[build.not(os=windows)] +builder = make +subdir = bzip2-1.0.8 + +[make.build_args.os=linux] +# python bz2 support on linux needs dynamic library +-f +Makefile-libbz2_so + +[make.install_args] +install + 
+[build.os=windows] +builder = nop diff --git a/build/fbcode_builder/manifests/cabal b/build/fbcode_builder/manifests/cabal new file mode 100644 index 000000000..1405b8bc8 --- /dev/null +++ b/build/fbcode_builder/manifests/cabal @@ -0,0 +1,12 @@ +[manifest] +name = cabal + +[download.os=linux] +url = https://downloads.haskell.org/~cabal/cabal-install-3.6.2.0/cabal-install-3.6.2.0-x86_64-linux-deb10.tar.xz +sha256 = 4759b56e9257e02f29fa374a6b25d6cb2f9d80c7e3a55d4f678a8e570925641c + +[build] +builder = nop + +[install.files] +cabal = bin/cabal diff --git a/build/fbcode_builder/manifests/clang b/build/fbcode_builder/manifests/clang new file mode 100644 index 000000000..a2133e018 --- /dev/null +++ b/build/fbcode_builder/manifests/clang @@ -0,0 +1,5 @@ +[manifest] +name = clang + +[rpms] +clang15-devel diff --git a/build/fbcode_builder/manifests/cmake b/build/fbcode_builder/manifests/cmake new file mode 100644 index 000000000..06d1c9b0a --- /dev/null +++ b/build/fbcode_builder/manifests/cmake @@ -0,0 +1,49 @@ +[manifest] +name = cmake + +[homebrew] +cmake + +# 18.04 cmake is too old +[debs.not(all(distro=ubuntu,distro_vers="18.04"))] +cmake + +[rpms] +cmake + +[pps] +cmake + +[dependencies] +ninja + +[download.os=windows] +url = https://github.com/Kitware/CMake/releases/download/v3.20.2/cmake-3.20.2-windows-x86_64.zip +sha256 = 15a49e2ab81c1822d75b1b1a92f7863f58e31f6d6aac1c4103eef2b071be3112 + +[download.os=darwin] +url = https://github.com/Kitware/CMake/releases/download/v3.20.2/cmake-3.20.2-macos-universal.tar.gz +sha256 = 0100663380a3bd977b001183cd487412db7aad9de6859927bde97e1e6e44e645 + +[download.any(os=linux,os=freebsd)] +url = https://github.com/Kitware/CMake/releases/download/v3.20.2/cmake-3.20.2.tar.gz +sha256 = aecf6ecb975179eb3bb6a4a50cae192d41e92b9372b02300f9e8f1d5f559544e + +[build.os=windows] +builder = nop +subdir = cmake-3.20.2-windows-x86_64 + +[build.os=darwin] +builder = nop +subdir = cmake-3.20.2-macos-universal + +[install.files.os=darwin] 
+CMake.app/Contents/bin = bin +CMake.app/Contents/share = share + +[build.any(os=linux,os=freebsd)] +builder = cmakebootstrap +subdir = cmake-3.20.2 + +[make.install_args.any(os=linux,os=freebsd)] +install diff --git a/build/fbcode_builder/manifests/cpptoml b/build/fbcode_builder/manifests/cpptoml new file mode 100644 index 000000000..c4d6d8d9c --- /dev/null +++ b/build/fbcode_builder/manifests/cpptoml @@ -0,0 +1,16 @@ +[manifest] +name = cpptoml + +[homebrew] +cpptoml + +[download] +url = https://github.com/chadaustin/cpptoml/archive/refs/tags/v0.1.2.tar.gz +sha256 = beda37e94f9746874436c8090c045fd80ae6f8a51f7c668c932a2b110a4fc277 + +[build] +builder = cmake +subdir = cpptoml-0.1.2 + +[cmake.defines.os=freebsd] +ENABLE_LIBCXX=NO diff --git a/build/fbcode_builder/manifests/double-conversion b/build/fbcode_builder/manifests/double-conversion new file mode 100644 index 000000000..720d9a2ec --- /dev/null +++ b/build/fbcode_builder/manifests/double-conversion @@ -0,0 +1,23 @@ +[manifest] +name = double-conversion + +[download] +url = https://github.com/google/double-conversion/archive/v3.1.4.tar.gz +sha256 = 95004b65e43fefc6100f337a25da27bb99b9ef8d4071a36a33b5e83eb1f82021 + +[homebrew] +double-conversion + +[debs] +libdouble-conversion-dev + +[rpms] +double-conversion +double-conversion-devel + +[pps] +double-conversion + +[build] +builder = cmake +subdir = double-conversion-3.1.4 diff --git a/build/fbcode_builder/manifests/eden b/build/fbcode_builder/manifests/eden new file mode 100644 index 000000000..168f460e9 --- /dev/null +++ b/build/fbcode_builder/manifests/eden @@ -0,0 +1,113 @@ +[manifest] +name = eden +fbsource_path = fbcode/eden +shipit_project = eden +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebook/sapling.git + +[github.actions] +run_tests = off + +[sandcastle] +run_tests = off + +[build] +builder = cmake + +[dependencies] +blake3 +googletest +folly +fbthrift +fb303 +cpptoml +rocksdb +re2 +libgit2 +pexpect +python-toml 
+python-filelock +edencommon +rust-shed + +[dependencies.fbsource=on] +rust + +# macOS ships with sqlite3, and some of the core system +# frameworks require that that version be linked rather +# than the one we might build for ourselves here, so we +# skip building it on macos. +[dependencies.not(os=darwin)] +sqlite3 + +[dependencies.os=darwin] +osxfuse + +[dependencies.not(os=windows)] +# TODO: teach getdeps to compile curl on Windows. +# Enabling curl on Windows requires us to find a way to compile libcurl with +# msvc. +libcurl +# Added so that OSS doesn't see system "python" which is python 2 on darwin and some linux +python +# TODO: teach getdeps to compile lmdb on Windows. +lmdb + +[shipit.pathmap.fb=on] +# for internal builds that use getdeps +fbcode/fb303 = fb303 +fbcode/common/rust/shed = common/rust/shed +fbcode/thrift/lib/rust = thrift/lib/rust + +[shipit.pathmap] +# Map hostcaps for now as eden C++ includes its .h. Rust-shed should install it +fbcode/common/rust/shed/hostcaps = common/rust/shed/hostcaps +fbcode/configerator/structs/scm/hg = configerator/structs/scm/hg +fbcode/eden/oss = . 
+fbcode/eden = eden +fbcode/tools/lfs = tools/lfs + +[shipit.pathmap.fb=off] +fbcode/eden/fs/public_autocargo = eden/fs +fbcode/eden/scm/public_autocargo = eden/scm +fbcode/common/rust/shed/hostcaps/public_cargo = common/rust/shed/hostcaps +fbcode/configerator/structs/scm/hg/public_autocargo = configerator/structs/scm/hg + +[shipit.strip] +^fbcode/eden/addons/.*$ +^fbcode/eden/fs/eden-config\.h$ +^fbcode/eden/fs/py/eden/config\.py$ +^fbcode/eden/hg-server/.*$ +^fbcode/eden/mononoke/(?!lfs_protocol) +^fbcode/eden/scm/build/.*$ +^fbcode/eden/scm/lib/third-party/rust/.*/Cargo.toml$ +^fbcode/eden/website/.*$ +^fbcode/eden/.*/\.cargo/.*$ +/Cargo\.lock$ +\.pyc$ + +[shipit.strip.fb=off] +^fbcode/common/rust/shed(?!/public_autocargo).*/Cargo\.toml$ +^fbcode/configerator/structs/scm/hg(?!/public_autocargo).*/Cargo\.toml$ +^fbcode/eden/fs(?!/public_autocargo).*/Cargo\.toml$ +^fbcode/eden/scm(?!/public_autocargo|/saplingnative).*/Cargo\.toml$ +^.*/facebook/.*$ +^.*/fb/.*$ + +[cmake.defines.all(fb=on,os=windows)] +ENABLE_GIT=OFF +INSTALL_PYTHON_LIB=ON + +[cmake.defines.all(not(fb=on),os=windows)] +ENABLE_GIT=OFF + +[cmake.defines.fbsource=on] +USE_CARGO_VENDOR=ON + +[cmake.defines.fb=on] +IS_FB_BUILD=ON + +[depends.environment] +EDEN_VERSION_OVERRIDE diff --git a/build/fbcode_builder/manifests/edencommon b/build/fbcode_builder/manifests/edencommon new file mode 100644 index 000000000..e2c1b1167 --- /dev/null +++ b/build/fbcode_builder/manifests/edencommon @@ -0,0 +1,32 @@ +[manifest] +name = edencommon +fbsource_path = fbcode/eden/common +shipit_project = edencommon +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebookexperimental/edencommon.git + +[build] +builder = cmake + +[dependencies] +fbthrift +fb303 +fmt +folly +gflags +glog + +[cmake.defines.test=on] +BUILD_TESTS=ON + +[cmake.defines.test=off] +BUILD_TESTS=OFF + +[shipit.pathmap] +fbcode/eden/common = eden/common +fbcode/eden/common/oss = . 
+ +[shipit.strip] +@README.facebook@ diff --git a/build/fbcode_builder/manifests/exprtk b/build/fbcode_builder/manifests/exprtk new file mode 100644 index 000000000..c0dfc1afb --- /dev/null +++ b/build/fbcode_builder/manifests/exprtk @@ -0,0 +1,15 @@ +[manifest] +name = exprtk + +[download] +url = https://github.com/ArashPartow/exprtk/archive/refs/tags/0.0.1.tar.gz +sha256 = fb72791c88ae3b3426e14fdad630027715682584daf56b973569718c56e33f28 + +[build.not(os=windows)] +builder = nop +subdir = exprtk-0.0.1 + +[install.files] +exprtk.hpp = exprtk.hpp + +[dependencies] diff --git a/build/fbcode_builder/manifests/fatal b/build/fbcode_builder/manifests/fatal new file mode 100644 index 000000000..b516d765f --- /dev/null +++ b/build/fbcode_builder/manifests/fatal @@ -0,0 +1,24 @@ +[manifest] +name = fatal +fbsource_path = fbcode/fatal +shipit_project = fatal + +[git] +repo_url = https://github.com/facebook/fatal.git + +[shipit.pathmap] +fbcode/fatal = fatal +fbcode/fatal/public_tld = . + +[build] +builder = nop +subdir = . + +[install.files] +fatal/portability.h = fatal/portability.h +fatal/preprocessor.h = fatal/preprocessor.h +fatal/container = fatal/container +fatal/functional = fatal/functional +fatal/math = fatal/math +fatal/string = fatal/string +fatal/type = fatal/type diff --git a/build/fbcode_builder/manifests/fb303 b/build/fbcode_builder/manifests/fb303 new file mode 100644 index 000000000..ad398a8c2 --- /dev/null +++ b/build/fbcode_builder/manifests/fb303 @@ -0,0 +1,37 @@ +[manifest] +name = fb303 +fbsource_path = fbcode/fb303 +shipit_project = fb303 +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebook/fb303.git + +[cargo] +cargo_config_file = source/fb303/thrift/.cargo/config.toml + +[crate.pathmap] +fb303_core = fb303/thrift + +[build] +builder = cmake + +[dependencies] +folly +gflags +glog +fbthrift + +[cmake.defines.test=on] +BUILD_TESTS=ON + +[cmake.defines.test=off] +BUILD_TESTS=OFF + +[shipit.pathmap] +fbcode/fb303/github = . 
+fbcode/fb303/public_autocargo = fb303 +fbcode/fb303 = fb303 + +[shipit.strip] +^fbcode/fb303/(?!public_autocargo).+/Cargo\.toml$ diff --git a/build/fbcode_builder/manifests/fboss b/build/fbcode_builder/manifests/fboss new file mode 100644 index 000000000..8bcd1cb11 --- /dev/null +++ b/build/fbcode_builder/manifests/fboss @@ -0,0 +1,48 @@ +[manifest] +name = fboss +fbsource_path = fbcode/fboss +shipit_project = fboss +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebook/fboss.git + +[build.os=linux] +builder = cmake +# fboss files take a lot of RAM to compile. +job_weight_mib = 3072 + +[build.not(os=linux)] +builder = nop + +[dependencies] +folly +fb303 +wangle +fizz +fmt +libsodium +googletest +zstd +fatal +fbthrift +iproute2 +libusb +libcurl +libnl +libsai +re2 +python +yaml-cpp +libyaml +CLI11 +exprtk +nlohmann-json + +[shipit.pathmap] +fbcode/fboss/github = . +fbcode/fboss/common = common +fbcode/fboss = fboss + +[sandcastle] +run_tests = off diff --git a/build/fbcode_builder/manifests/fbthrift b/build/fbcode_builder/manifests/fbthrift new file mode 100644 index 000000000..3d852d8d1 --- /dev/null +++ b/build/fbcode_builder/manifests/fbthrift @@ -0,0 +1,47 @@ +[manifest] +name = fbthrift +fbsource_path = fbcode/thrift +shipit_project = fbthrift +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebook/fbthrift.git + +[cargo] +cargo_config_file = source/thrift/lib/rust/.cargo/config.toml + +[crate.pathmap] +fbthrift = thrift/lib/rust + +[build] +builder = cmake +job_weight_mib = 2048 + +[dependencies] +fizz +fmt +folly +googletest +libsodium +python-six +wangle +zstd +mvfst +# Thrift also depends on openssl but since the latter requires a platform- +# specific configuration we rely on the folly manifest to provide this +# dependency to avoid duplication. 
+ +[dependencies.os=linux] +# python doesn't build on Windows yet and this causes python3 shebangs to +# expand to a non-portable path on macOS +python + +[shipit.pathmap] +fbcode/thrift/public_tld = . +fbcode/thrift = thrift + +[shipit.strip] +^fbcode/thrift/thrift-config\.h$ +^fbcode/thrift/perf/canary.py$ +^fbcode/thrift/perf/loadtest.py$ +^fbcode/thrift/.castle/.* diff --git a/build/fbcode_builder/manifests/fizz b/build/fbcode_builder/manifests/fizz new file mode 100644 index 000000000..15e14ec60 --- /dev/null +++ b/build/fbcode_builder/manifests/fizz @@ -0,0 +1,37 @@ +[manifest] +name = fizz +fbsource_path = fbcode/fizz +shipit_project = fizz +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebookincubator/fizz.git + +[build] +builder = cmake +subdir = fizz + +[cmake.defines] +BUILD_EXAMPLES = OFF + +[cmake.defines.test=on] +BUILD_TESTS = ON + +[cmake.defines.all(os=windows, test=on)] +BUILD_TESTS = OFF + +[cmake.defines.test=off] +BUILD_TESTS = OFF + +[dependencies] +folly +libsodium +zlib +zstd + +[dependencies.all(test=on, not(os=windows))] +googletest + +[shipit.pathmap] +fbcode/fizz/public_tld = . 
+fbcode/fizz = fizz diff --git a/build/fbcode_builder/manifests/fmt b/build/fbcode_builder/manifests/fmt new file mode 100644 index 000000000..015e8c3bc --- /dev/null +++ b/build/fbcode_builder/manifests/fmt @@ -0,0 +1,20 @@ +[manifest] +name = fmt + +[download] +url = https://github.com/fmtlib/fmt/archive/refs/tags/9.1.0.tar.gz +sha256 = 5dea48d1fcddc3ec571ce2058e13910a0d4a6bab4cc09a809d8b1dd1c88ae6f2 + +[build] +builder = cmake +subdir = fmt-9.1.0 + +[cmake.defines] +FMT_TEST = OFF +FMT_DOC = OFF + +[homebrew] +fmt + +[rpms.distro=fedora] +fmt-devel diff --git a/build/fbcode_builder/manifests/folly b/build/fbcode_builder/manifests/folly new file mode 100644 index 000000000..5fc5297f9 --- /dev/null +++ b/build/fbcode_builder/manifests/folly @@ -0,0 +1,76 @@ +[manifest] +name = folly +fbsource_path = fbcode/folly +shipit_project = folly +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebook/folly.git + +[build] +builder = cmake +job_weight_mib = 1024 + +[dependencies] +gflags +glog +googletest +boost +libdwarf +libevent +libsodium +double-conversion +fmt +lz4 +snappy +zstd +# no openssl or zlib in the linux case, why? +# these are usually installed on the system +# and are the easiest system deps to pull in. +# In the future we want to be able to express +# that a system dep is sufficient in the manifest +# for eg: openssl and zlib, but for now we don't +# have it. + +# macOS doesn't expose the openssl api so we need +# to build our own. +[dependencies.os=darwin] +openssl + +# Windows has neither openssl nor zlib, so we get +# to provide both +[dependencies.os=windows] +openssl +zlib + +[dependencies.os=linux] +libiberty +libunwind + +# xz depends on autoconf which does not build on +# Windows +[dependencies.not(os=windows)] +xz + +[shipit.pathmap] +fbcode/folly/public_tld = . 
+fbcode/folly = folly + +[shipit.strip] +^fbcode/folly/folly-config\.h$ +^fbcode/folly/public_tld/build/facebook_.* + +[cmake.defines] +BUILD_SHARED_LIBS=OFF +BOOST_LINK_STATIC=ON + +[cmake.defines.os=freebsd] +LIBDWARF_FOUND=NO + +[cmake.defines.test=on] +BUILD_TESTS=ON +BUILD_BENCHMARKS=OFF + +[cmake.defines.test=off] +BUILD_TESTS=OFF +BUILD_BENCHMARKS=OFF diff --git a/build/fbcode_builder/manifests/gflags b/build/fbcode_builder/manifests/gflags new file mode 100644 index 000000000..47c01c204 --- /dev/null +++ b/build/fbcode_builder/manifests/gflags @@ -0,0 +1,25 @@ +[manifest] +name = gflags + +[download] +url = https://github.com/gflags/gflags/archive/v2.2.2.tar.gz +sha256 = 34af2f15cf7367513b352bdcd2493ab14ce43692d2dcd9dfc499492966c64dcf + +[build] +builder = cmake +subdir = gflags-2.2.2 + +[cmake.defines] +BUILD_SHARED_LIBS = ON +BUILD_STATIC_LIBS = ON +#BUILD_gflags_nothreads_LIB = OFF +BUILD_gflags_LIB = ON + +[homebrew] +gflags + +[debs] +libgflags-dev + +[rpms.distro=fedora] +gflags-devel diff --git a/build/fbcode_builder/manifests/ghc b/build/fbcode_builder/manifests/ghc new file mode 100644 index 000000000..0e452195c --- /dev/null +++ b/build/fbcode_builder/manifests/ghc @@ -0,0 +1,15 @@ +[manifest] +name = ghc + +[download.os=linux] +url = https://downloads.haskell.org/~ghc/8.10.7/ghc-8.10.7-x86_64-fedora27-linux.tar.xz +sha256 = b6ed67049a23054a8042e65c9976d5e196e5ee4e83b29b2ee35c8a22ab1e5b73 + +[build] +builder = autoconf +subdir = ghc-8.10.7 +build_in_src_dir = true +only_install = true + +[make.install_args] +install diff --git a/build/fbcode_builder/manifests/glean b/build/fbcode_builder/manifests/glean new file mode 100644 index 000000000..7cb422c9b --- /dev/null +++ b/build/fbcode_builder/manifests/glean @@ -0,0 +1,45 @@ +[manifest] +name = glean +fbsource_path = fbcode/glean +shipit_project = facebookincubator/Glean +use_shipit = true + +[shipit.pathmap] +# These are only used by target determinator to trigger builds, the +# real path mappings 
are in the ShipIt config. +fbcode/glean = glean +fbcode/common/hs = hsthrift + +[subprojects] +hsthrift = hsthrift + +[dependencies] +cabal +ghc +gflags +glog +folly +rocksdb +xxhash +llvm +clang +re2 + +[build] +builder = make + +[make.build_args] +cabal-update +all +glass +glean-clang +EXTRA_GHC_OPTS=-j4 +RTS -A32m -n4m -RTS +CABAL_CONFIG_FLAGS=-f-hack-tests -f-typescript-tests -f-python-tests -f-dotnet-tests -f-go-tests -f-rust-tests -f-java-lsif-tests -f-flow-tests + +[make.install_args] +install + +[make.test_args] +test +EXTRA_GHC_OPTS=-j4 +RTS -A32m -n4m -RTS +CABAL_CONFIG_FLAGS=-f-hack-tests -f-typescript-tests -f-python-tests -f-dotnet-tests -f-go-tests -f-rust-tests -f-java-lsif-tests -f-flow-tests diff --git a/build/fbcode_builder/manifests/glog b/build/fbcode_builder/manifests/glog new file mode 100644 index 000000000..b5d5fa814 --- /dev/null +++ b/build/fbcode_builder/manifests/glog @@ -0,0 +1,32 @@ +[manifest] +name = glog + +[download] +url = https://github.com/google/glog/archive/v0.5.0.tar.gz +sha256 = eede71f28371bf39aa69b45de23b329d37214016e2055269b3b5e7cfd40b59f5 + +[build] +builder = cmake +subdir = glog-0.5.0 + +[dependencies] +gflags + +[cmake.defines] +BUILD_SHARED_LIBS=ON +BUILD_TESTING=NO +WITH_PKGCONFIG=ON + +[cmake.defines.os=freebsd] +HAVE_TR1_UNORDERED_MAP=OFF +HAVE_TR1_UNORDERED_SET=OFF + +[homebrew] +glog + +[debs] +libgoogle-glog-dev + +[rpms.distro=fedora] +glog-devel + diff --git a/build/fbcode_builder/manifests/googletest b/build/fbcode_builder/manifests/googletest new file mode 100644 index 000000000..101175874 --- /dev/null +++ b/build/fbcode_builder/manifests/googletest @@ -0,0 +1,30 @@ +[manifest] +name = googletest + +[download] +url = https://github.com/google/googletest/archive/refs/tags/release-1.12.1.tar.gz +sha256 = 81964fe578e9bd7c94dfdb09c8e4d6e6759e19967e397dbea48d1c10e45d0df2 + +[build] +builder = cmake +subdir = googletest-release-1.12.1 + +[cmake.defines] +# Everything else defaults to the shared runtime, so tell 
gtest that +# it should not use its choice of the static runtime +gtest_force_shared_crt=ON + +[cmake.defines.os=windows] +BUILD_SHARED_LIBS=ON + +[homebrew] +googletest + +# packaged googletest is too old +[debs.not(all(distro=ubuntu,any(distro_vers="18.04",distro_vers="20.04",distro_vers="22.04")))] +libgtest-dev +libgmock-dev + +[rpms.distro=fedora] +gmock-devel +gtest-devel diff --git a/build/fbcode_builder/manifests/gperf b/build/fbcode_builder/manifests/gperf new file mode 100644 index 000000000..13d7a890f --- /dev/null +++ b/build/fbcode_builder/manifests/gperf @@ -0,0 +1,14 @@ +[manifest] +name = gperf + +[download] +url = http://ftp.gnu.org/pub/gnu/gperf/gperf-3.1.tar.gz +sha256 = 588546b945bba4b70b6a3a616e80b4ab466e3f33024a352fc2198112cdbb3ae2 + +[build.not(os=windows)] +builder = autoconf +subdir = gperf-3.1 + +[build.os=windows] +builder = nop + diff --git a/build/fbcode_builder/manifests/hsthrift b/build/fbcode_builder/manifests/hsthrift new file mode 100644 index 000000000..8a958820c --- /dev/null +++ b/build/fbcode_builder/manifests/hsthrift @@ -0,0 +1,34 @@ +[manifest] +name = hsthrift +fbsource_path = fbcode/common/hs +shipit_project = facebookincubator/hsthrift +use_shipit = true + +[shipit.pathmap] +# These are only used by target determinator to trigger builds, the +# real path mappings are in the ShipIt config. +fbcode/common/hs = . 
+ +[dependencies] +cabal +ghc +gflags +glog +folly +fbthrift +wangle +fizz +boost + +[build] +builder = make + +[make.build_args] +cabal-update +all + +[make.install_args] +install + +[make.test_args] +test diff --git a/build/fbcode_builder/manifests/iproute2 b/build/fbcode_builder/manifests/iproute2 new file mode 100644 index 000000000..f7f3e766a --- /dev/null +++ b/build/fbcode_builder/manifests/iproute2 @@ -0,0 +1,14 @@ +[manifest] +name = iproute2 + +[download] +url = https://mirrors.edge.kernel.org/pub/linux/utils/net/iproute2/iproute2-4.12.0.tar.gz +sha256 = 46612a1e2d01bb31932557bccdb1b8618cae9a439dfffc08ef35ed8e197f14ce + +[build.os=linux] +builder = iproute2 +subdir = iproute2-4.12.0 +patchfile = iproute2_oss.patch + +[build.not(os=linux)] +builder = nop diff --git a/build/fbcode_builder/manifests/katran b/build/fbcode_builder/manifests/katran new file mode 100644 index 000000000..c4f2c74f4 --- /dev/null +++ b/build/fbcode_builder/manifests/katran @@ -0,0 +1,41 @@ +[manifest] +name = katran +fbsource_path = fbcode/katran +shipit_project = katran +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebookincubator/katran.git + +[build.not(os=linux)] +builder = nop + +[build.os=linux] +builder = cmake +subdir = . + +[cmake.defines.test=on] +BUILD_TESTS=ON + +[cmake.defines.test=off] +BUILD_TESTS=OFF + +[dependencies] +folly +fizz +libbpf +libmnl +zlib +googletest +fmt + +[debs] +libssl-dev + +[shipit.pathmap] +fbcode/katran/public_root = . 
+fbcode/katran = katran + +[shipit.strip] +^fbcode/katran/facebook +^fbcode/katran/OSS_SYNC diff --git a/build/fbcode_builder/manifests/libbpf b/build/fbcode_builder/manifests/libbpf new file mode 100644 index 000000000..9ab3a26be --- /dev/null +++ b/build/fbcode_builder/manifests/libbpf @@ -0,0 +1,26 @@ +[manifest] +name = libbpf + +[download] +url = https://github.com/libbpf/libbpf/archive/refs/tags/v0.7.0.tar.gz +sha256 = 5083588ce5a3a620e395ee1e596af77b4ec5771ffc71cff2af49dfee38c06361 + +# BPF only builds on linux, so make it a NOP on other platforms +[build.not(os=linux)] +builder = nop + +[build.os=linux] +builder = make +subdir = libbpf-0.7.0/src + +[make.build_args] +BUILD_STATIC_ONLY=y + +# libbpf-0.3 requires uapi headers >= 5.8 +[make.install_args] +install +install_uapi_headers +BUILD_STATIC_ONLY=y + +[dependencies] +libelf diff --git a/build/fbcode_builder/manifests/libcurl b/build/fbcode_builder/manifests/libcurl new file mode 100644 index 000000000..8c94e4679 --- /dev/null +++ b/build/fbcode_builder/manifests/libcurl @@ -0,0 +1,42 @@ +[manifest] +name = libcurl + +[rpms] +libcurl-devel +libcurl + +[debs] +libcurl4-openssl-dev + +[pps] +libcurl-gnutls + +[download] +url = https://curl.haxx.se/download/curl-7.65.1.tar.gz +sha256 = 821aeb78421375f70e55381c9ad2474bf279fc454b791b7e95fc83562951c690 + +[dependencies] +nghttp2 + +# We use system OpenSSL on Linux (see folly's manifest for details) +[dependencies.not(os=linux)] +openssl + +[build.not(os=windows)] +builder = autoconf +subdir = curl-7.65.1 + +[autoconf.args] +# fboss (which added the libcurl dep) doesn't need ldap so it is disabled here. +# if someone in the future wants to add ldap for something else, it won't hurt +# fboss. However, that would require adding an ldap manifest. +# +# For the same reason, we disable libssh2 and libidn2 which aren't really used +# but would require adding manifests if we don't disable them. 
+--disable-ldap +--without-libssh2 +--without-libidn2 + +[build.os=windows] +builder = cmake +subdir = curl-7.65.1 diff --git a/build/fbcode_builder/manifests/libdwarf b/build/fbcode_builder/manifests/libdwarf new file mode 100644 index 000000000..e93ba16bc --- /dev/null +++ b/build/fbcode_builder/manifests/libdwarf @@ -0,0 +1,20 @@ +[manifest] +name = libdwarf + +[rpms] +libdwarf-devel +libdwarf + +[debs] +libdwarf-dev + +[homebrew] +dwarfutils + +[download] +url = https://www.prevanders.net/libdwarf-0.9.2.tar.xz +sha256 = 22b66d06831a76f6a062126cdcad3fcc58540b89a1acb23c99f8861f50999ec3 + +[build] +builder = cmake +subdir = libdwarf-0.9.2 diff --git a/build/fbcode_builder/manifests/libelf b/build/fbcode_builder/manifests/libelf new file mode 100644 index 000000000..194d340fc --- /dev/null +++ b/build/fbcode_builder/manifests/libelf @@ -0,0 +1,23 @@ +[manifest] +name = libelf + +[rpms] +elfutils-libelf-devel-static + +[debs] +libelf-dev + +[pps] +libelf + +[download] +url = https://ftp.osuosl.org/pub/blfs/conglomeration/libelf/libelf-0.8.13.tar.gz +sha256 = 591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d + +# libelf only makes sense on linux, so make it a NOP on other platforms +[build.not(os=linux)] +builder = nop + +[build.os=linux] +builder = autoconf +subdir = libelf-0.8.13 diff --git a/build/fbcode_builder/manifests/libevent b/build/fbcode_builder/manifests/libevent new file mode 100644 index 000000000..91a2af90c --- /dev/null +++ b/build/fbcode_builder/manifests/libevent @@ -0,0 +1,41 @@ +[manifest] +name = libevent + +[debs] +libevent-dev + +[homebrew] +libevent + +[rpms] +libevent-devel + +[pps] +libevent + +# Note that the CMakeLists.txt file is present only in +# git repo and not in the release tarball, so take care +# to use the github generated source tarball rather than +# the explicitly uploaded source tarball +[download] +url = https://github.com/libevent/libevent/releases/download/release-2.1.12-stable/libevent-2.1.12-stable.tar.gz 
+sha256 = 92e6de1be9ec176428fd2367677e61ceffc2ee1cb119035037a27d346b0403bb + +[build] +builder = cmake +subdir = libevent-2.1.12-stable + +[cmake.defines] +EVENT__DISABLE_TESTS = ON +EVENT__DISABLE_BENCHMARK = ON +EVENT__DISABLE_SAMPLES = ON +EVENT__DISABLE_REGRESS = ON + +[cmake.defines.shared_libs=on] +EVENT__BUILD_SHARED_LIBRARIES = ON + +[cmake.defines.os=windows] +EVENT__LIBRARY_TYPE = STATIC + +[dependencies.not(any(os=linux, os=freebsd))] +openssl diff --git a/build/fbcode_builder/manifests/libffi b/build/fbcode_builder/manifests/libffi new file mode 100644 index 000000000..b520358fd --- /dev/null +++ b/build/fbcode_builder/manifests/libffi @@ -0,0 +1,23 @@ +[manifest] +name = libffi + +[debs] +libffi-dev + +[homebrew] +libffi + +[rpms] +libffi-devel +libffi + +[pps] +libffi + +[download] +url = https://github.com/libffi/libffi/releases/download/v3.4.2/libffi-3.4.2.tar.gz +sha256 = 540fb721619a6aba3bdeef7d940d8e9e0e6d2c193595bc243241b77ff9e93620 + +[build] +builder = autoconf +subdir = libffi-3.4.2 diff --git a/build/fbcode_builder/manifests/libgit2 b/build/fbcode_builder/manifests/libgit2 new file mode 100644 index 000000000..42bbfca92 --- /dev/null +++ b/build/fbcode_builder/manifests/libgit2 @@ -0,0 +1,33 @@ +[manifest] +name = libgit2 + +[homebrew] +libgit2 + +[rpms] +libgit2-devel + +[pps] +libgit2 + +# Ubuntu 18.04 libgit2 has clash with libcurl4-openssl-dev as it depends on +# libcurl4-gnutls-dev. 
Should be ok from 20.04 again +There is a description at https://github.com/r-hub/sysreqsdb/issues/77 +[debs.not(all(distro=ubuntu,distro_vers="18.04"))] +libgit2-dev + +[download] +url = https://github.com/libgit2/libgit2/archive/v0.28.1.tar.gz +sha256 = 0ca11048795b0d6338f2e57717370208c2c97ad66c6d5eac0c97a8827d13936b + +[build] +builder = cmake +subdir = libgit2-0.28.1 + +[cmake.defines] +# Could turn this on if we also wanted to add a manifest for libssh2 +USE_SSH = OFF +BUILD_CLAR = OFF +# Have to build shared to work around annoying problems with cmake +# mis-parsing the frameworks required to link this on macos :-/ +BUILD_SHARED_LIBS = ON diff --git a/build/fbcode_builder/manifests/libiberty b/build/fbcode_builder/manifests/libiberty new file mode 100644 index 000000000..da022dfca --- /dev/null +++ b/build/fbcode_builder/manifests/libiberty @@ -0,0 +1,27 @@ +[manifest] +name = libiberty + +[rpms] +binutils-devel +binutils + +[debs] +binutils-dev + +[download] +url = https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz +sha256 = f6e4d41fd5fc778b06b7891457b3620da5ecea1006c6a4a41ae998109f85a800 + +[dependencies] +zlib + +[build] +builder = autoconf +subdir = binutils-2.42/libiberty + +# only build the parts needed for demangling +# as we still want to use system linker and assembler etc +[autoconf.args] +--disable-shared +--disable-testsuite +--enable-install-libiberty diff --git a/build/fbcode_builder/manifests/libmnl b/build/fbcode_builder/manifests/libmnl new file mode 100644 index 000000000..2b39d6cb3 --- /dev/null +++ b/build/fbcode_builder/manifests/libmnl @@ -0,0 +1,24 @@ +[manifest] +name = libmnl + +[rpms] +libmnl-devel + +# all centos 8 distros are missing this, +# but it's in fedora so it may be back in a later version +[rpms.not(all(any(distro=centos_stream,distro=centos),distro_vers=8))] +libmnl-static + +[debs] +libmnl-dev + +[pps] +libmnl + +[download] +url = http://www.netfilter.org/pub/libmnl/libmnl-1.0.4.tar.bz2 +sha256 = 
171f89699f286a5854b72b91d06e8f8e3683064c5901fb09d954a9ab6f551f81 + +[build.os=linux] +builder = autoconf +subdir = libmnl-1.0.4 diff --git a/build/fbcode_builder/manifests/libnl b/build/fbcode_builder/manifests/libnl new file mode 100644 index 000000000..f71e10a58 --- /dev/null +++ b/build/fbcode_builder/manifests/libnl @@ -0,0 +1,21 @@ +[manifest] +name = libnl + +[rpms] +libnl3-devel +libnl3 + +[debs] +libnl-3-dev +libnl-route-3-dev + +[pps] +libnl + +[download] +url = https://www.infradead.org/~tgr/libnl/files/libnl-3.2.25.tar.gz +sha256 = 8beb7590674957b931de6b7f81c530b85dc7c1ad8fbda015398bc1e8d1ce8ec5 + +[build.os=linux] +builder = autoconf +subdir = libnl-3.2.25 diff --git a/build/fbcode_builder/manifests/libsai b/build/fbcode_builder/manifests/libsai new file mode 100644 index 000000000..a28c43415 --- /dev/null +++ b/build/fbcode_builder/manifests/libsai @@ -0,0 +1,14 @@ +[manifest] +name = libsai + +[download] +url = https://github.com/opencomputeproject/SAI/archive/v1.14.0.tar.gz +sha256 = 4e3a1d010bda0c589db46e077725a2cd9624a5cc255c89d1caa79deb408d1fa7 + +[build] +builder = nop +subdir = SAI-1.14.0 + +[install.files] +inc = include +experimental = experimental diff --git a/build/fbcode_builder/manifests/libsodium b/build/fbcode_builder/manifests/libsodium new file mode 100644 index 000000000..2cdeb8c78 --- /dev/null +++ b/build/fbcode_builder/manifests/libsodium @@ -0,0 +1,39 @@ +[manifest] +name = libsodium + +[debs] +libsodium-dev + +[homebrew] +libsodium + +[rpms] +libsodium-devel +libsodium-static + +[pps] +libsodium + +[download.not(os=windows)] +url = https://github.com/jedisct1/libsodium/releases/download/1.0.20-RELEASE/libsodium-1.0.20.tar.gz +sha256 = ebb65ef6ca439333c2bb41a0c1990587288da07f6c7fd07cb3a18cc18d30ce19 + +[build.not(os=windows)] +builder = autoconf +subdir = libsodium-1.0.20 + +[download.os=windows] +url = https://github.com/jedisct1/libsodium/releases/download/1.0.20-RELEASE/libsodium-1.0.20-msvc.zip +sha256 = 
2ff97f9e3f5b341bdc808e698057bea1ae454f99e29ff6f9b62e14d0eb1b1baa + +[build.os=windows] +builder = nop + +[install.files.os=windows] +libsodium/x64/Release/v143/dynamic/libsodium.dll = bin/libsodium.dll +libsodium/x64/Release/v143/dynamic/libsodium.lib = lib/libsodium.lib +libsodium/x64/Release/v143/dynamic/libsodium.exp = lib/libsodium.exp +libsodium/x64/Release/v143/dynamic/libsodium.pdb = lib/libsodium.pdb +libsodium/include = include + +[autoconf.args] diff --git a/build/fbcode_builder/manifests/libtool b/build/fbcode_builder/manifests/libtool new file mode 100644 index 000000000..72eb2175a --- /dev/null +++ b/build/fbcode_builder/manifests/libtool @@ -0,0 +1,28 @@ +[manifest] +name = libtool + +[homebrew] +libtool + +[rpms] +libtool + +[debs] +libtool + +[pps] +libtool + +[download] +url = http://ftp.gnu.org/gnu/libtool/libtool-2.4.6.tar.gz +sha256 = e3bd4d5d3d025a36c21dd6af7ea818a2afcd4dfc1ea5a17b39d7854bcd0c06e3 + +[build] +builder = autoconf +subdir = libtool-2.4.6 + +[dependencies] +automake + +[autoconf.args] +--enable-ltdl-install diff --git a/build/fbcode_builder/manifests/libunwind b/build/fbcode_builder/manifests/libunwind new file mode 100644 index 000000000..0a4f03bc8 --- /dev/null +++ b/build/fbcode_builder/manifests/libunwind @@ -0,0 +1,17 @@ +[manifest] +name = libunwind + +[rpms] +libunwind-devel +libunwind + +[debs] +libunwind-dev + +[download] +url = https://github.com/libunwind/libunwind/releases/download/v1.8.1/libunwind-1.8.1.tar.gz +sha256 = ddf0e32dd5fafe5283198d37e4bf9decf7ba1770b6e7e006c33e6df79e6a6157 + +[build] +builder = autoconf +subdir = libunwind-1.8.1 diff --git a/build/fbcode_builder/manifests/libusb b/build/fbcode_builder/manifests/libusb new file mode 100644 index 000000000..ccbec8053 --- /dev/null +++ b/build/fbcode_builder/manifests/libusb @@ -0,0 +1,29 @@ +[manifest] +name = libusb + +[debs] +libusb-1.0-0-dev + +[homebrew] +libusb + +[rpms] +libusb-devel +libusb + +[pps] +libusb + +[download] +url = 
https://github.com/libusb/libusb/releases/download/v1.0.22/libusb-1.0.22.tar.bz2 +sha256 = 75aeb9d59a4fdb800d329a545c2e6799f732362193b465ea198f2aa275518157 + +[build.os=linux] +builder = autoconf +subdir = libusb-1.0.22 + +[autoconf.args] +# fboss (which added the libusb dep) doesn't need udev so it is disabled here. +# if someone in the future wants to add udev for something else, it won't hurt +# fboss. +--disable-udev diff --git a/build/fbcode_builder/manifests/libyaml b/build/fbcode_builder/manifests/libyaml new file mode 100644 index 000000000..a7ff57316 --- /dev/null +++ b/build/fbcode_builder/manifests/libyaml @@ -0,0 +1,13 @@ +[manifest] +name = libyaml + +[download] +url = http://pyyaml.org/download/libyaml/yaml-0.1.7.tar.gz +sha256 = 8088e457264a98ba451a90b8661fcb4f9d6f478f7265d48322a196cec2480729 + +[build.os=linux] +builder = autoconf +subdir = yaml-0.1.7 + +[build.not(os=linux)] +builder = nop diff --git a/build/fbcode_builder/manifests/llvm b/build/fbcode_builder/manifests/llvm new file mode 100644 index 000000000..7b069221e --- /dev/null +++ b/build/fbcode_builder/manifests/llvm @@ -0,0 +1,5 @@ +[manifest] +name = llvm + +[rpms] +llvm15-devel diff --git a/build/fbcode_builder/manifests/lmdb b/build/fbcode_builder/manifests/lmdb new file mode 100644 index 000000000..42ca0ab07 --- /dev/null +++ b/build/fbcode_builder/manifests/lmdb @@ -0,0 +1,17 @@ +[manifest] +name = lmdb + +[build] +builder = make +subdir = lmdb-LMDB_0.9.31/libraries/liblmdb + +[download] +url = https://github.com/LMDB/lmdb/archive/refs/tags/LMDB_0.9.31.tar.gz +sha256 = dd70a8c67807b3b8532b3e987b0a4e998962ecc28643e1af5ec77696b081c9b0 + +[make.build_args] +BUILD_STATIC_ONLY=y + +[make.install_args] +install +BUILD_STATIC_ONLY=y diff --git a/build/fbcode_builder/manifests/lz4 b/build/fbcode_builder/manifests/lz4 new file mode 100644 index 000000000..68a2c3061 --- /dev/null +++ b/build/fbcode_builder/manifests/lz4 @@ -0,0 +1,25 @@ +[manifest] +name = lz4 + +[homebrew] +lz4 + +[rpms] 
+lz4-devel +# centos 8 and centos_stream 9 are missing this rpm +[rpms.not(any(all(distro=centos,distro_vers=8),all(distro=centos_stream,distro_vers=9)))] +lz4-static + +[debs] +liblz4-dev + +[pps] +lz4 + +[download] +url = https://github.com/lz4/lz4/archive/v1.8.3.tar.gz +sha256 = 33af5936ac06536805f9745e0b6d61da606a1f8b4cc5c04dd3cbaca3b9b4fc43 + +[build] +builder = cmake +subdir = lz4-1.8.3/contrib/cmake_unofficial diff --git a/build/fbcode_builder/manifests/mcrouter b/build/fbcode_builder/manifests/mcrouter new file mode 100644 index 000000000..849e8f75d --- /dev/null +++ b/build/fbcode_builder/manifests/mcrouter @@ -0,0 +1,23 @@ +[manifest] +name = mcrouter + +[git] +repo_url = https://github.com/facebook/mcrouter.git + +[dependencies] +folly +wangle +fizz +fbthrift +mvfst +ragel + +[build] +builder = cmake +subdir = . + +[cmake.defines.test=on] +BUILD_TESTS=ON + +[cmake.defines.test=off] +BUILD_TESTS=OFF diff --git a/build/fbcode_builder/manifests/mononoke b/build/fbcode_builder/manifests/mononoke new file mode 100644 index 000000000..d9c88454d --- /dev/null +++ b/build/fbcode_builder/manifests/mononoke @@ -0,0 +1,51 @@ +[manifest] +name = mononoke +fbsource_path = fbcode/eden +shipit_project = eden +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebook/sapling.git + +[build.not(os=windows)] +builder = cargo + +[build.os=windows] +# building Mononoke on windows is not supported +builder = nop + +[cargo] +build_doc = true +workspace_dir = eden/mononoke + +[shipit.pathmap] +fbcode/configerator/structs/scm/hg = configerator/structs/scm/hg +fbcode/configerator/structs/scm/hg/public_autocargo = configerator/structs/scm/hg +fbcode/configerator/structs/scm/mononoke/public_autocargo = configerator/structs/scm/mononoke +fbcode/configerator/structs/scm/mononoke = configerator/structs/scm/mononoke +fbcode/eden/oss = . 
+fbcode/eden = eden +fbcode/eden/fs/public_autocargo = eden/fs +fbcode/eden/mononoke/public_autocargo = eden/mononoke +fbcode/eden/scm/public_autocargo = eden/scm +fbcode/tools/lfs = tools/lfs +tools/rust/ossconfigs = . + +[shipit.strip] +^fbcode/configerator/structs/scm/hg(?!/public_autocargo).*/Cargo\.toml$ +^fbcode/configerator/structs/scm/mononoke(?!/public_autocargo).*/Cargo\.toml$ +^fbcode/eden/fs(?!/public_autocargo).*/Cargo\.toml$ +^fbcode/eden/scm/lib/third-party/rust/.*/Cargo\.toml$ +^fbcode/eden/mononoke(?!/public_autocargo).*/Cargo\.toml$ +# strip other scm code unrelated to mononoke to prevent triggering unnecessary checks +^fbcode/eden(?!/mononoke|/scm/(lib|public_autocargo))/.*$ +^.*/facebook/.*$ +^.*/fb/.*$ + +[dependencies] +fb303 +fbthrift +rust-shed + +[dependencies.fb=on] +rust diff --git a/build/fbcode_builder/manifests/mvfst b/build/fbcode_builder/manifests/mvfst new file mode 100644 index 000000000..c2a797be2 --- /dev/null +++ b/build/fbcode_builder/manifests/mvfst @@ -0,0 +1,32 @@ +[manifest] +name = mvfst +fbsource_path = fbcode/quic +shipit_project = mvfst +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebook/mvfst.git + +[build] +builder = cmake +subdir = . + +[cmake.defines.test=on] +BUILD_TESTS = ON + +[cmake.defines.all(os=windows, test=on)] +BUILD_TESTS = OFF + +[cmake.defines.test=off] +BUILD_TESTS = OFF + +[dependencies] +folly +fizz + +[dependencies.all(test=on, not(os=windows))] +googletest + +[shipit.pathmap] +fbcode/quic/public_root = . 
+fbcode/quic = quic diff --git a/build/fbcode_builder/manifests/ncurses b/build/fbcode_builder/manifests/ncurses new file mode 100644 index 000000000..1bb5e8a45 --- /dev/null +++ b/build/fbcode_builder/manifests/ncurses @@ -0,0 +1,30 @@ +[manifest] +name = ncurses + +[debs] +libncurses-dev + +[homebrew] +ncurses + +[rpms] +ncurses-devel + +[download] +url = https://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.3.tar.gz +sha256 = 97fc51ac2b085d4cde31ef4d2c3122c21abc217e9090a43a30fc5ec21684e059 + +[build.not(os=windows)] +builder = autoconf +subdir = ncurses-6.3 + +[autoconf.args] +--without-cxx-binding +--without-ada + +[autoconf.args.os=linux] +--enable-shared +--with-shared + +[build.os=windows] +builder = nop diff --git a/build/fbcode_builder/manifests/nghttp2 b/build/fbcode_builder/manifests/nghttp2 new file mode 100644 index 000000000..f2b3f6b31 --- /dev/null +++ b/build/fbcode_builder/manifests/nghttp2 @@ -0,0 +1,24 @@ +[manifest] +name = nghttp2 + +[rpms] +libnghttp2-devel +libnghttp2 + +[debs] +libnghttp2-dev + +[pps] +libnghttp2 + +[download] +url = https://github.com/nghttp2/nghttp2/releases/download/v1.47.0/nghttp2-1.47.0.tar.gz +sha256 = 62f50f0e9fc479e48b34e1526df8dd2e94136de4c426b7680048181606832b7c + +[build] +builder = autoconf +subdir = nghttp2-1.47.0 + +[autoconf.args] +--enable-lib-only +--disable-dependency-tracking diff --git a/build/fbcode_builder/manifests/ninja b/build/fbcode_builder/manifests/ninja new file mode 100644 index 000000000..e50827986 --- /dev/null +++ b/build/fbcode_builder/manifests/ninja @@ -0,0 +1,32 @@ +[manifest] +name = ninja + +[debs] +ninja-build + +[homebrew] +ninja + +[rpms] +ninja-build + +[pps] +ninja + +[download.os=windows] +url = https://github.com/ninja-build/ninja/releases/download/v1.10.2/ninja-win.zip +sha256 = bbde850d247d2737c5764c927d1071cbb1f1957dcabda4a130fa8547c12c695f + +[build.os=windows] +builder = nop + +[install.files.os=windows] +ninja.exe = bin/ninja.exe + +[download.not(os=windows)] +url = 
https://github.com/ninja-build/ninja/archive/v1.10.2.tar.gz +sha256 = ce35865411f0490368a8fc383f29071de6690cbadc27704734978221f25e2bed + +[build.not(os=windows)] +builder = ninja_bootstrap +subdir = ninja-1.10.2 diff --git a/build/fbcode_builder/manifests/nlohmann-json b/build/fbcode_builder/manifests/nlohmann-json new file mode 100644 index 000000000..7d552d95f --- /dev/null +++ b/build/fbcode_builder/manifests/nlohmann-json @@ -0,0 +1,12 @@ +[manifest] +name = nlohmann-json + +[download] +url = https://github.com/nlohmann/json/archive/refs/tags/v3.10.5.tar.gz +sha256 = 5daca6ca216495edf89d167f808d1d03c4a4d929cef7da5e10f135ae1540c7e4 + +[dependencies] + +[build] +builder = cmake +subdir = json-3.10.5 diff --git a/build/fbcode_builder/manifests/openr b/build/fbcode_builder/manifests/openr new file mode 100644 index 000000000..913d81f37 --- /dev/null +++ b/build/fbcode_builder/manifests/openr @@ -0,0 +1,38 @@ +[manifest] +name = openr +fbsource_path = facebook/openr +shipit_project = openr +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebook/openr.git + +[build.os=linux] +builder = cmake +# openr files take a lot of RAM to compile. +job_weight_mib = 3072 + +[build.not(os=linux)] +# boost.fiber is required and that is not available on macos. +builder = nop + +[dependencies] +boost +fb303 +fbthrift +folly +googletest +re2 +range-v3 + +[cmake.defines.test=on] +BUILD_TESTS=ON +ADD_ROOT_TESTS=OFF + +[cmake.defines.test=off] +BUILD_TESTS=OFF + + +[shipit.pathmap] +fbcode/openr = openr +fbcode/openr/public_tld = . 
diff --git a/build/fbcode_builder/manifests/openssl b/build/fbcode_builder/manifests/openssl new file mode 100644 index 000000000..beef31c9e --- /dev/null +++ b/build/fbcode_builder/manifests/openssl @@ -0,0 +1,32 @@ +[manifest] +name = openssl + +[debs] +libssl-dev + +[homebrew] +openssl@1.1 +# on homebrew need the matching curl and ca- + +[rpms] +openssl +openssl-devel +openssl-libs + +[pps] +openssl + +[download] +url = https://www.openssl.org/source/openssl-1.1.1l.tar.gz +sha256 = 0b7a3e5e59c34827fe0c3a74b7ec8baef302b98fa80088d7f9153aa16fa76bd1 + +# We use the system openssl on these platforms even without --allow-system-packages +[build.any(os=linux, os=freebsd)] +builder = nop + +[build.not(any(os=linux, os=freebsd))] +builder = openssl +subdir = openssl-1.1.1l + +[dependencies.os=windows] +perl diff --git a/build/fbcode_builder/manifests/osxfuse b/build/fbcode_builder/manifests/osxfuse new file mode 100644 index 000000000..b6c6c551f --- /dev/null +++ b/build/fbcode_builder/manifests/osxfuse @@ -0,0 +1,12 @@ +[manifest] +name = osxfuse + +[download] +url = https://github.com/osxfuse/osxfuse/archive/osxfuse-3.8.3.tar.gz +sha256 = 93bab6731bdfe8dc1ef069483437270ce7fe5a370f933d40d8d0ef09ba846c0c + +[build] +builder = nop + +[install.files] +osxfuse-osxfuse-3.8.3/common = include diff --git a/build/fbcode_builder/manifests/patchelf b/build/fbcode_builder/manifests/patchelf new file mode 100644 index 000000000..7025dc66a --- /dev/null +++ b/build/fbcode_builder/manifests/patchelf @@ -0,0 +1,20 @@ +[manifest] +name = patchelf + +[rpms] +patchelf + +[debs] +patchelf + +[pps] +patchelf + +[download] +url = https://github.com/NixOS/patchelf/archive/0.10.tar.gz +sha256 = b3cb6bdedcef5607ce34a350cf0b182eb979f8f7bc31eae55a93a70a3f020d13 + +[build] +builder = autoconf +subdir = patchelf-0.10 + diff --git a/build/fbcode_builder/manifests/pcre2 b/build/fbcode_builder/manifests/pcre2 new file mode 100644 index 000000000..9ba119a78 --- /dev/null +++ 
b/build/fbcode_builder/manifests/pcre2 @@ -0,0 +1,20 @@ +[manifest] +name = pcre2 + +[homebrew] +pcre2 + +[rpms] +pcre2-devel +pcre-static + +[debs] +libpcre2-dev + +[download] +url = https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.40/pcre2-10.40.tar.bz2 +sha256 = 14e4b83c4783933dc17e964318e6324f7cae1bc75d8f3c79bc6969f00c159d68 + +[build] +builder = cmake +subdir = pcre2-10.40 diff --git a/build/fbcode_builder/manifests/perl b/build/fbcode_builder/manifests/perl new file mode 100644 index 000000000..32bddc51c --- /dev/null +++ b/build/fbcode_builder/manifests/perl @@ -0,0 +1,11 @@ +[manifest] +name = perl + +[download.os=windows] +url = http://strawberryperl.com/download/5.28.1.1/strawberry-perl-5.28.1.1-64bit-portable.zip +sha256 = 935c95ba096fa11c4e1b5188732e3832d330a2a79e9882ab7ba8460ddbca810d + +[build.os=windows] +builder = nop +subdir = perl + diff --git a/build/fbcode_builder/manifests/pexpect b/build/fbcode_builder/manifests/pexpect new file mode 100644 index 000000000..682e66a54 --- /dev/null +++ b/build/fbcode_builder/manifests/pexpect @@ -0,0 +1,12 @@ +[manifest] +name = pexpect + +[download] +url = https://files.pythonhosted.org/packages/0e/3e/377007e3f36ec42f1b84ec322ee12141a9e10d808312e5738f52f80a232c/pexpect-4.7.0-py2.py3-none-any.whl +sha256 = 2094eefdfcf37a1fdbfb9aa090862c1a4878e5c7e0e7e7088bdb511c558e5cd1 + +[build] +builder = python-wheel + +[dependencies] +python-ptyprocess diff --git a/build/fbcode_builder/manifests/proxygen b/build/fbcode_builder/manifests/proxygen new file mode 100644 index 000000000..a7b48043f --- /dev/null +++ b/build/fbcode_builder/manifests/proxygen @@ -0,0 +1,37 @@ +[manifest] +name = proxygen +fbsource_path = fbcode/proxygen +shipit_project = proxygen +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebook/proxygen.git + +[build.os=windows] +builder = nop + +[build] +builder = cmake +subdir = . 
+job_weight_mib = 3072 + +[cmake.defines.test=on] +BUILD_TESTS = ON + +[cmake.defines.test=off] +BUILD_TESTS = OFF + +[dependencies] +zlib +gperf +folly +fizz +wangle +mvfst + +[dependencies.test=on] +googletest + +[shipit.pathmap] +fbcode/proxygen/public_tld = . +fbcode/proxygen = proxygen diff --git a/build/fbcode_builder/manifests/python b/build/fbcode_builder/manifests/python new file mode 100644 index 000000000..00fed973c --- /dev/null +++ b/build/fbcode_builder/manifests/python @@ -0,0 +1,48 @@ +[manifest] +name = python + +[homebrew] +python@3.8 + +[rpms] +python3 +python3-devel + +# sapling needs dataclasses which arrive in 3.7, and the bionic python is 3.6 +[debs.all(distro=ubuntu,distro_vers="18.04")] +python3.8-dev + +[debs.not(all(distro=ubuntu,distro_vers="18.04"))] +python3-all-dev + +[pps] +python3 + +[download] +url = https://www.python.org/ftp/python/3.8.13/Python-3.8.13.tgz +sha256 = 903b92d76354366b1d9c4434d0c81643345cef87c1600adfa36095d7b00eede4 + +[build] +builder = autoconf +subdir = Python-3.8.13 + +[autoconf.args] +--enable-shared +--with-ensurepip=install + +# python's pkg-config libffi detection is broken +# See https://bugs.python.org/issue34823 for clearest description +# and pending PR https://github.com/python/cpython/pull/20451 +# The documented workaround requires an environment variable derived from +# pkg-config to be passed into its configure step +[autoconf.envcmd.LDFLAGS] +pkg-config +--libs-only-L +libffi + +[dependencies] +libffi +# eden tests expect the python bz2 support +bz2 +# eden tests expect the python curses support +ncurses diff --git a/build/fbcode_builder/manifests/python-filelock b/build/fbcode_builder/manifests/python-filelock new file mode 100644 index 000000000..40502de7c --- /dev/null +++ b/build/fbcode_builder/manifests/python-filelock @@ -0,0 +1,9 @@ +[manifest] +name = python-filelock + +[download] +url = 
https://files.pythonhosted.org/packages/31/24/ee722b92f23b9ebd87783e893a75352c048bbbc1f67dce0d63b58b46cb48/filelock-3.3.2-py3-none-any.whl +sha256 = bb2a1c717df74c48a2d00ed625e5a66f8572a3a30baacb7657add1d7bac4097b + +[build] +builder = python-wheel diff --git a/build/fbcode_builder/manifests/python-ptyprocess b/build/fbcode_builder/manifests/python-ptyprocess new file mode 100644 index 000000000..adc60e048 --- /dev/null +++ b/build/fbcode_builder/manifests/python-ptyprocess @@ -0,0 +1,9 @@ +[manifest] +name = python-ptyprocess + +[download] +url = https://files.pythonhosted.org/packages/d1/29/605c2cc68a9992d18dada28206eeada56ea4bd07a239669da41674648b6f/ptyprocess-0.6.0-py2.py3-none-any.whl +sha256 = d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f + +[build] +builder = python-wheel diff --git a/build/fbcode_builder/manifests/python-setuptools b/build/fbcode_builder/manifests/python-setuptools new file mode 100644 index 000000000..7ca2e1e49 --- /dev/null +++ b/build/fbcode_builder/manifests/python-setuptools @@ -0,0 +1,9 @@ +[manifest] +name = python-setuptools + +[download] +url = https://files.pythonhosted.org/packages/c0/7a/3da654f49c95d0cc6e9549a855b5818e66a917e852ec608e77550c8dc08b/setuptools-69.1.1-py3-none-any.whl +sha256 = 02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56 + +[build] +builder = python-wheel diff --git a/build/fbcode_builder/manifests/python-six b/build/fbcode_builder/manifests/python-six new file mode 100644 index 000000000..a712188dc --- /dev/null +++ b/build/fbcode_builder/manifests/python-six @@ -0,0 +1,9 @@ +[manifest] +name = python-six + +[download] +url = https://files.pythonhosted.org/packages/73/fb/00a976f728d0d1fecfe898238ce23f502a721c0ac0ecfedb80e0d88c64e9/six-1.12.0-py2.py3-none-any.whl +sha256 = 3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c + +[build] +builder = python-wheel diff --git a/build/fbcode_builder/manifests/python-toml b/build/fbcode_builder/manifests/python-toml new 
file mode 100644 index 000000000..b49a3b8fb --- /dev/null +++ b/build/fbcode_builder/manifests/python-toml @@ -0,0 +1,9 @@ +[manifest] +name = python-toml + +[download] +url = https://files.pythonhosted.org/packages/a2/12/ced7105d2de62fa7c8fb5fce92cc4ce66b57c95fb875e9318dba7f8c5db0/toml-0.10.0-py2.py3-none-any.whl +sha256 = 235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e + +[build] +builder = python-wheel diff --git a/build/fbcode_builder/manifests/ragel b/build/fbcode_builder/manifests/ragel new file mode 100644 index 000000000..336a39b20 --- /dev/null +++ b/build/fbcode_builder/manifests/ragel @@ -0,0 +1,19 @@ +[manifest] +name = ragel + +[debs] +ragel + +[homebrew] +ragel + +[rpms] +ragel + +[download] +url = https://www.colm.net/files/ragel/ragel-6.10.tar.gz +sha256 = 5f156edb65d20b856d638dd9ee2dfb43285914d9aa2b6ec779dac0270cd56c3f + +[build] +builder = autoconf +subdir = ragel-6.10 diff --git a/build/fbcode_builder/manifests/range-v3 b/build/fbcode_builder/manifests/range-v3 new file mode 100644 index 000000000..f96403c83 --- /dev/null +++ b/build/fbcode_builder/manifests/range-v3 @@ -0,0 +1,11 @@ +[manifest] +name = range-v3 + +[download] +url = https://github.com/ericniebler/range-v3/archive/refs/tags/0.11.0.tar.gz +sha256 = 376376615dbba43d3bef75aa590931431ecb49eb36d07bb726a19f680c75e20c + + +[build] +builder = cmake +subdir = range-v3-0.11.0 diff --git a/build/fbcode_builder/manifests/re2 b/build/fbcode_builder/manifests/re2 new file mode 100644 index 000000000..1fe1eccbd --- /dev/null +++ b/build/fbcode_builder/manifests/re2 @@ -0,0 +1,23 @@ +[manifest] +name = re2 + +[homebrew] +re2 + +[debs] +libre2-dev + +[rpms] +re2 +re2-devel + +[pps] +re2 + +[download] +url = https://github.com/google/re2/archive/2020-11-01.tar.gz +sha256 = 8903cc66c9d34c72e2bc91722288ebc7e3ec37787ecfef44d204b2d6281954d7 + +[build] +builder = cmake +subdir = re2-2020-11-01 diff --git a/build/fbcode_builder/manifests/rocksdb b/build/fbcode_builder/manifests/rocksdb 
new file mode 100644 index 000000000..c56066378 --- /dev/null +++ b/build/fbcode_builder/manifests/rocksdb @@ -0,0 +1,38 @@ +[manifest] +name = rocksdb + +[download] +url = https://github.com/facebook/rocksdb/archive/refs/tags/v8.7.3.zip +sha256 = 36c06b61dc167f2455990d60dd88d734b73aa8c4dfc095243efd0243834c6cd3 + +[dependencies] +lz4 +snappy + +[build] +builder = cmake +subdir = rocksdb-8.7.3 + +[cmake.defines] +WITH_SNAPPY=ON +WITH_LZ4=ON +WITH_TESTS=OFF +WITH_BENCHMARK_TOOLS=OFF +# We get relocation errors with the static gflags lib, +# and there's no clear way to make it pick the shared gflags +# so just turn it off. +WITH_GFLAGS=OFF +# Disable the use of -Werror +FAIL_ON_WARNINGS = OFF + +[cmake.defines.os=windows] +ROCKSDB_INSTALL_ON_WINDOWS=ON +# RocksDB hard codes the paths to the snappy libs to something +# that doesn't exist; ignoring the usual cmake rules. As a result, +# we can't build it with snappy without either patching rocksdb or +# without introducing more complex logic to the build system to +# connect the snappy build outputs to rocksdb's custom logic here. +# Let's just turn it off on windows. +WITH_SNAPPY=OFF +WITH_LZ4=ON +ROCKSDB_SKIP_THIRDPARTY=ON diff --git a/build/fbcode_builder/manifests/rust-shed b/build/fbcode_builder/manifests/rust-shed new file mode 100644 index 000000000..31e2b61d9 --- /dev/null +++ b/build/fbcode_builder/manifests/rust-shed @@ -0,0 +1,35 @@ +[manifest] +name = rust-shed +fbsource_path = fbcode/common/rust/shed +shipit_project = rust-shed +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebookexperimental/rust-shed.git + +[build] +builder = cargo + +[cargo] +build_doc = true +workspace_dir = + +[shipit.pathmap] +fbcode/common/rust/shed = shed +fbcode/common/rust/shed/public_autocargo = shed +fbcode/common/rust/shed/public_tld = . +tools/rust/ossconfigs = . 
+ +[shipit.strip] +^fbcode/common/rust/shed/(?!public_autocargo|public_tld).+/Cargo\.toml$ + +[dependencies] +fbthrift +fb303 + +# We use the system openssl on linux +[dependencies.not(os=linux)] +openssl + +[dependencies.fbsource=on] +rust diff --git a/build/fbcode_builder/manifests/snappy b/build/fbcode_builder/manifests/snappy new file mode 100644 index 000000000..c458a0ae8 --- /dev/null +++ b/build/fbcode_builder/manifests/snappy @@ -0,0 +1,30 @@ +[manifest] +name = snappy + +[homebrew] +snappy + +[debs] +libsnappy-dev + +[rpms] +snappy-devel + +[pps] +snappy + +[download] +url = https://github.com/google/snappy/archive/1.1.7.tar.gz +sha256 = 3dfa02e873ff51a11ee02b9ca391807f0c8ea0529a4924afa645fbf97163f9d4 + +[build] +builder = cmake +subdir = snappy-1.1.7 + +[cmake.defines] +SNAPPY_BUILD_TESTS = OFF + +# Avoid problems like `relocation R_X86_64_PC32 against symbol` on ELF systems +# when linking rocksdb, which builds PIC even when building a static lib +[cmake.defines.os=linux] +BUILD_SHARED_LIBS = ON diff --git a/build/fbcode_builder/manifests/sqlite3 b/build/fbcode_builder/manifests/sqlite3 new file mode 100644 index 000000000..1966f0fab --- /dev/null +++ b/build/fbcode_builder/manifests/sqlite3 @@ -0,0 +1,27 @@ +[manifest] +name = sqlite3 + +[debs] +libsqlite3-dev + +[homebrew] +sqlite + +[rpms] +sqlite-devel +sqlite-libs + +[pps] +sqlite3 + +[download] +url = https://sqlite.org/2019/sqlite-amalgamation-3280000.zip +sha256 = d02fc4e95cfef672b45052e221617a050b7f2e20103661cda88387349a9b1327 + +[dependencies] +cmake +ninja + +[build] +builder = sqlite +subdir = sqlite-amalgamation-3280000 diff --git a/build/fbcode_builder/manifests/wangle b/build/fbcode_builder/manifests/wangle new file mode 100644 index 000000000..6b330d620 --- /dev/null +++ b/build/fbcode_builder/manifests/wangle @@ -0,0 +1,27 @@ +[manifest] +name = wangle +fbsource_path = fbcode/wangle +shipit_project = wangle +shipit_fbcode_builder = true + +[git] +repo_url = 
https://github.com/facebook/wangle.git + +[build] +builder = cmake +subdir = wangle + +[cmake.defines.test=on] +BUILD_TESTS=ON + +[cmake.defines.test=off] +BUILD_TESTS=OFF + +[dependencies] +folly +googletest +fizz + +[shipit.pathmap] +fbcode/wangle/public_tld = . +fbcode/wangle = wangle diff --git a/build/fbcode_builder/manifests/watchman b/build/fbcode_builder/manifests/watchman new file mode 100644 index 000000000..31596bc93 --- /dev/null +++ b/build/fbcode_builder/manifests/watchman @@ -0,0 +1,48 @@ +[manifest] +name = watchman +fbsource_path = fbcode/watchman +shipit_project = watchman +shipit_fbcode_builder = true + +[git] +repo_url = https://github.com/facebook/watchman.git + +[build] +builder = cmake + +[dependencies] +boost +cpptoml +edencommon +fb303 +fbthrift +folly +pcre2 +googletest +python-setuptools + +[dependencies.fbsource=on] +rust + +[shipit.pathmap] +fbcode/watchman = watchman +fbcode/watchman/oss = . +fbcode/eden/fs = eden/fs + +[shipit.strip] +^fbcode/eden/fs/(?!.*\.thrift|service/shipit_test_file\.txt) + +[cmake.defines.fb=on] +ENABLE_EDEN_SUPPORT=ON +IS_FB_BUILD=ON + +# FB macos specific settings +[cmake.defines.all(fb=on,os=darwin)] +# this path is coupled with the FB internal watchman-osx.spec +WATCHMAN_STATE_DIR=/opt/facebook/watchman/var/run/watchman +# tell cmake not to try to create /opt/facebook/... 
+INSTALL_WATCHMAN_STATE_DIR=OFF +USE_SYS_PYTHON=OFF + +[depends.environment] +WATCHMAN_VERSION_OVERRIDE diff --git a/build/fbcode_builder/manifests/ws_airstore b/build/fbcode_builder/manifests/ws_airstore new file mode 100644 index 000000000..b779d4be6 --- /dev/null +++ b/build/fbcode_builder/manifests/ws_airstore @@ -0,0 +1,35 @@ +[manifest] +name = ws_airstore +fbsource_path = fbcode/warm_storage/experimental/ws_airstore/ +shipit_project = WS_AIRStore +shipit_fbcode_builder = true + +[build.os=linux] +builder = cmake + +[build.not(os=linux)] +# We only support Linux +builder = nop + +[dependencies] +boost +double-conversion +fizz +fmt +folly +googletest +libcurl +libevent +libffi +libsodium +openssl +sqlite3 +wangle +zstd +zlib +xz + +[shipit.pathmap] +fbcode/warm_storage/experimental/ws_airstore = . + +[shipit.strip] diff --git a/build/fbcode_builder/manifests/xxhash b/build/fbcode_builder/manifests/xxhash new file mode 100644 index 000000000..0af55726c --- /dev/null +++ b/build/fbcode_builder/manifests/xxhash @@ -0,0 +1,5 @@ +[manifest] +name = xxhash + +[rpms] +xxhash-devel diff --git a/build/fbcode_builder/manifests/xz b/build/fbcode_builder/manifests/xz new file mode 100644 index 000000000..0b27ad63c --- /dev/null +++ b/build/fbcode_builder/manifests/xz @@ -0,0 +1,22 @@ +[manifest] +name = xz + +[debs] +liblzma-dev + +[homebrew] +xz + +[rpms] +xz-devel + +[download] +url = https://tukaani.org/xz/xz-5.2.5.tar.gz +sha256 = f6f4910fd033078738bd82bfba4f49219d03b17eb0794eb91efbae419f4aba10 + +[build] +builder = autoconf +subdir = xz-5.2.5 + +[autoconf.args] +--disable-shared diff --git a/build/fbcode_builder/manifests/yaml-cpp b/build/fbcode_builder/manifests/yaml-cpp new file mode 100644 index 000000000..bffa540fe --- /dev/null +++ b/build/fbcode_builder/manifests/yaml-cpp @@ -0,0 +1,20 @@ +[manifest] +name = yaml-cpp + +[download] +url = https://github.com/jbeder/yaml-cpp/archive/yaml-cpp-0.6.2.tar.gz +sha256 = 
e4d8560e163c3d875fd5d9e5542b5fd5bec810febdcba61481fe5fc4e6b1fd05 + +[build.os=linux] +builder = cmake +subdir = yaml-cpp-yaml-cpp-0.6.2 + +[build.not(os=linux)] +builder = nop + +[dependencies] +boost +googletest + +[cmake.defines] +YAML_CPP_BUILD_TESTS=OFF diff --git a/build/fbcode_builder/manifests/zlib b/build/fbcode_builder/manifests/zlib new file mode 100644 index 000000000..bb32e63eb --- /dev/null +++ b/build/fbcode_builder/manifests/zlib @@ -0,0 +1,28 @@ +[manifest] +name = zlib + +[debs] +zlib1g-dev + +[homebrew] +zlib + +[rpms.not(distro=fedora)] +zlib-devel +zlib-static + +[rpms.distro=fedora] +zlib-ng-compat-devel +zlib-ng-compat-static + +[pps] +zlib + +[download] +url = https://zlib.net/zlib-1.3.1.tar.gz +sha256 = 9a93b2b7dfdac77ceba5a558a580e74667dd6fede4585b91eefb60f03b72df23 + +[build] +builder = cmake +subdir = zlib-1.3.1 +patchfile = zlib_dont_build_more_than_needed.patch diff --git a/build/fbcode_builder/manifests/zstd b/build/fbcode_builder/manifests/zstd new file mode 100644 index 000000000..aac189fb8 --- /dev/null +++ b/build/fbcode_builder/manifests/zstd @@ -0,0 +1,34 @@ +[manifest] +name = zstd + +[homebrew] +zstd + +# 18.04 zstd is too old +[debs.not(all(distro=ubuntu,distro_vers="18.04"))] +libzstd-dev + +[rpms] +libzstd-devel +libzstd + +[pps] +zstd + +[download] +url = https://github.com/facebook/zstd/releases/download/v1.5.5/zstd-1.5.5.tar.gz +sha256 = 9c4396cc829cfae319a6e2615202e82aad41372073482fce286fac78646d3ee4 + +[build] +builder = cmake +subdir = zstd-1.5.5/build/cmake + +# The zstd cmake build explicitly sets the install name +# for the shared library in such a way that cmake discards +# the path to the library from the install_name, rendering +# the library non-resolvable during the build. The short +# term solution for this is just to link static on macos. +# +# And while we're at it, let's just always link statically. 
+[cmake.defines] +ZSTD_BUILD_SHARED = OFF diff --git a/build/fbcode_builder/manifests/zstrong b/build/fbcode_builder/manifests/zstrong new file mode 100644 index 000000000..5205c2c67 --- /dev/null +++ b/build/fbcode_builder/manifests/zstrong @@ -0,0 +1,33 @@ +[manifest] +name = zstrong + +[git] +repo_url = https://github.com/facebookincubator/zstrong.git + +[build] +builder = cmake + +[dependencies] +zstd + +[dependencies.test=on] +benchmark +fmt +googletest + +[shipit.pathmap] +fbcode/data_compression/experimental/zstrong = . + +[shipit.strip] +^fbcode/data_compression/experimental/zstrong/zstrong/zs2_config\.h$ + +[cmake.defines] +BUILD_SHARED_LIBS=OFF + +[cmake.defines.test=on] +BUILD_TESTS=ON +BUILD_BENCHMARKS=ON + +[cmake.defines.test=off] +BUILD_TESTS=OFF +BUILD_BENCHMARKS=OFF diff --git a/build/fbcode_builder/patches/boost_comparator_operator_fix.patch b/build/fbcode_builder/patches/boost_comparator_operator_fix.patch new file mode 100644 index 000000000..3771f2fff --- /dev/null +++ b/build/fbcode_builder/patches/boost_comparator_operator_fix.patch @@ -0,0 +1,11 @@ +diff --git a/boost/serialization/strong_typedef.hpp b/boost/serialization/strong_typedef.hpp +--- a/boost/serialization/strong_typedef.hpp ++++ b/boost/serialization/strong_typedef.hpp +@@ -44,6 +44,7 @@ + operator const T&() const {return t;} \ + operator T&() {return t;} \ + bool operator==(const D& rhs) const {return t == rhs.t;} \ ++ bool operator==(const T& lhs) const {return t == lhs;} \ + bool operator<(const D& rhs) const {return t < rhs.t;} \ + }; + diff --git a/build/fbcode_builder/patches/iproute2_oss.patch b/build/fbcode_builder/patches/iproute2_oss.patch new file mode 100644 index 000000000..7c478afca --- /dev/null +++ b/build/fbcode_builder/patches/iproute2_oss.patch @@ -0,0 +1,36 @@ +diff --git a/bridge/fdb.c b/bridge/fdb.c +--- a/bridge/fdb.c ++++ b/bridge/fdb.c +@@ -31,7 +31,7 @@ + + static unsigned int filter_index, filter_vlan, filter_state; + +-json_writer_t *jw_global; 
++static json_writer_t *jw_global; + + static void usage(void) + { +diff --git a/ip/ipmroute.c b/ip/ipmroute.c +--- a/ip/ipmroute.c ++++ b/ip/ipmroute.c +@@ -44,7 +44,7 @@ + exit(-1); + } + +-struct rtfilter { ++static struct rtfilter { + int tb; + int af; + int iif; +diff --git a/ip/xfrm_monitor.c b/ip/xfrm_monitor.c +--- a/ip/xfrm_monitor.c ++++ b/ip/xfrm_monitor.c +@@ -34,7 +34,7 @@ + #include "ip_common.h" + + static void usage(void) __attribute__((noreturn)); +-int listen_all_nsid; ++static int listen_all_nsid; + + static void usage(void) + { diff --git a/build/fbcode_builder/patches/zlib_dont_build_more_than_needed.patch b/build/fbcode_builder/patches/zlib_dont_build_more_than_needed.patch new file mode 100644 index 000000000..2ef115714 --- /dev/null +++ b/build/fbcode_builder/patches/zlib_dont_build_more_than_needed.patch @@ -0,0 +1,33 @@ +diff -Naur ../zlib-1.3.1/CMakeLists.txt ./CMakeLists.txt +--- ../zlib-1.3.1/CMakeLists.txt 2024-01-22 10:32:37.000000000 -0800 ++++ ./CMakeLists.txt 2024-01-23 13:14:09.870289968 -0800 +@@ -149,10 +149,8 @@ + set(ZLIB_DLL_SRCS ${CMAKE_CURRENT_BINARY_DIR}/zlib1rc.obj) + endif(MINGW) + +-add_library(zlib SHARED ${ZLIB_SRCS} ${ZLIB_DLL_SRCS} ${ZLIB_PUBLIC_HDRS} ${ZLIB_PRIVATE_HDRS}) ++add_library(zlib ${ZLIB_SRCS} ${ZLIB_DLL_SRCS} ${ZLIB_PUBLIC_HDRS} ${ZLIB_PRIVATE_HDRS}) + target_include_directories(zlib PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}) +-add_library(zlibstatic STATIC ${ZLIB_SRCS} ${ZLIB_PUBLIC_HDRS} ${ZLIB_PRIVATE_HDRS}) +-target_include_directories(zlibstatic PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}) + set_target_properties(zlib PROPERTIES DEFINE_SYMBOL ZLIB_DLL) + set_target_properties(zlib PROPERTIES SOVERSION 1) + +@@ -169,7 +167,7 @@ + + if(UNIX) + # On unix-like platforms the library is almost always called libz +- set_target_properties(zlib zlibstatic PROPERTIES OUTPUT_NAME z) ++ set_target_properties(zlib PROPERTIES OUTPUT_NAME z) + if(NOT APPLE AND 
NOT(CMAKE_SYSTEM_NAME STREQUAL AIX)) + set_target_properties(zlib PROPERTIES LINK_FLAGS "-Wl,--version-script,\"${CMAKE_CURRENT_SOURCE_DIR}/zlib.map\"") + endif() +@@ -179,7 +177,7 @@ + endif() + + if(NOT SKIP_INSTALL_LIBRARIES AND NOT SKIP_INSTALL_ALL ) +- install(TARGETS zlib zlibstatic ++ install(TARGETS zlib + RUNTIME DESTINATION "${INSTALL_BIN_DIR}" + ARCHIVE DESTINATION "${INSTALL_LIB_DIR}" + LIBRARY DESTINATION "${INSTALL_LIB_DIR}" ) diff --git a/mcrouter/CMakeLists.txt b/mcrouter/CMakeLists.txt new file mode 100644 index 000000000..8ee829041 --- /dev/null +++ b/mcrouter/CMakeLists.txt @@ -0,0 +1,140 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/RouterRegistry.h" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/RouterRegistry-impl.h" + COMMAND + ${CMAKE_COMMAND} -E create_symlink + "${CMAKE_CURRENT_SOURCE_DIR}/RouterRegistry-impl.h" + "${CMAKE_CURRENT_BINARY_DIR}/RouterRegistry.h" + COMMENT "Creating RouterRegistry.h symlink") + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/ThriftAcceptor.h" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/ThriftAcceptor-impl.h" + COMMAND + ${CMAKE_COMMAND} -E create_symlink + "${CMAKE_CURRENT_SOURCE_DIR}/ThriftAcceptor-impl.h" + "${CMAKE_CURRENT_BINARY_DIR}/ThriftAcceptor.h" + COMMENT "Creating ThriftAcceptor.h symlink") + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/HostWithShard-fwd.h" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/HostWithShard-fwd-impl.h" + COMMAND + ${CMAKE_COMMAND} -E create_symlink + "${CMAKE_CURRENT_SOURCE_DIR}/HostWithShard-fwd-impl.h" + "${CMAKE_CURRENT_BINARY_DIR}/HostWithShard-fwd.h" + COMMENT "Creating HostWithShard-fwd.h symlink") + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/mcrouter_sr_deps.h" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/mcrouter_sr_deps-impl.h" + COMMAND + 
${CMAKE_COMMAND} -E create_symlink + "${CMAKE_CURRENT_SOURCE_DIR}/mcrouter_sr_deps-impl.h" + "${CMAKE_CURRENT_BINARY_DIR}/mcrouter_sr_deps.h" + COMMENT "Creating mcrouter_sr_deps.h symlink") + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/config-impl.h" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/mcrouter_config-impl.h" + COMMAND + ${CMAKE_COMMAND} -E create_symlink + "${CMAKE_CURRENT_SOURCE_DIR}/mcrouter_config-impl.h" + "${CMAKE_CURRENT_BINARY_DIR}/config-impl.h" + COMMENT "Creating config-impl.h symlink") + +add_custom_target( + mcrouter_symlink_headers DEPENDS RouterRegistry.h ThriftAcceptor.h + HostWithShard-fwd.h mcrouter_sr_deps.h config-impl.h) + +add_library( + mcroutercore + AsyncLog.cpp + AsyncWriter.cpp + CarbonRouterClient.cpp + CarbonRouterClientBase.cpp + CarbonRouterFactory.cpp + CarbonRouterInstance.cpp + CarbonRouterInstanceBase.cpp + ConfigApi.cpp + FileDataProvider.cpp + FileObserver.cpp + flavor.cpp + LeaseTokenMap.cpp + mcrouter_config.cpp + McrouterFiberContext.cpp + McrouterLogFailure.cpp + McrouterLogger.cpp + McrouterManager.cpp + McSpoolUtils.cpp + options.cpp + OptionsUtil.cpp + PoolFactory.cpp + ProxyBase.cpp + ProxyConfigBuilder.cpp + ProxyDestination.cpp + ProxyDestinationBase.cpp + ProxyDestinationKey.cpp + ProxyDestinationMap.cpp + ProxyRequestContext.cpp + ProxyStats.cpp + route.cpp + RoutingPrefix.cpp + RuntimeVarsData.cpp + ServiceInfo.cpp + stats.cpp + ThreadUtil.cpp + ThriftAcceptor.cpp + TkoLog.cpp + TkoTracker.cpp + ExternalStatsHandler.cpp) + +add_dependencies(mcroutercore mcrouter_symlink_headers) + +target_link_libraries( + mcroutercore + PUBLIC mcrouter_routes mcrouter_carbon_result_thrift mcrouter_carbon_thrift mcrouter_common_thrift + mcrouter_memcache_thrift memcache_service_thrift + PRIVATE mcrouter_invalidation FBThrift::thriftcpp2 Folly::folly atomic) + +add_executable(mcrouter main.cpp RequestAclChecker.cpp StandaloneConfig.cpp + StandaloneUtils.cpp) + +target_link_libraries( + mcrouter + mcroutercore + 
mcrouterinternal + Folly::folly + FBThrift::thriftcpp2 + FBThrift::async + mcrouter_carbon_result_thrift + mcrouter_carbon_thrift + mcrouter_common_thrift + mcrouter_memcache_thrift + memcache_service_thrift + FBThrift::serverdbginfo + FBThrift::transport + FBThrift::thriftanyrep + FBThrift::thrifttype + FBThrift::thrifttyperep + FBThrift::thriftprotocol + FBThrift::rpcmetadata + FBThrift::thriftannotation + FBThrift::thriftmetadata + FBThrift::concurrency + FBThrift::runtime + FBThrift::thrift-core + fmt::fmt + wangle::wangle + pthread + atomic) + +add_subdirectory(lib) +add_subdirectory(routes) +add_subdirectory(tools) +add_subdirectory(test) diff --git a/mcrouter/configure.ac b/mcrouter/configure.ac index 8d64cb75b..efcc409d0 100644 --- a/mcrouter/configure.ac +++ b/mcrouter/configure.ac @@ -132,14 +132,15 @@ AC_CHECK_LIB([double-conversion],[ceil],[],[AC_MSG_ERROR( [Please install double-conversion library])]) AC_CHECK_LIB([dl], [dlopen], []) AC_CHECK_LIB([iberty], [cplus_demangle_v3_callback], []) -AC_CHECK_LIB([folly],[getenv],[],[AC_MSG_ERROR( - [Please install the folly library])]) +#AC_CHECK_LIB([folly],[getenv],[],[AC_MSG_ERROR( +# [Please install the folly library])]) AC_CHECK_LIB([sodium],[sodium_init],[],[AC_MSG_ERROR( [Please install the libsodium library])]) -AC_CHECK_LIB([fizz],[getenv],[],[AC_MSG_ERROR( - [Please install the fizz library])]) -AC_CHECK_HEADER([folly/Likely.h], [], [AC_MSG_ERROR( - [Could not find folly, please download from https://github.com/facebook/folly])], []) +#AC_CHECK_LIB([fizz],[getenv],[],[AC_MSG_ERROR( +# [Please install the fizz library])]) +#AC_CHECK_HEADER([folly/Likely.h], [], [AC_MSG_ERROR( +# [Could not find folly, please download from https://github.com/facebook/folly])], []) +PKG_CHECK_MODULES([FBDEPS],[libfolly]) # Commenting out the wangle dependency from here, because there is a tricky # inter-library dependency that resolves with the right order of LDFLAGS, and @@ -196,7 +197,7 @@ AC_CHECK_FUNCS([gettimeofday 
\ dup2 \ ftruncate]) -LIBS="$LIBS $BOOST_LDFLAGS $BOOST_CONTEXT_LIB $BOOST_FILESYSTEM_LIB \ +LIBS="$LIBS $FBDEPS_LDFLAGS $BOOST_LDFLAGS $BOOST_CONTEXT_LIB $BOOST_FILESYSTEM_LIB \ $BOOST_PROGRAM_OPTIONS_LIB $BOOST_SYSTEM_LIB $BOOST_REGEX_LIB \ $BOOST_THREAD_LIB -lpthread -pthread -ldl -lunwind \ -lbz2 -llz4 -llzma -lsnappy -lzstd -latomic" diff --git a/mcrouter/lib/CMakeLists.txt b/mcrouter/lib/CMakeLists.txt new file mode 100644 index 000000000..9e2013a23 --- /dev/null +++ b/mcrouter/lib/CMakeLists.txt @@ -0,0 +1,77 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_library(mcrouter_clocks Clocks.cpp) + +add_library( + mcrouter_hash_functions + RendezvousHashFunc.cpp RendezvousHashHelper.cpp WeightedCh3HashFunc.cpp + WeightedCh4HashFunc.cpp WeightedChHashFuncBase.cpp + WeightedRendezvousHashFunc.cpp) + +target_link_libraries(mcrouter_hash_functions PRIVATE mcrouter_fbi Folly::folly) + +add_library(mcrouter_utils DynamicUtil.cpp IOBufUtil.cpp) + +target_link_libraries(mcrouter_utils PRIVATE Folly::folly FBThrift::thriftcpp2) + +add_library( + mcrouter_compression_codecs + Compression.cpp + CompressionCodecManager.cpp + IovecCursor.cpp + Lz4CompressionCodec.cpp + Lz4Immutable.cpp + Lz4ImmutableCompressionCodec.cpp + ZstdCompressionCodec.cpp) + +target_link_libraries(mcrouter_compression_codecs PRIVATE mcrouter_utils + Folly::folly) + +add_library(mcrouter_thread_pools AuxiliaryCPUThreadPool.cpp + AuxiliaryIOThreadPool.cpp) + +target_link_libraries(mcrouter_thread_pools PRIVATE Folly::folly) + +add_library(mcrouter_fiber_local FiberLocalInternal.cpp) + +target_link_libraries(mcrouter_fiber_local PRIVATE Folly::folly) + +add_library(mcrouter_failover_errors_settings FailoverErrorsSettingsBase.cpp) + +target_link_libraries( + mcrouter_failover_errors_settings + PRIVATE mcrouter_fbi mcrouter_mc_protocol mcrouter_network_messages + 
Folly::folly FBThrift::thriftcpp2) + +add_library(mcrouterinternal MessageQueue.cpp StatsReply.cpp) + +target_link_libraries( + mcrouterinternal + PUBLIC mcrouter_fbi + mcrouter_config + mcrouter_debug + mcrouter_network_messages + mcrouter_hash_functions + mcrouter_compression_codecs + mcrouter_thread_pools + mcrouter_carbon_protocol + mcrouter_mc_protocol + mcrouter_network + mcrouter_failover_errors_settings + mcrouter_carbon_result_thrift + mcrouter_carbon_thrift + mcrouter_common_thrift + mcrouter_memcache_thrift + memcache_service_thrift) + +add_subdirectory(carbon) +add_subdirectory(config) +add_subdirectory(debug) +add_subdirectory(fbi) +add_subdirectory(mc) +add_subdirectory(network) +add_subdirectory(invalidation) +add_subdirectory(test) diff --git a/mcrouter/lib/carbon/CMakeLists.txt b/mcrouter/lib/carbon/CMakeLists.txt new file mode 100644 index 000000000..72e591497 --- /dev/null +++ b/mcrouter/lib/carbon/CMakeLists.txt @@ -0,0 +1,26 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. 
+ +add_fbthrift_cpp_library(mcrouter_carbon_result_thrift carbon_result.thrift) + +add_fbthrift_cpp_library(mcrouter_carbon_thrift carbon.thrift) + +add_library(mcrouter_carbon_protocol CarbonQueueAppender.cpp + CarbonProtocolReader.cpp Result.cpp) + +target_link_libraries( + mcrouter_carbon_protocol + PRIVATE mcrouter_carbon_result_thrift Folly::folly + PUBLIC FBThrift::thriftcpp2) + +add_library(mcrouter_carbon_clients CmdLineClient.cpp JsonClient.cpp) + +target_link_libraries( + mcrouter_carbon_clients PRIVATE mcrouterinternal Folly::folly + FBThrift::thriftcpp2) + +add_subdirectory(connection) +add_subdirectory(example) +add_subdirectory(test) diff --git a/mcrouter/lib/carbon/connection/CMakeLists.txt b/mcrouter/lib/carbon/connection/CMakeLists.txt new file mode 100644 index 000000000..df99a9135 --- /dev/null +++ b/mcrouter/lib/carbon/connection/CMakeLists.txt @@ -0,0 +1,10 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_library(mcrouter_carbon_connection ExternalCarbonConnectionImpl.cpp) + +target_link_libraries( + mcrouter_carbon_connection PRIVATE mcrouter_network_messages Folly::folly + FBThrift::thriftcpp2) diff --git a/mcrouter/lib/carbon/example/CMakeLists.txt b/mcrouter/lib/carbon/example/CMakeLists.txt new file mode 100644 index 000000000..08cf1e35c --- /dev/null +++ b/mcrouter/lib/carbon/example/CMakeLists.txt @@ -0,0 +1,18 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. 
+ +add_subdirectory(gen) + +if(BUILD_TESTS) + add_executable( + mcrouter_hello_goodbye_tests "${CMAKE_SOURCE_DIR}/mcrouter/test/main.cpp" + HelloGoodbyeTest.cpp) + + target_link_libraries( + mcrouter_hello_goodbye_tests PRIVATE mcrouter_hello_goodbye_messages + Folly::folly GTest::gtest) + + gtest_discover_tests(mcrouter_hello_goodbye_tests) +endif() diff --git a/mcrouter/lib/carbon/example/gen/CMakeLists.txt b/mcrouter/lib/carbon/example/gen/CMakeLists.txt new file mode 100644 index 000000000..0faa5929b --- /dev/null +++ b/mcrouter/lib/carbon/example/gen/CMakeLists.txt @@ -0,0 +1,31 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_fbthrift_cpp_library(mcrouter_hello_goodbye_thrift HelloGoodbye.thrift + OPTIONS "${THRIFT_OPTIONS}") + +add_fbthrift_cpp_library( + mcrouter_hello_goodbye_service_thrift HelloGoodbyeService.thrift SERVICES + HelloGoodbye OPTIONS "${THRIFT_OPTIONS}") + +add_library(mcrouter_hello_goodbye_messages HelloGoodbyeMessages.cpp + HelloGoodbyeMessagesThrift.cpp) + +target_link_libraries(mcrouter_hello_goodbye_messages + PUBLIC mcrouter_hello_goodbye_thrift) + +add_library( + mcrouter_hello_goodbye + HelloGoodbyeClientTool.cpp HelloGoodbyeRouterInfo.cpp + HelloGoodbyeRouterInfo-BuildExtraProvider.cpp + HelloGoodbyeRouterInfo-ExternTemplate.cpp) + +add_dependencies(mcrouter_hello_goodbye mcrouter_symlink_headers) + +target_link_libraries( + mcrouter_hello_goodbye + PUBLIC mcrouter_hello_goodbye_service_thrift mcrouter_hello_goodbye_messages + PRIVATE mcrouter_carbon_connection mcrouter_routes Folly::folly + FBThrift::thriftcpp2) diff --git a/mcrouter/lib/carbon/example/gen/HelloGoodbyeService.thrift b/mcrouter/lib/carbon/example/gen/HelloGoodbyeService.thrift index 1b5f29199..46c1febc0 100644 --- a/mcrouter/lib/carbon/example/gen/HelloGoodbyeService.thrift +++ 
b/mcrouter/lib/carbon/example/gen/HelloGoodbyeService.thrift @@ -16,14 +16,12 @@ include "thrift/annotation/cpp.thrift" include "mcrouter/lib/network/gen/Common.thrift" include "mcrouter/lib/carbon/example/gen/HelloGoodbye.thrift" -include "common/fb303/if/fb303.thrift" - cpp_include "mcrouter/lib/carbon/example/gen/HelloGoodbyeMessages.h" namespace cpp2 hellogoodbye.thrift namespace py3 hellogoodbye.thrift -service HelloGoodbye extends fb303.FacebookService { +service HelloGoodbye { HelloGoodbye_GoodbyeReply goodbye(1: HelloGoodbye_GoodbyeRequest request) (thread = "eb") HelloGoodbye_HelloReply hello(1: HelloGoodbye_HelloRequest request) throws (1: Common.CarbonResultBusy carbonResultBusy, 2: Common.CarbonResultRemoteError carbonResultRemoteError) (thread = "eb") Common_McVersionReply mcVersion(1: Common_McVersionRequest request) (thread = "eb") diff --git a/mcrouter/lib/carbon/test/CMakeLists.txt b/mcrouter/lib/carbon/test/CMakeLists.txt new file mode 100644 index 000000000..2637068a3 --- /dev/null +++ b/mcrouter/lib/carbon/test/CMakeLists.txt @@ -0,0 +1,29 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +if(NOT BUILD_TESTS) + return() +endif() + +add_executable( + mcrouter_carbon_tests + "${CMAKE_SOURCE_DIR}/mcrouter/test/main.cpp" + # TODO: These tests do not compile because the required Thrift clients cannot + # be generated. 
+ # CarbonMessageConversionUtilsTest.cpp + # CarbonStructuresTest.cpp + # JsonClientTest.cpp + # Util.cpp + RequestReplyUtilTest.cpp + SerializationTest.cpp + Timestamp.cpp + VariantTest.cpp) + +target_link_libraries( + mcrouter_carbon_tests + PRIVATE mcrouter_carbon_protocol mcrouter_carbon_clients GTest::gtest + Folly::folly) + +gtest_discover_tests(mcrouter_carbon_tests) diff --git a/mcrouter/lib/config/CMakeLists.txt b/mcrouter/lib/config/CMakeLists.txt new file mode 100644 index 000000000..06567b833 --- /dev/null +++ b/mcrouter/lib/config/CMakeLists.txt @@ -0,0 +1,10 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_library(mcrouter_config ConfigPreprocessor.cpp RendezvousHash.cpp) + +target_link_libraries(mcrouter_config PRIVATE mcrouter_fbi Folly::folly) + +add_subdirectory(test) diff --git a/mcrouter/lib/config/test/CMakeLists.txt b/mcrouter/lib/config/test/CMakeLists.txt new file mode 100644 index 000000000..40827cd8f --- /dev/null +++ b/mcrouter/lib/config/test/CMakeLists.txt @@ -0,0 +1,19 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +if(NOT BUILD_TESTS) + return() +endif() + +add_executable( + mcrouter_config_tests "${CMAKE_SOURCE_DIR}/mcrouter/test/main.cpp" + config_preprocessor_test.cpp rendezvous_hash_test.cpp) + +target_link_libraries(mcrouter_config_tests PRIVATE mcrouter_config + GTest::gtest Folly::folly) + +# Run these tests in the source tree so that they can easily load fixture files. 
+gtest_discover_tests(mcrouter_config_tests + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}) diff --git a/mcrouter/lib/debug/CMakeLists.txt b/mcrouter/lib/debug/CMakeLists.txt new file mode 100644 index 000000000..34ef58319 --- /dev/null +++ b/mcrouter/lib/debug/CMakeLists.txt @@ -0,0 +1,9 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_library(mcrouter_debug ConnectionFifo.cpp ConnectionFifoProtocol.cpp + FifoManager.cpp Fifo.cpp) + +target_link_libraries(mcrouter_debug PRIVATE mcrouter_fbi Folly::folly) diff --git a/mcrouter/lib/fbi/CMakeLists.txt b/mcrouter/lib/fbi/CMakeLists.txt new file mode 100644 index 000000000..81e5afae3 --- /dev/null +++ b/mcrouter/lib/fbi/CMakeLists.txt @@ -0,0 +1,14 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_library(mcrouter_fbi counting_sem.cpp WeightedFurcHash.cpp hash.c network.c) + +target_link_libraries( + mcrouter_fbi + PUBLIC mcrouter_fbi_cpp + PRIVATE Folly::folly) + +add_subdirectory(cpp) +add_subdirectory(test) diff --git a/mcrouter/lib/fbi/cpp/CMakeLists.txt b/mcrouter/lib/fbi/cpp/CMakeLists.txt new file mode 100644 index 000000000..68057c6d0 --- /dev/null +++ b/mcrouter/lib/fbi/cpp/CMakeLists.txt @@ -0,0 +1,11 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. 
+ +add_library(mcrouter_fbi_cpp LogFailure.cpp globals.cpp LowerBoundPrefixMap.cpp + ParsingUtil.cpp util.cpp) + +target_link_libraries(mcrouter_fbi_cpp PRIVATE Folly::folly) + +add_subdirectory(test) diff --git a/mcrouter/lib/fbi/cpp/test/CMakeLists.txt b/mcrouter/lib/fbi/cpp/test/CMakeLists.txt new file mode 100644 index 000000000..49c3bf267 --- /dev/null +++ b/mcrouter/lib/fbi/cpp/test/CMakeLists.txt @@ -0,0 +1,23 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +if(NOT BUILD_TESTS) + return() +endif() + +add_executable( + mcrouter_fbi_cpp_tests "${CMAKE_SOURCE_DIR}/mcrouter/test/main.cpp" + LowerBoundPrefixMapTest.cpp ObjectPoolTests.cpp) + +target_link_libraries(mcrouter_fbi_cpp_tests PRIVATE mcrouter_fbi_cpp + GTest::gtest Folly::folly) + +add_executable(mcrouter_fbi_cpp_benchmarks FuncGeneratorBenchmark.cpp) + +target_link_libraries( + mcrouter_fbi_cpp_benchmarks PRIVATE mcrouter_fbi_cpp Folly::folly + Folly::follybenchmark) + +gtest_discover_tests(mcrouter_fbi_cpp_tests) diff --git a/mcrouter/lib/fbi/test/CMakeLists.txt b/mcrouter/lib/fbi/test/CMakeLists.txt new file mode 100644 index 000000000..c0bb860c5 --- /dev/null +++ b/mcrouter/lib/fbi/test/CMakeLists.txt @@ -0,0 +1,16 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. 
+ +if(NOT BUILD_TESTS) + return() +endif() + +add_executable(mcrouter_fbi_tests "${CMAKE_SOURCE_DIR}/mcrouter/test/main.cpp" + sem_test.cpp test_util.cpp hash_test.cpp) + +target_link_libraries(mcrouter_fbi_tests PRIVATE mcrouter_fbi GTest::gtest + Folly::folly) + +gtest_discover_tests(mcrouter_fbi_tests) diff --git a/mcrouter/lib/invalidation/CMakeLists.txt b/mcrouter/lib/invalidation/CMakeLists.txt new file mode 100644 index 000000000..554a96da1 --- /dev/null +++ b/mcrouter/lib/invalidation/CMakeLists.txt @@ -0,0 +1,16 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_library( + mcrouter_invalidation + McInvalidationKvPairs.cpp +) + +target_link_libraries( + mcrouter_invalidation + PUBLIC Folly::folly FBThrift::thriftcpp2 +) + +add_subdirectory(test) diff --git a/mcrouter/lib/invalidation/test/CMakeLists.txt b/mcrouter/lib/invalidation/test/CMakeLists.txt new file mode 100644 index 000000000..9aa659748 --- /dev/null +++ b/mcrouter/lib/invalidation/test/CMakeLists.txt @@ -0,0 +1,19 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +if(NOT BUILD_TESTS) + return() +endif() + +add_executable( + mcrouter_invalidation_tests + "${CMAKE_SOURCE_DIR}/mcrouter/test/main.cpp" + McInvalidationKvPairsTest.cpp +) + +target_link_libraries( + mcrouter_invalidation_tests + PRIVATE mcrouter_invalidation mcrouter_network_messages Folly::folly GTest::gtest +) diff --git a/mcrouter/lib/mc/CMakeLists.txt b/mcrouter/lib/mc/CMakeLists.txt new file mode 100644 index 000000000..a8b7f0690 --- /dev/null +++ b/mcrouter/lib/mc/CMakeLists.txt @@ -0,0 +1,6 @@ +# Copyright (c) Meta Platforms, Inc.
+# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_library(mcrouter_mc_protocol msg.cpp) diff --git a/mcrouter/lib/network/CMakeLists.txt b/mcrouter/lib/network/CMakeLists.txt new file mode 100644 index 000000000..ce91da7fc --- /dev/null +++ b/mcrouter/lib/network/CMakeLists.txt @@ -0,0 +1,59 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_custom_command( + OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/McAsciiParser-gen.cpp + COMMAND ragel -G1 ${CMAKE_CURRENT_SOURCE_DIR}/McAsciiParser.rl -o + ${CMAKE_CURRENT_BINARY_DIR}/McAsciiParser-gen.cpp + DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/McAsciiParser.rl + VERBATIM) + +add_library( + mcrouter_network + AccessPoint.cpp + AsciiSerialized.cpp + AsyncMcClientImpl.cpp + AsyncTlsToPlaintextSocket.cpp + CaretProtocol.cpp + FailureDomains.cpp + FizzContextProvider.cpp + McClientRequestContext.cpp + McSerializedRequest.cpp + McSSLUtil.cpp + Qos.cpp + SecurityOptions.cpp + ServerLoad.cpp + SocketConnector.cpp + SocketUtil.cpp + ThreadLocalSSLContextProvider.cpp + ThriftTransport.cpp + WriteBuffer.cpp + McAsciiParser-gen.cpp + McAsciiParser.cpp + McParser.cpp + AsyncMcServer.cpp + AsyncMcServerWorker.cpp + ConnectionTracker.cpp + CpuController.cpp + McServerRequestContext.cpp + McServerSession.cpp + McServerThriftRequestContext.cpp + MultiOpParent.cpp) + +target_link_libraries( + mcrouter_network + PUBLIC Folly::folly + PRIVATE mcrouter_clocks + mcrouter_carbon_protocol + mcrouter_memcache_thrift + mcrouter_debug + mcrouter_thread_pools + mcrouter_compression_codecs + mcrouter_utils + fizz::fizz + FBThrift::thriftcpp2) + +add_subdirectory(gen) +add_subdirectory(test) diff --git a/mcrouter/lib/network/gen/CMakeLists.txt b/mcrouter/lib/network/gen/CMakeLists.txt new file mode 100644 index
000000000..98c433152 --- /dev/null +++ b/mcrouter/lib/network/gen/CMakeLists.txt @@ -0,0 +1,29 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_fbthrift_cpp_library(mcrouter_common_thrift Common.thrift DEPENDS mcrouter_carbon_thrift OPTIONS + "${THRIFT_OPTIONS}") + +add_fbthrift_cpp_library(mcrouter_memcache_thrift Memcache.thrift DEPENDS mcrouter_common_thrift + OPTIONS "${THRIFT_OPTIONS}") + +add_fbthrift_cpp_library( + memcache_service_thrift + MemcacheService.thrift + DEPENDS + mcrouter_memcache_thrift + FBThrift::async + SERVICES + Memcache + OPTIONS + "${THRIFT_OPTIONS}") + +add_library( + mcrouter_network_messages CommonMessages.cpp CommonMessagesThrift.cpp + MemcacheMessages.cpp MemcacheMessagesThrift.cpp) + +target_link_libraries( + mcrouter_network_messages PUBLIC mcrouter_common_thrift mcrouter_memcache_thrift + mcrouter_carbon_protocol) diff --git a/mcrouter/lib/network/test/CMakeLists.txt b/mcrouter/lib/network/test/CMakeLists.txt new file mode 100644 index 000000000..8a7307f7f --- /dev/null +++ b/mcrouter/lib/network/test/CMakeLists.txt @@ -0,0 +1,124 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. 
+ +if(NOT BUILD_TESTS) + return() +endif() + +add_executable(mcrouter_mock_mc_server MockMc.cpp MockMcServer.cpp) + +target_link_libraries( + mcrouter_mock_mc_server + PRIVATE mcrouter_network + mcrouterinternal + Folly::folly + FBThrift::thriftcpp2 + FBThrift::async + FBThrift::serverdbginfo + FBThrift::transport + FBThrift::thriftanyrep + FBThrift::thrifttype + FBThrift::thrifttyperep + FBThrift::thriftprotocol + FBThrift::rpcmetadata + FBThrift::thriftannotation + FBThrift::thriftmetadata + FBThrift::concurrency + FBThrift::runtime + FBThrift::thrift-core) + +add_executable(mcrouter_mock_mc_thrift_server MockMc.cpp MockMcThriftServer.cpp) + +target_link_libraries( + mcrouter_mock_mc_thrift_server + PRIVATE mcrouter_network + memcache_service_thrift + mcrouterinternal + Folly::folly + FBThrift::thriftcpp2 + FBThrift::async + FBThrift::serverdbginfo + FBThrift::transport + FBThrift::thriftanyrep + FBThrift::thrifttype + FBThrift::thrifttyperep + FBThrift::thriftprotocol + FBThrift::rpcmetadata + FBThrift::thriftannotation + FBThrift::thriftmetadata + FBThrift::concurrency + FBThrift::runtime + FBThrift::thrift-core) + +add_executable(mcrouter_mock_mc_server_dual MockMc.cpp MockMcServerDual.cpp) + +target_link_libraries( + mcrouter_mock_mc_server_dual + PRIVATE mcrouter_network + memcache_service_thrift + mcrouterinternal + Folly::folly + FBThrift::thriftcpp2 + FBThrift::async + FBThrift::serverdbginfo + FBThrift::transport + FBThrift::thriftanyrep + FBThrift::thrifttype + FBThrift::thrifttyperep + FBThrift::thriftprotocol + FBThrift::rpcmetadata + FBThrift::thriftannotation + FBThrift::thriftmetadata + FBThrift::concurrency + FBThrift::runtime + FBThrift::thrift-core) + +add_executable( + mcrouter_network_tests + "${CMAKE_SOURCE_DIR}/mcrouter/test/main.cpp" + ClientSocket.cpp + ListenSocket.cpp + AccessPointTest.cpp + # TODO: Fix certificate issue + # AsyncMcClientTestSync.cpp + CarbonMessageDispatcherTest.cpp + CarbonMockMcTest.cpp + CarbonQueueAppenderTest.cpp + 
McAsciiParserTest.cpp + McParserTest.cpp + McServerAsciiParserTest.cpp + MockMc.cpp + SessionTest.cpp + SessionTestHarness.cpp + TestClientServerUtil.cpp + TestMcAsciiParserUtil.cpp) + +target_link_libraries( + mcrouter_network_tests + PRIVATE mcrouter_network + mcrouter_network_tests_gen + mcrouter_network_messages + Folly::folly + GTest::gtest + FBThrift::thriftcpp2 + FBThrift::async + FBThrift::serverdbginfo + FBThrift::transport + FBThrift::thriftanyrep + FBThrift::thrifttype + FBThrift::thrifttyperep + FBThrift::thriftprotocol + FBThrift::rpcmetadata + FBThrift::thriftannotation + FBThrift::thriftmetadata + FBThrift::concurrency + FBThrift::runtime + FBThrift::thrift-core) + +# Run these tests in the source tree so that they can easily load fixture files. +gtest_discover_tests(mcrouter_network_tests + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}) + +add_subdirectory(gen) diff --git a/mcrouter/lib/network/test/gen/CMakeLists.txt b/mcrouter/lib/network/test/gen/CMakeLists.txt new file mode 100644 index 000000000..c2bbcbbe8 --- /dev/null +++ b/mcrouter/lib/network/test/gen/CMakeLists.txt @@ -0,0 +1,13 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_fbthrift_cpp_library(mcrouter_network_carbon_test_thrift CarbonTest.thrift + OPTIONS "${THRIFT_OPTIONS}") + +add_library(mcrouter_network_tests_gen CarbonTestMessages.cpp + CarbonTestMessagesThrift.cpp) + +target_link_libraries(mcrouter_network_tests_gen + PUBLIC mcrouter_network_carbon_test_thrift) diff --git a/mcrouter/lib/test/CMakeLists.txt b/mcrouter/lib/test/CMakeLists.txt new file mode 100644 index 000000000..098c1284f --- /dev/null +++ b/mcrouter/lib/test/CMakeLists.txt @@ -0,0 +1,52 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. 
+ +if(NOT BUILD_TESTS) + return() +endif() + +add_executable( + mcrouter_lib_tests + "${CMAKE_SOURCE_DIR}/mcrouter/test/main.cpp" + FiberLocalTest.cpp + HashTestUtil.cpp + Ch3HashTest.cpp + CompressionCodecManagerTest.cpp + CompressionTest.cpp + CompressionTestUtil.cpp + Crc32HashTest.cpp + DynamicUtilTest.cpp + IovecCursorTest.cpp + Lz4ImmutableTest.cpp + McResUtilTest.cpp + MigrateRouteTest.cpp + RandomRouteTest.cpp + RendezvousHashTest.cpp + RouteHandleTest.cpp + WeightedCh3HashFuncTest.cpp + WeightedCh4HashFuncTest.cpp + WeightedChHashFuncBaseTest.cpp + WeightedRendezvousHashTest.cpp) + +target_link_libraries( + mcrouter_lib_tests + PRIVATE mcrouter_compression_codecs + mcrouter_hash_functions + mcrouter_fiber_local + mcrouter_utils + mcrouter_memcache_thrift + mcroutercore + GTest::gtest + Folly::folly + Folly::follybenchmark + FBThrift::thriftcpp2) + +add_executable(mcrouter_weighted_hash_benchmark WeightedHashBenchmark.cpp) + +target_link_libraries( + mcrouter_weighted_hash_benchmark PRIVATE mcrouter_hash_functions Folly::folly + Folly::follybenchmark) + +gtest_discover_tests(mcrouter_lib_tests) diff --git a/mcrouter/lib/test/CompressionCodecManagerTest.cpp b/mcrouter/lib/test/CompressionCodecManagerTest.cpp index 39e48a48c..d6c27c747 100644 --- a/mcrouter/lib/test/CompressionCodecManagerTest.cpp +++ b/mcrouter/lib/test/CompressionCodecManagerTest.cpp @@ -19,6 +19,8 @@ namespace facebook { namespace memcache { namespace test { +#ifndef DISABLE_COMPRESSION + namespace { void validateCodec(CompressionCodec* codec) { @@ -346,6 +348,10 @@ TEST(CompressionCodecManager, getBest_serverWithoutCodecs) { CodecIdRange{1, 6}, 1234 /* body size */, 0 /* reply type id */)); } +#endif // DISABLE_COMPRESSION + } // namespace test } // namespace memcache } // namespace facebook + + diff --git a/mcrouter/mcrouter_config-impl.h b/mcrouter/mcrouter_config-impl.h index 5e0fd599f..f852921e0 100644 --- a/mcrouter/mcrouter_config-impl.h +++ b/mcrouter/mcrouter_config-impl.h @@ -6,7 
+6,3 @@ */ #pragma once - -#ifndef HAVE_CONFIG_H -static_assert(false, "mcrouter: invalid build"); -#endif diff --git a/mcrouter/mcrouter_config.h b/mcrouter/mcrouter_config.h index da02c20c0..7cfa89533 100644 --- a/mcrouter/mcrouter_config.h +++ b/mcrouter/mcrouter_config.h @@ -7,10 +7,6 @@ #pragma once -#ifndef HAVE_CONFIG_H -static_assert(false, "mcrouter: invalid build"); -#endif - /** * This header contains features specific for open source */ diff --git a/mcrouter/routes/CMakeLists.txt b/mcrouter/routes/CMakeLists.txt new file mode 100644 index 000000000..479663419 --- /dev/null +++ b/mcrouter/routes/CMakeLists.txt @@ -0,0 +1,44 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_library( + mcrouter_routes + BigValueRoute.cpp + CarbonLookasideRoute.cpp + FailoverRateLimiter.cpp + HashStopAllowListRoute.cpp + KeyParseRoute.cpp + LatencyInjectionRoute.cpp + McBucketRoute.cpp + McImportResolver.cpp + McRouteHandleProvider.cpp + McRouteHandleProvider-AllFastestRoute.cpp + McRouteHandleProvider-CarbonLookasideRoute.cpp + McRouteHandleProvider-FailoverRoute.cpp + McRouteHandleProvider-HashRoute.cpp + McRouteHandleProvider-PoolRoute.cpp + NullRoute.cpp + RateLimiter.cpp + RendezvousRouteHelpers.cpp + ShadowSettings.cpp + ShardHashFunc.cpp + ShardSelectionRouteFactory.cpp + ShardSplitRoute.cpp + ShardSplitter.cpp + SlowWarmUpRouteSettings.cpp + StagingRoute.cpp + WarmUpRoute.cpp) + +target_link_libraries( + mcrouter_routes + PRIVATE mcroutercore + mcrouterinternal + mcrouter_network_messages + mcrouter_failover_errors_settings + mcrouter_utils + Folly::folly + FBThrift::thriftcpp2) + +add_subdirectory(test) diff --git a/mcrouter/routes/test/CMakeLists.txt b/mcrouter/routes/test/CMakeLists.txt new file mode 100644 index 000000000..586a6eead --- /dev/null +++ b/mcrouter/routes/test/CMakeLists.txt @@ -0,0 +1,52 @@ +# Copyright (c) 
Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +if(NOT BUILD_TESTS) + return() +endif() + +add_executable( + mcrouter_routes_tests + "${CMAKE_SOURCE_DIR}/mcrouter/test/main.cpp" + BigValueRouteTest.cpp + BlackholeRouteTest.cpp + ClientCompatibilityCheckerRouteTest.cpp + CollectionRouteFactoryTest.cpp + ConstShardHashFuncTest.cpp + DistributionRouteTest.cpp + EagerShardSelectionRouteTest.cpp + EagerShardSelectionShadowRouteTest.cpp + ErrorRouteTest.cpp + FailoverRouteTest.cpp + FailoverWithExptimeRouteTest.cpp + HashStopAllowListRouteTest.cpp + KeyParseRouteTest.cpp + KeySplitRouteTest.cpp + LatencyInjectionRouteTest.cpp + LatestRouteTest.cpp + LoadBalancerRouteTest.cpp + McBucketRouteTest.cpp + McRefillRouteTest.cpp + MissFailoverRouteTest.cpp + OriginalClientHashRouteTest.cpp + OutstandingLimitRouteTest.cpp + PrefixSelectorRouteTest.cpp + RateLimitRouteTest.cpp + RootRouteTest.cpp + RoutePolicyMapTest.cpp + ShadowRouteTest.cpp + ShadowSettingsTest.cpp + ShardSelectionRouteTest.cpp + ShardSplitRouteTest.cpp + ShardSplitterTest.cpp + SlowWarmUpRouteTest.cpp + StagingRouteTest.cpp + WarmUpRouteTest.cpp) + +target_link_libraries( + mcrouter_routes_tests PRIVATE mcrouter_hello_goodbye mcrouter_routes + GTest::gtest Folly::folly FBThrift::thriftcpp2) + +gtest_discover_tests(mcrouter_routes_tests) diff --git a/mcrouter/test/CMakeLists.txt b/mcrouter/test/CMakeLists.txt new file mode 100644 index 000000000..0e1f77946 --- /dev/null +++ b/mcrouter/test/CMakeLists.txt @@ -0,0 +1,6 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. 
+ +add_subdirectory(cpp_unit_tests) diff --git a/mcrouter/test/cpp_unit_tests/CMakeLists.txt b/mcrouter/test/cpp_unit_tests/CMakeLists.txt new file mode 100644 index 000000000..acad0ce6e --- /dev/null +++ b/mcrouter/test/cpp_unit_tests/CMakeLists.txt @@ -0,0 +1,32 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +if(NOT BUILD_TESTS) + return() +endif() + +add_executable( + cpp_unit_tests + "${CMAKE_SOURCE_DIR}/mcrouter/test/main.cpp" + awriter_test.cpp + config_api_test.cpp + exponential_smooth_data_test.cpp + file_observer_test.cpp + flavor_test.cpp + LeaseTokenMapTest.cpp + mc_route_handle_provider_test.cpp + McrouterClientUsage.cpp + observable_test.cpp + options_test.cpp + pool_factory_test.cpp + ProxyRequestContextTest.cpp + route_test.cpp + runtime_vars_data_test.cpp) + +target_link_libraries(cpp_unit_tests GTest::gtest Folly::folly mcroutercore + mcrouterinternal mcrouter_hello_goodbye) + +# Run these tests in the source tree so that they can easily load fixture files. +gtest_discover_tests(cpp_unit_tests WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}) diff --git a/mcrouter/test/cpp_unit_tests/main.cpp b/mcrouter/test/main.cpp similarity index 68% rename from mcrouter/test/cpp_unit_tests/main.cpp rename to mcrouter/test/main.cpp index e008d4c98..080bb765c 100644 --- a/mcrouter/test/cpp_unit_tests/main.cpp +++ b/mcrouter/test/main.cpp @@ -5,9 +5,18 @@ * LICENSE file in the root directory of this source tree. */ +#include <folly/init/Init.h> #include <folly/logging/Init.h> +#include <gtest/gtest.h> // Configure folly to enable INFO+ messages, and everything else to // enable WARNING+. // Set the default log handler to log asynchronously by default.
FOLLY_INIT_LOGGING_CONFIG(".=WARNING,folly=INFO; default:async=true"); + +int main(int argc, char** argv) { + ::testing::InitGoogleTest(&argc, argv); + folly::Init init(&argc, &argv); + + return RUN_ALL_TESTS(); +} diff --git a/mcrouter/tools/CMakeLists.txt b/mcrouter/tools/CMakeLists.txt new file mode 100644 index 000000000..b50215eb8 --- /dev/null +++ b/mcrouter/tools/CMakeLists.txt @@ -0,0 +1,6 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_subdirectory(mcpiper) diff --git a/mcrouter/tools/mcpiper/CMakeLists.txt b/mcrouter/tools/mcpiper/CMakeLists.txt new file mode 100644 index 000000000..04c783e69 --- /dev/null +++ b/mcrouter/tools/mcpiper/CMakeLists.txt @@ -0,0 +1,36 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +add_executable( + mcpiper + AnsiColorCodeStream.cpp + Config.cpp + FifoReader.cpp + main.cpp + McPiper.cpp + MessagePrinter.cpp + StyledString.cpp + Util.cpp) + +target_link_libraries( + mcpiper + mcrouterinternal + Folly::folly + FBThrift::thriftcpp2 + FBThrift::async + FBThrift::serverdbginfo + FBThrift::transport + FBThrift::thriftanyrep + FBThrift::thrifttype + FBThrift::thrifttyperep + FBThrift::thriftprotocol + FBThrift::rpcmetadata + FBThrift::thriftmetadata + FBThrift::concurrency + FBThrift::thrift-core + fmt::fmt + wangle::wangle) + +add_subdirectory(test) diff --git a/mcrouter/tools/mcpiper/test/CMakeLists.txt b/mcrouter/tools/mcpiper/test/CMakeLists.txt new file mode 100644 index 000000000..762cd9a31 --- /dev/null +++ b/mcrouter/tools/mcpiper/test/CMakeLists.txt @@ -0,0 +1,18 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. + +if(NOT BUILD_TESTS) + return() +endif() + +add_executable( + mcpiper_tests "${CMAKE_SOURCE_DIR}/mcrouter/test/main.cpp" + # McPiperVisitorTest.cpp +) + +target_link_libraries(mcpiper_tests PRIVATE mcrouterinternal GTest::gtest + Folly::folly) + +gtest_discover_tests(mcpiper_tests)