diff --git a/.github/workflows/bazel.yml b/.github/workflows/bazel.yml index 24007c44..a209c1c0 100644 --- a/.github/workflows/bazel.yml +++ b/.github/workflows/bazel.yml @@ -1,6 +1,6 @@ name: Bazel build -on: [push] +on: [ push ] jobs: build: @@ -24,6 +24,14 @@ jobs: path: "~/.cache/bazel" key: bazel + - name: Clang format + shell: bash + run: ./ci/linting/clang-format.sh + + - name: Bazel format + shell: bash + run: ./ci/linting/buildifier.sh + - name: Build shell: bash run: bazel build ... diff --git a/.github/workflows/cmake-windows.yml b/.github/workflows/cmake-windows.yml index 9eb0d3d3..3453c25e 100644 --- a/.github/workflows/cmake-windows.yml +++ b/.github/workflows/cmake-windows.yml @@ -1,6 +1,6 @@ name: CMake Windows build -on: [push] +on: [ push ] env: BUILD_TYPE: Release @@ -10,24 +10,24 @@ jobs: runs-on: windows-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v2 - - uses: ilammy/msvc-dev-cmd@v1 + - uses: ilammy/msvc-dev-cmd@v1 - - name: Create Build Environment - run: cmake -E make_directory ${{github.workspace}}\out + - name: Create Build Environment + run: cmake -E make_directory ${{github.workspace}}\out - - name: Configure CMake - working-directory: ${{github.workspace}}\out - run: cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=$BUILD_TYPE -S ${{github.workspace}} -B ${{github.workspace}}\out -DPHTREE_BUILD_EXAMPLES=ON -DPHTREE_BUILD_TESTS=ON + - name: Configure CMake + working-directory: ${{github.workspace}}\out + run: cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=$BUILD_TYPE -S ${{github.workspace}} -B ${{github.workspace}}\out -DPHTREE_BUILD_EXAMPLES=ON -DPHTREE_BUILD_TESTS=ON - - name: Build - working-directory: ${{github.workspace}}\out - # Execute the build. You can specify a specific target with "--target " - run: cmake --build . --config ${env:BUILD_TYPE} + - name: Build + working-directory: ${{github.workspace}}\out + # Execute the build. You can specify a specific target with "--target " + run: cmake --build . 
--config ${env:BUILD_TYPE} - - name: Test - working-directory: ${{github.workspace}}\out - # Execute tests defined by the CMake configuration. - # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail - run: ctest -C ${env:BUILD_TYPE} + - name: Test + working-directory: ${{github.workspace}}\out + # Execute tests defined by the CMake configuration. + # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail + run: ctest -C ${env:BUILD_TYPE} diff --git a/.github/workflows/cmake.yml b/.github/workflows/cmake.yml index abdea7aa..962c2c4f 100644 --- a/.github/workflows/cmake.yml +++ b/.github/workflows/cmake.yml @@ -1,6 +1,6 @@ name: CMake build -on: [push] +on: [ push ] env: BUILD_TYPE: Release @@ -10,36 +10,36 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - - name: Create Build Environment - run: cmake -E make_directory ${{github.workspace}}/build - - - name: Configure CMake - # Use a bash shell so we can use the same syntax for environment variable - # access regardless of the host operating system - shell: bash - working-directory: ${{github.workspace}}/build - # Note the current convention is to use the -S and -B options here to specify source - # and build directories, but this is only available with CMake 3.13 and higher. - # The CMake binaries on the Github Actions machines are (as of this writing) 3.12 - run: cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DPHTREE_BUILD_ALL=ON - - - name: Build - working-directory: ${{github.workspace}}/build - shell: bash - # Execute the build. You can specify a specific target with "--target " - run: cmake --build . --config $BUILD_TYPE - - - name: Test - working-directory: ${{github.workspace}}/build - shell: bash - # Execute tests defined by the CMake configuration. - # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail - # TODO Currently tests are run via bazel only. 
- run: ctest -C $BUILD_TYPE - - - name: Example - working-directory: ${{github.workspace}}/build - shell: bash - run: examples/Example + - uses: actions/checkout@v2 + + - name: Create Build Environment + run: cmake -E make_directory ${{github.workspace}}/build + + - name: Configure CMake + # Use a bash shell so we can use the same syntax for environment variable + # access regardless of the host operating system + shell: bash + working-directory: ${{github.workspace}}/build + # Note the current convention is to use the -S and -B options here to specify source + # and build directories, but this is only available with CMake 3.13 and higher. + # The CMake binaries on the Github Actions machines are (as of this writing) 3.12 + run: cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DPHTREE_BUILD_ALL=ON + + - name: Build + working-directory: ${{github.workspace}}/build + shell: bash + # Execute the build. You can specify a specific target with "--target " + run: cmake --build . --config $BUILD_TYPE + + - name: Test + working-directory: ${{github.workspace}}/build + shell: bash + # Execute tests defined by the CMake configuration. + # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail + # TODO Currently tests are run via bazel only. 
+ run: ctest -C $BUILD_TYPE + + - name: Example + working-directory: ${{github.workspace}}/build + shell: bash + run: examples/Example diff --git a/.github/workflows/codcecov.yml b/.github/workflows/codcecov.yml new file mode 100644 index 00000000..e5eca13a --- /dev/null +++ b/.github/workflows/codcecov.yml @@ -0,0 +1,42 @@ +name: Upload CodeCov Report +on: [ push ] +jobs: + run: + runs-on: windows-latest + name: Build, Test, Upload Code Coverage Report + steps: + - name: Checkout code + uses: actions/checkout@v2 + with: + fetch-depth: '2' + id: checkout_code + - name: Setup MSBuild and add to PATH + uses: microsoft/setup-msbuild@v1.0.2 + id: setup_msbuild + + - name: Generate Solution + run: cmake -G "Visual Studio 17 2022" -A x64 . -DPHTREE_CODE_COVERAGE=ON -DCMAKE_BUILD_TYPE=Debug + + - name: Run MSBuild + id: run_msbuild + run: msbuild /p:Configuration=Debug /p:Platform=x64 /p:gtest_force_shared_crt=on phtree.sln + - name: Setup VSTest and add to PATH + uses: darenm/Setup-VSTest@v1 + id: setup_vstest + + - name: Setup OpenCppCoverage and add to PATH + id: setup_opencppcoverage + run: | + choco install OpenCppCoverage -y + echo "C:\Program Files\OpenCppCoverage" >> $env:GITHUB_PATH + + - name: Generate Report + id: generate_test_report + shell: cmd + run: OpenCppCoverage.exe --modules phtree --export_type cobertura:phtree.xml -- "vstest.console.exe" test\Debug\all_tests.exe + - name: Upload Report to Codecov + uses: codecov/codecov-action@v3 + with: + files: ./phtree.xml + fail_ci_if_error: true + functionalities: fix diff --git a/.gitignore b/.gitignore index 75f038ae..db7ffe21 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,7 @@ compile_commands.json perf.data* build out +cygwin CMakeSettings.json /cmake-build-debug/ diff --git a/CHANGELOG.md b/CHANGELOG.md index b74eb8ff..2584379e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,10 +6,14 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
## [Unreleased] ### Added -- Nothing. +- Added build features: + - linting for C++ and bazel files. + - Added CI status badges. + - Added test coverage + [#53](https://github.com/tzaeschke/phtree-cpp/issues/53) ### Changed -- Nothing. +- Cleaned up build scripts. [#53](https://github.com/tzaeschke/phtree-cpp/issues/53) ### Removed - Nothing. diff --git a/CMakeLists.txt b/CMakeLists.txt index 6936ad64..69492bdb 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -15,6 +15,7 @@ if (NOT CMAKE_BUILD_TYPE) set(CMAKE_BUILD_TYPE "Release" CACHE STRING "Choose Release or Debug" FORCE) endif () + # --------------------------------------------------------------------------------------- # Build options # --------------------------------------------------------------------------------------- @@ -28,10 +29,15 @@ option(PHTREE_BUILD_EXAMPLES "Build examples" OFF) # testing options option(PHTREE_BUILD_TESTS "Build tests" OFF) #option(PHTREE_BUILD_TESTS_HO "Build tests using the header only version" OFF) +option(PHTREE_CODE_COVERAGE "Collect coverage from test library" OFF) +if (PHTREE_CODE_COVERAGE) + set(PHTREE_BUILD_TESTS ON) +endif () # bench options option(PHTREE_BUILD_BENCHMARKS "Build benchmarks (Requires https://github.com/google/benchmark.git to be installed)" OFF) + # --------------------------------------------------------------------------------------- # Compiler config # --------------------------------------------------------------------------------------- @@ -78,6 +84,9 @@ else () else () set(CMAKE_CXX_FLAGS_RELEASE "-O3 -mavx") endif () + if (PHTREE_CODE_COVERAGE) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17 -Wall -Werror -Wa,-mbig-obj") + endif () endif () # --------------------------------------------------------------------------------------- @@ -90,7 +99,7 @@ if (PHTREE_BUILD_EXAMPLES OR PHTREE_BUILD_ALL) add_subdirectory(examples) endif () -if (!MSVC AND (PHTREE_BUILD_BENCHMARKS OR PHTREE_BUILD_ALL)) +if ((PHTREE_BUILD_BENCHMARKS OR PHTREE_BUILD_ALL) 
AND NOT MSVC) message(STATUS "Generating benchmarks") add_subdirectory(benchmark) endif () diff --git a/README.md b/README.md index 7875e62d..360ac201 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,13 @@ -**This is a fork of [Improbable's PH-tree](https://github.com/improbable-eng/phtree-cpp)**. +**This is a fork of [Improbable's (currently unmaintained) PH-tree](https://github.com/improbable-eng/phtree-cpp)**. + +Multi-dimensional / spatial index with very fast insert/erase/relocate operations and scalability with large datasets. +This library is C++ / header only. + +![Bazel Linux build](https://github.com/tzaeschke/phtree-cpp/actions/workflows/bazel.yml/badge.svg) +![CMake Linux build](https://github.com/tzaeschke/phtree-cpp/actions/workflows/cmake.yml/badge.svg) +![CMake Windows build](https://github.com/tzaeschke/phtree-cpp/actions/workflows/cmake-windows.yml/badge.svg) +[![codecov](https://codecov.io/gh/tzaeschke/phtree-cpp/branch/master/graph/badge.svg?token=V5XVRQG754)](https://codecov.io/gh/tzaeschke/phtree-cpp) +[![License](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) # PH-Tree C++ diff --git a/WORKSPACE b/WORKSPACE index 98b0dce9..89f0736d 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -1,18 +1,6 @@ # Bazel bootstrapping -load("//tools/build_rules:http.bzl", "http_archive", "http_file") - -http_archive( - name = "bazel_skylib", - sha256 = "1dde365491125a3db70731e25658dfdd3bc5dbdfd11b840b3e987ecf043c7ca0", - url = "https://github.com/bazelbuild/bazel-skylib/releases/download/0.9.0/bazel_skylib-0.9.0.tar.gz", -) - -load("@bazel_skylib//lib:versions.bzl", "versions") - -versions.check( - minimum_bazel_version = "2.0.0", -) +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive", "http_file") # NOTE: We make third_party/ its own bazel workspace because it allows to run `bazel build ...` without # having all targets defined in third-party BUILD files in that directory buildable. 
diff --git a/benchmark/BUILD b/benchmark/BUILD index 4df29874..66860316 100644 --- a/benchmark/BUILD +++ b/benchmark/BUILD @@ -25,8 +25,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -40,8 +40,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -55,8 +55,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -70,8 +70,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -85,8 +85,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -100,8 +100,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -115,8 +115,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -130,8 +130,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -145,8 +145,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -160,8 +160,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -175,8 +175,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -190,8 +190,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -205,8 +205,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], 
@@ -220,8 +220,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -235,8 +235,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -250,8 +250,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -265,8 +265,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -280,8 +280,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -295,8 +295,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -310,8 +310,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -325,8 +325,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -340,8 +340,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -355,8 +355,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], @@ -370,8 +370,8 @@ cc_binary( ], linkstatic = True, deps = [ - "//phtree", ":benchmark", + "//phtree", "@gbenchmark//:benchmark", "@spdlog", ], diff --git a/ci/includes/bazel.sh b/ci/includes/bazel.sh deleted file mode 100755 index 79a70e5d..00000000 --- a/ci/includes/bazel.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash - -source ci/includes/os.sh - -# Main function that should be used by scripts sourcing this file. 
-function runBazel() { - BAZEL_SUBCOMMAND="$1" - shift - "$(pwd)/tools/bazel" "$BAZEL_SUBCOMMAND" ${BAZEL_CI_CONFIG:-} "$@" -} - -function getBazelVersion() { - echo "4.2.2" -} diff --git a/ci/linting/buildifier.sh b/ci/linting/buildifier.sh index 1be7b1c3..1344e2e3 100755 --- a/ci/linting/buildifier.sh +++ b/ci/linting/buildifier.sh @@ -4,7 +4,6 @@ set -x -e -u -o pipefail cd "$(dirname "$0")/../../" -source ci/includes/bazel.sh source ci/includes/os.sh MAYBEARG='-mode=check' @@ -16,9 +15,9 @@ if [ $# -eq 1 ]; then fi # Ensure Bazel is installed. -runBazel version +bazel version -if runBazel run buildifier -- ${MAYBEARG} -v $(find "$(pwd)/" \( -name BUILD -o -name WORKSPACE \) -type f); then +if bazel run buildifier -- ${MAYBEARG} -v $(find "$(pwd)/" \( -name BUILD -o -name WORKSPACE \) -type f); then echo -e "\033[0;32mAll BUILD and WORKSPACE files passed buildifier linting check.\033[0m" else echo -e "\033[0;31mThe above listed BUILD and WORKSPACE file(s) didn't pass the buildifier linting check!\033[0m" diff --git a/ci/linting/clang-format.sh b/ci/linting/clang-format.sh index cebf4a22..551151be 100755 --- a/ci/linting/clang-format.sh +++ b/ci/linting/clang-format.sh @@ -3,7 +3,6 @@ set -e -u -o pipefail source ci/includes/os.sh -source ci/includes/bazel.sh TARGETS="//..." EXCLUDED_TARGETS="" @@ -66,22 +65,22 @@ function generateAqueryTargetString() { function bazelLintTest() { # Use bazel to create patch files for all eligible source files. # Fail if any of the patch files are non-empty (i.e. lint was detected). - CLANG_FORMAT="$(clangFormatLocation)" runBazel build --config lint --output_groups=clang_format_test -- $(generateBuildTargetString) + CLANG_FORMAT="$(clangFormatLocation)" bazel build --config lint --output_groups=clang_format_test -- $(generateBuildTargetString) } function bazelLintFix() { # Use bazel to create patch files for all eligible source files. 
- CLANG_FORMAT="$(clangFormatLocation)" runBazel build --config lint --output_groups=clang_format_patches_only -- $(generateBuildTargetString) + CLANG_FORMAT="$(clangFormatLocation)" bazel build --config lint --output_groups=clang_format_patches_only -- $(generateBuildTargetString) # Find bazel-bin prefix. - BAZEL_BIN=$(runBazel info bazel-bin) + BAZEL_BIN=$(bazel info bazel-bin) # I.e. on Linux, this is `bazel-out/k8-gcc-opt/bin`. - PREFIX=${BAZEL_BIN#$(runBazel info execution_root)/} + PREFIX=${BAZEL_BIN#$(bazel info execution_root)/} # Use aquery to get the list of output files of the `CreatePatch` action, # Then strip the patch path down to that of its source file, and apply # the patch file generated by Bazel to the original source file. - CLANG_FORMAT="$(clangFormatLocation)" runBazel aquery --config lint --include_aspects --output_groups clang_format_patches_only "mnemonic(\"CreatePatch\", $(generateAqueryTargetString))" --output textproto \ + CLANG_FORMAT="$(clangFormatLocation)" bazel aquery --config lint --include_aspects --output_groups clang_format_patches_only "mnemonic(\"CreatePatch\", $(generateAqueryTargetString))" --output textproto \ `# Get relative paths to source files` \ `# perl used instead of grep --perl-regexp since grep macOS doesnt support it` \ | perl -ne "while(/(?<=exec_path: \"${PREFIX//\//\\/}\/).*\.patch_.+(?=\")/g){print \"\$&\n\";}" \ diff --git a/phtree/common/BUILD b/phtree/common/BUILD index b25588b1..a8e5728f 100644 --- a/phtree/common/BUILD +++ b/phtree/common/BUILD @@ -3,6 +3,8 @@ package(default_visibility = ["//visibility:private"]) cc_library( name = "common", hdrs = [ + "b_plus_tree_hash_map.h", + "b_plus_tree_map.h", "base_types.h", "bits.h", "common.h", @@ -11,8 +13,6 @@ cc_library( "distance.h", "filter.h", "flat_array_map.h", - "b_plus_tree_hash_map.h", - "b_plus_tree_map.h", "flat_sparse_map.h", "tree_stats.h", ], diff --git a/test/BUILD b/test/BUILD index 3191aefe..55a8a213 100644 --- a/test/BUILD +++ b/test/BUILD 
@@ -8,7 +8,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -21,7 +21,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -34,7 +34,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -47,7 +47,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -60,7 +60,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -73,7 +73,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -86,7 +86,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -99,7 +99,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -112,7 +112,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -125,7 +125,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -138,7 +138,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -151,7 +151,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -164,7 +164,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -177,7 +177,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -190,7 +190,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -203,7 +203,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + 
"//phtree", "//test/testing/gtest_main", ], ) @@ -216,7 +216,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -229,7 +229,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -242,7 +242,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) @@ -255,8 +255,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree:phtree", + "//phtree", "//test/testing/gtest_main", ], ) - diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index 3484ccb3..ae9c9462 100644 --- a/test/CMakeLists.txt +++ b/test/CMakeLists.txt @@ -1,9 +1,6 @@ cmake_minimum_required(VERSION 3.14) project(phtree-tests LANGUAGES CXX) -# Avoids LNK2038 Error with MSVC -set(gtest_force_shared_crt on) - include(FetchContent) include(common/scripts.cmake) @@ -12,9 +9,11 @@ FetchContent_Declare( GIT_REPOSITORY https://github.com/google/googletest.git GIT_TAG release-1.12.1 ) +if (MSVC) + # Avoids LNK2038 Error with MSVC + set(gtest_force_shared_crt ON CACHE BOOL "" FORCE) +endif () FetchContent_MakeAvailable(googletest) -add_library(GTest::GTest INTERFACE IMPORTED) -target_link_libraries(GTest::GTest INTERFACE gtest_main) # The next line is optional, but keeps your CACHE cleaner: mark_as_advanced( @@ -27,35 +26,61 @@ mark_as_advanced( set_target_properties(gtest PROPERTIES FOLDER extern) set_target_properties(gtest_main PROPERTIES FOLDER extern) +#include(GoogleTest) +#gtest_discover_tests(all_tests_driver) -# package_add_test(phtree_all_test phtree_test.cc phtree_d_test.cc phtree_f_test.cc) -package_add_test(phtree_test phtree_test.cc) -package_add_test(phtree_test_const_values phtree_test_const_values.cc) -package_add_test(phtree_test_issues phtree_test_issues.cc) -target_compile_definitions(phtree_test_issues PUBLIC SKIP_TEST_MEMORY_LEAKS=ON) -package_add_test(phtree_test_ptr_values phtree_test_ptr_values.cc) 
-package_add_test(phtree_test_unique_ptr_values phtree_test_unique_ptr_values.cc) - -package_add_test(phtree_f_test phtree_f_test.cc) +if (PHTREE_CODE_COVERAGE) + package_add_test_main(all_tests + all_tests.cc + phtree_test.cc + phtree_test_const_values.cc + phtree_test_issues.cc + phtree_test_ptr_values.cc + phtree_test_unique_ptr_values.cc + phtree_f_test.cc + phtree_d_test.cc + phtree_d_test_copy_move.cc + phtree_d_test_custom_key.cc + phtree_d_test_filter.cc + phtree_d_test_preprocessor.cc + phtree_box_f_test.cc + phtree_box_d_test.cc + phtree_box_d_test_filter.cc + phtree_box_d_test_query_types.cc + phtree_multimap_d_test.cc + phtree_multimap_d_test_copy_move.cc + phtree_multimap_d_test_filter.cc + phtree_multimap_d_test_unique_ptr_values.cc + phtree_multimap_box_d_test.cc) + target_compile_definitions(all_tests PUBLIC SKIP_TEST_MEMORY_LEAKS=ON) +else () + package_add_test(phtree_test phtree_test.cc) + package_add_test(phtree_test_const_values phtree_test_const_values.cc) + package_add_test(phtree_test_issues phtree_test_issues.cc) + target_compile_definitions(phtree_test_issues PUBLIC SKIP_TEST_MEMORY_LEAKS=ON) + package_add_test(phtree_test_ptr_values phtree_test_ptr_values.cc) + package_add_test(phtree_test_unique_ptr_values phtree_test_unique_ptr_values.cc) -package_add_test(phtree_d_test phtree_d_test.cc) -package_add_test(phtree_d_test_copy_move phtree_d_test_copy_move.cc) -package_add_test(phtree_d_test_custom_key phtree_d_test_custom_key.cc) -package_add_test(phtree_d_test_filter phtree_d_test_filter.cc) -package_add_test(phtree_d_test_preprocessor phtree_d_test_preprocessor.cc) + package_add_test(phtree_f_test phtree_f_test.cc) -package_add_test(phtree_box_f_test phtree_box_f_test.cc) + package_add_test(phtree_d_test phtree_d_test.cc) + package_add_test(phtree_d_test_copy_move phtree_d_test_copy_move.cc) + package_add_test(phtree_d_test_custom_key phtree_d_test_custom_key.cc) + package_add_test(phtree_d_test_filter phtree_d_test_filter.cc) + 
package_add_test(phtree_d_test_preprocessor phtree_d_test_preprocessor.cc) -package_add_test(phtree_box_f_test phtree_box_f_test.cc) + package_add_test(phtree_box_f_test phtree_box_f_test.cc) -package_add_test(phtree_box_d_test phtree_box_d_test.cc) -package_add_test(phtree_box_d_test_filter phtree_box_d_test_filter.cc) -package_add_test(phtree_box_d_test_query_types phtree_box_d_test_query_types.cc) + package_add_test(phtree_box_d_test phtree_box_d_test.cc) + package_add_test(phtree_box_d_test_filter phtree_box_d_test_filter.cc) + package_add_test(phtree_box_d_test_query_types phtree_box_d_test_query_types.cc) -package_add_test(phtree_multimap_d_test phtree_multimap_d_test.cc) -package_add_test(phtree_multimap_d_test_copy_move phtree_multimap_d_test_copy_move.cc) -package_add_test(phtree_multimap_d_test_filter phtree_multimap_d_test_filter.cc) -package_add_test(phtree_multimap_d_test_unique_ptr_values phtree_multimap_d_test_unique_ptr_values.cc) + package_add_test(phtree_multimap_d_test phtree_multimap_d_test.cc) + package_add_test(phtree_multimap_d_test_copy_move phtree_multimap_d_test_copy_move.cc) + package_add_test(phtree_multimap_d_test_filter phtree_multimap_d_test_filter.cc) + package_add_test(phtree_multimap_d_test_unique_ptr_values phtree_multimap_d_test_unique_ptr_values.cc) -package_add_test(phtree_multimap_box_d_test phtree_multimap_box_d_test.cc) + package_add_test(phtree_multimap_box_d_test phtree_multimap_box_d_test.cc) + add_subdirectory(common) +endif () diff --git a/test/all_tests.cc b/test/all_tests.cc new file mode 100644 index 00000000..ddc6dfc6 --- /dev/null +++ b/test/all_tests.cc @@ -0,0 +1,11 @@ +#include <gtest/gtest.h> + +// #include "gtest/gtest.h" + +//#include "phtree_f_test.cc" +//#include "phtree_test.cc" + +int main(int argc, char** argv) { + ::testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} \ No newline at end of file diff --git a/test/common/BUILD b/test/common/BUILD index 01452079..d9912bff 100644 --- a/test/common/BUILD +++ b/test/common/BUILD @@ -8,7 +8,7 
@@ cc_test( ], linkstatic = True, deps = [ - "//phtree/common:common", + "//phtree/common", "//test/testing/gtest_main", ], ) @@ -21,7 +21,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree/common:common", + "//phtree/common", "//test/testing/gtest_main", ], ) @@ -34,7 +34,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree/common:common", + "//phtree/common", "//test/testing/gtest_main", ], ) @@ -47,7 +47,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree/common:common", + "//phtree/common", "//test/testing/gtest_main", ], ) @@ -60,7 +60,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree/common:common", + "//phtree/common", "//test/testing/gtest_main", ], ) @@ -73,7 +73,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree/common:common", + "//phtree/common", "//test/testing/gtest_main", ], ) @@ -86,7 +86,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree/common:common", + "//phtree/common", "//test/testing/gtest_main", ], ) @@ -99,7 +99,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree/common:common", + "//phtree/common", "//test/testing/gtest_main", ], ) @@ -112,7 +112,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree/common:common", + "//phtree/common", "//test/testing/gtest_main", ], ) @@ -125,7 +125,7 @@ cc_test( ], linkstatic = True, deps = [ - "//phtree/common:common", + "//phtree/common", "//test/testing/gtest_main", ], ) diff --git a/test/common/scripts.cmake b/test/common/scripts.cmake index 012bb4fa..bfcc0bd5 100644 --- a/test/common/scripts.cmake +++ b/test/common/scripts.cmake @@ -3,7 +3,7 @@ macro(package_add_test TESTNAME) add_executable(${TESTNAME} ${ARGN}) # link the Google test infrastructure, mocking library, and a default main function to # the test executable. Remove g_test_main if writing your own main function. - target_link_libraries(${TESTNAME} gtest gmock gtest_main) + target_link_libraries(${TESTNAME} GTest::gtest_main) target_include_directories(${TESTNAME} PRIVATE ${PROJECT_SOURCE_DIR}/..) 
# gtest_discover_tests replaces gtest_add_tests, # see https://cmake.org/cmake/help/v3.10/module/GoogleTest.html for more options to pass to it @@ -14,3 +14,20 @@ macro(package_add_test TESTNAME) ) set_target_properties(${TESTNAME} PROPERTIES FOLDER test) endmacro() + +macro(package_add_test_main TESTNAME) + # create an executable in which the tests will be stored + add_executable(${TESTNAME} ${ARGN}) + # link the Google test infrastructure, mocking library, and a default main function to + # the test executable. Remove g_test_main if writing your own main function. + target_link_libraries(${TESTNAME} gtest gmock) + target_include_directories(${TESTNAME} PRIVATE ${PROJECT_SOURCE_DIR}/..) + # gtest_discover_tests replaces gtest_add_tests, + # see https://cmake.org/cmake/help/v3.10/module/GoogleTest.html for more options to pass to it + gtest_discover_tests(${TESTNAME} + # set a working directory so your project root so that you can find test data via paths relative to the project root + WORKING_DIRECTORY ${PROJECT_DIR} + PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${PROJECT_DIR}" + ) + set_target_properties(${TESTNAME} PROPERTIES FOLDER test) +endmacro() \ No newline at end of file diff --git a/test/phtree_box_d_test.cc b/test/phtree_box_d_test.cc index cf4c3955..ad1782d0 100644 --- a/test/phtree_box_d_test.cc +++ b/test/phtree_box_d_test.cc @@ -21,6 +21,8 @@ using namespace improbable::phtree; +namespace phtree_box_d_test { + class DoubleRng { public: DoubleRng(double minIncl, double maxExcl) : eng(), rnd{minIncl, maxExcl} {} @@ -752,3 +754,5 @@ TEST(PhTreeBoxDTest, SmokeTestTreeAPI) { PhTreeBoxD<3, const Id> treeConst; treeConst.emplace(PhBoxD<3>({1, 2, 3}, {4, 5, 6}), Id(1)); } + +} // namespace phtree_box_d_test diff --git a/test/phtree_box_d_test_filter.cc b/test/phtree_box_d_test_filter.cc index 93fac118..f457421f 100644 --- a/test/phtree_box_d_test_filter.cc +++ b/test/phtree_box_d_test_filter.cc @@ -21,6 +21,8 @@ using namespace improbable::phtree; +namespace 
phtree_box_d_test_filter { + template using TestKey = PhBoxD; @@ -629,4 +631,6 @@ TEST(PhTreeBoxDFilterTest, TestAABBQuery) { Query0<3>(&testAABBQuery<3>); QueryManyAABB<3>(&testAABBQuery<3>); QueryAll<3>(&testAABBQuery<3>); -} \ No newline at end of file +} + +} // namespace phtree_box_d_test_filter diff --git a/test/phtree_box_d_test_query_types.cc b/test/phtree_box_d_test_query_types.cc index fea0cd99..84a77a83 100644 --- a/test/phtree_box_d_test_query_types.cc +++ b/test/phtree_box_d_test_query_types.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_box_d_test_query_types { + template using TestPoint = PhBoxD; @@ -60,3 +62,5 @@ TEST(PhTreeBoxDTestQueryTypes, SmokeTestQuery) { q3++; ASSERT_EQ(q3, tree.end()); } + +} // namespace phtree_box_d_test_query_types diff --git a/test/phtree_box_f_test.cc b/test/phtree_box_f_test.cc index c8546528..e95f8334 100644 --- a/test/phtree_box_f_test.cc +++ b/test/phtree_box_f_test.cc @@ -21,6 +21,8 @@ using namespace improbable::phtree; +namespace phtree_box_f_test { + template using TestPoint = PhBoxF; @@ -756,3 +758,5 @@ TEST(PhTreeBoxFTest, SmokeTestTreeAPI) { PhTreeBoxF<3, const Id> treeConst; treeConst.emplace(TestPoint<3>({1, 2, 3}, {4, 5, 6}), Id(1)); } + +} // namespace phtree_box_f_test diff --git a/test/phtree_d_test.cc b/test/phtree_d_test.cc index 8894c6fd..c18d559d 100644 --- a/test/phtree_d_test.cc +++ b/test/phtree_d_test.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_d_test { + template using TestPoint = PhPointD; @@ -1152,3 +1154,5 @@ TEST(PhTreeDTest, SmokeTestTreeAPI) { PhTreeD<3, const Id> treeConst; treeConst.emplace(PhPointD<3>{1, 2, 3}, Id(1)); } + +} // namespace phtree_d_test diff --git a/test/phtree_d_test_copy_move.cc b/test/phtree_d_test_copy_move.cc index c20fcf68..506bc66b 100644 --- a/test/phtree_d_test_copy_move.cc +++ b/test/phtree_d_test_copy_move.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_d_test_copy_move { + 
template using TestPoint = PhPointD; @@ -296,3 +298,5 @@ TEST(PhTreeDTestCopyMove, SmokeTestBasicOpsCopyFails) { SmokeTestBasicOpsMoveOnly<20, IdCopyOrMove>(100); SmokeTestBasicOpsMoveOnly<63, IdCopyOrMove>(100); } + +} // namespace phtree_d_test_copy_move diff --git a/test/phtree_d_test_custom_key.cc b/test/phtree_d_test_custom_key.cc index 914b66f5..4a22f54d 100644 --- a/test/phtree_d_test_custom_key.cc +++ b/test/phtree_d_test_custom_key.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_d_test_custom_key { + static const double MY_MULTIPLIER = 1000000.; /* @@ -215,3 +217,5 @@ void SmokeTestBasicOps() { TEST(PhTreeDTestCustomKey, SmokeTestBasicOps) { SmokeTestBasicOps<3>(); } + +} // namespace phtree_d_test_custom_key diff --git a/test/phtree_d_test_filter.cc b/test/phtree_d_test_filter.cc index 551e343b..86ef2c3f 100644 --- a/test/phtree_d_test_filter.cc +++ b/test/phtree_d_test_filter.cc @@ -21,6 +21,8 @@ using namespace improbable::phtree; +namespace phtree_d_test_filter { + template using TestPoint = PhPointD; @@ -240,8 +242,7 @@ struct CallbackConst { } }; -[[maybe_unused]] -static void print_id_counters() { +[[maybe_unused]] static void print_id_counters() { std::cout << "dc=" << f_default_construct_ << " c=" << f_construct_ << " cc=" << f_copy_construct_ << " mc=" << f_move_construct_ << " ca=" << f_copy_assign_ << " ma=" << f_move_assign_ << " d=" << f_destruct_ @@ -479,3 +480,5 @@ TEST(PhTreeDFilterTest, TestSphereQueryAll) { testSphereQuery(p, 10000, 1000, n); ASSERT_EQ(1000, n); } + +} // namespace phtree_d_test_filter diff --git a/test/phtree_d_test_preprocessor.cc b/test/phtree_d_test_preprocessor.cc index d01c891c..588a2a20 100644 --- a/test/phtree_d_test_preprocessor.cc +++ b/test/phtree_d_test_preprocessor.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_d_test_preprocessor { + template using TestPoint = PhPointD; @@ -151,3 +153,5 @@ TEST(PhTreeDTestPreprocessor, SmokeTestBasicOps) { 
SmokeTestBasicOps<10>(); SmokeTestBasicOps<20>(); } + +} // namespace phtree_d_test_preprocessor diff --git a/test/phtree_f_test.cc b/test/phtree_f_test.cc index c7d593c9..42d6f5dd 100644 --- a/test/phtree_f_test.cc +++ b/test/phtree_f_test.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_f_test { + template using TestPoint = PhPointF; @@ -993,3 +995,5 @@ TEST(PhTreeFTest, SmokeTestTreeAPI) { PhTreeF<3, const Id> treeConst; treeConst.emplace(PhPointF<3>{1, 2, 3}, Id(1)); } + +} // namespace phtree_f_test diff --git a/test/phtree_multimap_box_d_test.cc b/test/phtree_multimap_box_d_test.cc index e34d1206..386de516 100644 --- a/test/phtree_multimap_box_d_test.cc +++ b/test/phtree_multimap_box_d_test.cc @@ -21,6 +21,8 @@ using namespace improbable::phtree; +namespace phtree_multimap_box_d_test { + // Number of entries that have the same coordinate static const size_t NUM_DUPL = 4; static const double WORLD_MIN = -1000; @@ -58,16 +60,19 @@ struct Id { int _i; int data_; }; +} // namespace phtree_multimap_box_d_test namespace std { template <> -struct hash { - size_t operator()(const Id& x) const { +struct hash { + size_t operator()(const phtree_multimap_box_d_test::Id& x) const { return std::hash{}(x._i); } }; }; // namespace std +namespace phtree_multimap_box_d_test { + struct PointDistance { PointDistance(double distance, size_t id) : _distance(distance), _id(static_cast(id)) {} @@ -1012,3 +1017,5 @@ TEST(PhTreeMMBoxDTest, SmokeTestTreeAPI) { treePtr.clear(); delete idPtr; } + +} // namespace phtree_multimap_box_d_test diff --git a/test/phtree_multimap_d_test.cc b/test/phtree_multimap_d_test.cc index 001a1207..495fa1ff 100644 --- a/test/phtree_multimap_d_test.cc +++ b/test/phtree_multimap_d_test.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_multimap_d_test { + // Number of entries that have the same coordinate static const size_t NUM_DUPL = 4; static const double WORLD_MIN = -1000; @@ -57,16 +59,19 @@ struct Id { 
int _i; int data_; }; +} namespace std { template <> -struct hash { - size_t operator()(const Id& x) const { +struct hash { + size_t operator()(const phtree_multimap_d_test::Id& x) const { return std::hash{}(x._i); } }; }; // namespace std +namespace phtree_multimap_d_test { + struct PointDistance { PointDistance(double distance, size_t id) : _distance(distance), _id(static_cast(id)) {} @@ -1278,4 +1283,6 @@ TEST(PhTreeMMDTest, TestMovableIterators) { // Not movable due to constant fields // ASSERT_TRUE(std::is_move_assignable_v()))>); -} \ No newline at end of file +} + +} // namespace phtree_multimap_d_test diff --git a/test/phtree_multimap_d_test_copy_move.cc b/test/phtree_multimap_d_test_copy_move.cc index 49f307e9..ed3c652a 100644 --- a/test/phtree_multimap_d_test_copy_move.cc +++ b/test/phtree_multimap_d_test_copy_move.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_multimap_d_test_copy_move { + // Number of entries that have the same coordinate static const size_t NUM_DUPL = 4; static const double WORLD_MIN = -1000; @@ -102,28 +104,31 @@ struct IdCopyOrMove { size_t _i{}; int _data{}; }; +} namespace std { template <> -struct hash { - size_t operator()(const IdCopyOnly& x) const { +struct hash { + size_t operator()(const phtree_multimap_d_test_copy_move::IdCopyOnly& x) const { return std::hash{}(x._i); } }; template <> -struct hash { - size_t operator()(const IdMoveOnly& x) const { +struct hash { + size_t operator()(const phtree_multimap_d_test_copy_move::IdMoveOnly& x) const { return std::hash{}(x._i); } }; template <> -struct hash { - size_t operator()(const IdCopyOrMove& x) const { +struct hash { + size_t operator()(const phtree_multimap_d_test_copy_move::IdCopyOrMove& x) const { return std::hash{}(x._i); } }; }; // namespace std +namespace phtree_multimap_d_test_copy_move { + struct IdHash { template std::size_t operator()(std::pair const& v) const { @@ -321,3 +326,5 @@ TEST(PhTreeMMDTestCopyMove, SmokeTestBasicOpsCopyFails) { 
SmokeTestBasicOpsMoveOnly<20, IdCopyOrMove>(100); SmokeTestBasicOpsMoveOnly<63, IdCopyOrMove>(100); } + +} // namespace phtree_multimap_d_test_copy_move diff --git a/test/phtree_multimap_d_test_filter.cc b/test/phtree_multimap_d_test_filter.cc index 0fc5576d..89b04057 100644 --- a/test/phtree_multimap_d_test_filter.cc +++ b/test/phtree_multimap_d_test_filter.cc @@ -21,6 +21,8 @@ using namespace improbable::phtree; +namespace phtree_multimap_d_test_filter { + // Number of entries that have the same coordinate static const size_t NUM_DUPL = 4; [[maybe_unused]] static const double WORLD_MIN = -1000; @@ -61,16 +63,19 @@ struct Id { int _i; }; +} // namespace phtree_multimap_d_test_filter namespace std { template <> -struct hash { - size_t operator()(const Id& x) const { +struct hash { + size_t operator()(const phtree_multimap_d_test_filter::Id& x) const { return std::hash{}(x._i); } }; }; // namespace std +namespace phtree_multimap_d_test_filter { + struct IdHash { template std::size_t operator()(std::pair const& v) const { @@ -683,3 +688,5 @@ TEST(PhTreeMMDFilterTest, TestAABBQuery) { QueryManyAABB<3>(&testAABBQuery<3>); QueryAll<3>(&testAABBQuery<3>); } + +} // namespace phtree_multimap_d_test_filter diff --git a/test/phtree_multimap_d_test_unique_ptr_values.cc b/test/phtree_multimap_d_test_unique_ptr_values.cc index 28c31c2f..5364804a 100644 --- a/test/phtree_multimap_d_test_unique_ptr_values.cc +++ b/test/phtree_multimap_d_test_unique_ptr_values.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_multimap_d_test_unique_ptr_values { + // Number of entries that have the same coordinate static const size_t NUM_DUPL = 4; static const double WORLD_MIN = -1000; @@ -56,26 +58,33 @@ struct IdObj { }; using Id = std::unique_ptr; +} // namespace phtree_multimap_d_test_unique_ptr_values namespace std { template <> -struct hash { - size_t operator()(const Id& x) const { +struct hash { + size_t operator()(const 
phtree_multimap_d_test_unique_ptr_values::Id& x) const { return std::hash{}(x->_i); } }; }; // namespace std struct equal_to_content { - bool operator()(const Id& x1, const Id& x2) const { + bool operator()( + const phtree_multimap_d_test_unique_ptr_values::Id& x1, + const phtree_multimap_d_test_unique_ptr_values::Id& x2) const { return (*x1) == (*x2); } }; struct less_content { - bool operator()(const Id& x1, const Id& x2) const { + bool operator()( + const phtree_multimap_d_test_unique_ptr_values::Id& x1, + const phtree_multimap_d_test_unique_ptr_values::Id& x2) const { return (*x1)._i < (*x2)._i; } }; +namespace phtree_multimap_d_test_unique_ptr_values { + template using TestTree = PhTreeMultiMap< DIM, @@ -375,3 +384,5 @@ TEST(PhTreeMMDTestUniquePtr, TestUpdateWithRelocateIfCornerCases) { ASSERT_EQ(0u, tree.relocate_if(point0, point1, TWO)); PhTreeDebugHelper::CheckConsistency(tree); } + +} // namespace phtree_multimap_d_test_unique_ptr_values diff --git a/test/phtree_test.cc b/test/phtree_test.cc index 51a1d5b5..88aa2b40 100644 --- a/test/phtree_test.cc +++ b/test/phtree_test.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_test { + template using TestPoint = PhPoint; @@ -1360,4 +1362,6 @@ TEST(PhTreeTest, TestMovableIterators) { // Not movable due to constant fields // ASSERT_TRUE(std::is_move_assignable_v()))>); -} \ No newline at end of file +} + +} // namespace phtree_test diff --git a/test/phtree_test_const_values.cc b/test/phtree_test_const_values.cc index bcce72bc..64dd432d 100644 --- a/test/phtree_test_const_values.cc +++ b/test/phtree_test_const_values.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_test_const_values { + template using TestPoint = PhPoint; @@ -697,3 +699,5 @@ TEST(PhTreeTestConst, TestKnnQuery) { ASSERT_EQ(Nq, n); } } + +} // namespace phtree_test_const_values diff --git a/test/phtree_test_issues.cc b/test/phtree_test_issues.cc index a8f2b9d8..de9c67f1 100644 --- 
a/test/phtree_test_issues.cc +++ b/test/phtree_test_issues.cc @@ -22,28 +22,28 @@ using namespace improbable::phtree; - using namespace std; +namespace phtree_test_issues { + #if defined(__clang__) || defined(__GNUC__) -void mem_usage(double &vm_usage, double &resident_set) { +void mem_usage(double& vm_usage, double& resident_set) { vm_usage = 0.0; resident_set = 0.0; - ifstream stat_stream("/proc/self/stat", ios_base::in); //get info from proc directory - //create some variables to get info + ifstream stat_stream("/proc/self/stat", ios_base::in); // get info from proc directory + // create some variables to get info string pid, comm, state, ppid, pgrp, session, tty_nr; string tpgid, flags, minflt, cminflt, majflt, cmajflt; string utime, stime, cutime, cstime, priority, nice; string O, itrealvalue, starttime; unsigned long vsize; long rss; - stat_stream >> pid >> comm >> state >> ppid >> pgrp >> session >> tty_nr - >> tpgid >> flags >> minflt >> cminflt >> majflt >> cmajflt - >> utime >> stime >> cutime >> cstime >> priority >> nice - >> O >> itrealvalue >> starttime >> vsize >> rss; // don't care about the rest + stat_stream >> pid >> comm >> state >> ppid >> pgrp >> session >> tty_nr >> tpgid >> flags >> + minflt >> cminflt >> majflt >> cmajflt >> utime >> stime >> cutime >> cstime >> priority >> + nice >> O >> itrealvalue >> starttime >> vsize >> rss; // don't care about the rest stat_stream.close(); - long page_size_kb = sysconf(_SC_PAGE_SIZE) / 1024; // for x86-64 is configured to use 2MB pages + long page_size_kb = sysconf(_SC_PAGE_SIZE) / 1024; // for x86-64 is configured to use 2MB pages vm_usage = vsize / 1024.0; resident_set = rss * page_size_kb; } @@ -57,7 +57,8 @@ int get_resident_mem_kb() { void print_mem() { double vm, rss; mem_usage(vm, rss); - cout << " Virtual Memory: " << vm << " KB" << std::endl << " Resident set size: " << rss << " KB" << endl; + cout << " Virtual Memory: " << vm << " KB" << std::endl + << " Resident set size: " << rss << " KB" 
<< endl; } #elif defined(_MSC_VER) @@ -67,8 +68,9 @@ int get_resident_mem_kb() { void print_mem() { double vm = 0, rss = 0; - //mem_usage(vm, rss); - cout << " Virtual Memory: " << vm << " KB" << std::endl << " Resident set size: " << rss << " KB" << endl; + // mem_usage(vm, rss); + cout << " Virtual Memory: " << vm << " KB" << std::endl + << " Resident set size: " << rss << " KB" << endl; } #endif @@ -76,17 +78,18 @@ auto start_timer() { return std::chrono::steady_clock::now(); } -template -void end_timer(T start, const char *prefix) { +template +void end_timer(T start, const char* prefix) { auto end = std::chrono::steady_clock::now(); std::chrono::duration elapsed_seconds1 = end - start; - std::cout << "elapsed time " << prefix << " = " << elapsed_seconds1.count() << " s" << std::endl; + std::cout << "elapsed time " << prefix << " = " << elapsed_seconds1.count() << " s" + << std::endl; } // Disabled for cmake CI builds because it always fails #if !defined(SKIP_TEST_MEMORY_LEAKS) TEST(PhTreeTestIssues, TestIssue60) { - //auto tree = PhTreeMultiMapD<2, int>(); + // auto tree = PhTreeMultiMapD<2, int>(); auto tree = PhTreeMultiMapD<2, int, ConverterIEEE<2>, std::set>(); std::vector> vecPos; int dim = 1000; @@ -94,18 +97,19 @@ TEST(PhTreeTestIssues, TestIssue60) { auto start1 = start_timer(); for (int i = 0; i < num; ++i) { - PhPointD<2> p = {(double) (rand() % dim), (double) (rand() % dim)}; + PhPointD<2> p = {(double)(rand() % dim), (double)(rand() % dim)}; vecPos.push_back(p); tree.emplace(p, i); } end_timer(start1, "1"); // "warm up": relocate() will inevitably allocate a little bit of memory (new nodes etc). - // This warm up allocates this memory before we proceed to leak testing which ensures that the memory does not grow. + // This warm up allocates this memory before we proceed to leak testing which ensures that the + // memory does not grow. 
for (int j = 0; j < 100; ++j) { for (int i = 0; i < num; ++i) { - PhPointD<2> &p = vecPos[i]; - PhPointD<2> newp = {(double) (rand() % dim), (double) (rand() % dim)}; + PhPointD<2>& p = vecPos[i]; + PhPointD<2> newp = {(double)(rand() % dim), (double)(rand() % dim)}; tree.relocate(p, newp, i); p = newp; } @@ -117,8 +121,8 @@ TEST(PhTreeTestIssues, TestIssue60) { auto mem_start_2 = get_resident_mem_kb(); for (int j = 0; j < 100; ++j) { for (int i = 0; i < num; ++i) { - PhPointD<2> &p = vecPos[i]; - PhPointD<2> newp = {(double) (rand() % dim), (double) (rand() % dim)}; + PhPointD<2>& p = vecPos[i]; + PhPointD<2> newp = {(double)(rand() % dim), (double)(rand() % dim)}; tree.relocate(p, newp, i); p = newp; } @@ -134,7 +138,7 @@ TEST(PhTreeTestIssues, TestIssue60) { // Disabled for cmake CI builds because it always fails #if !defined(SKIP_TEST_MEMORY_LEAKS) TEST(PhTreeTestIssues, TestIssue60_minimal) { - //auto tree = PhTreeMultiMapD<2, int>(); + // auto tree = PhTreeMultiMapD<2, int>(); auto tree = PhTreeMultiMapD<2, int, ConverterIEEE<2>, std::set>(); std::vector> vecPos; int dim = 1000; @@ -142,18 +146,19 @@ TEST(PhTreeTestIssues, TestIssue60_minimal) { auto start1 = start_timer(); for (int i = 0; i < num; ++i) { - PhPointD<2> p = {(double) (rand() % dim), (double) (rand() % dim)}; + PhPointD<2> p = {(double)(rand() % dim), (double)(rand() % dim)}; vecPos.push_back(p); tree.emplace(p, i); } end_timer(start1, "1"); // "warm up": relocate() will inevitably allocate a little bit of memory (new nodes etc). - // This warm up allocates this memory before we proceed to leak testing which ensures that the memory does not grow. + // This warm up allocates this memory before we proceed to leak testing which ensures that the + // memory does not grow. 
for (int j = 0; j < 100; ++j) { for (int i = 0; i < num; ++i) { - PhPointD<2> &p = vecPos[i]; - PhPointD<2> newp = {(double) (rand() % dim), (double) (rand() % dim)}; + PhPointD<2>& p = vecPos[i]; + PhPointD<2> newp = {(double)(rand() % dim), (double)(rand() % dim)}; tree.relocate(p, newp, i); p = newp; } @@ -165,7 +170,7 @@ TEST(PhTreeTestIssues, TestIssue60_minimal) { auto mem_start_2 = get_resident_mem_kb(); for (int j = 0; j < 100; ++j) { for (int i = 0; i < num; ++i) { - PhPointD<2> &p = vecPos[i]; + PhPointD<2>& p = vecPos[i]; PhPointD<2> newp = {p[0] + 1, p[1] + 1}; tree.relocate(p, newp, i); p = newp; @@ -186,7 +191,7 @@ TEST(PhTreeTestIssues, TestIssue6_3_MAP) { int num = 100000; for (int i = 0; i < num; ++i) { - PhPointD<2> p = {(double) (rand() % dim), (double) (rand() % dim)}; + PhPointD<2> p = {(double)(rand() % dim), (double)(rand() % dim)}; vecPos.push_back(p); tree.emplace(p, i); } @@ -194,10 +199,10 @@ TEST(PhTreeTestIssues, TestIssue6_3_MAP) { print_mem(); for (int i = 0; i < num; ++i) { PhPointD<2> p = vecPos[i]; - PhPointD<2> newp = {(double) (rand() % dim), (double) (rand() % dim)}; + PhPointD<2> newp = {(double)(rand() % dim), (double)(rand() % dim)}; tree.relocate(p, newp); } print_mem(); } - +} // namespace phtree_test_issues diff --git a/test/phtree_test_ptr_values.cc b/test/phtree_test_ptr_values.cc index 9ab74cf3..6368b477 100644 --- a/test/phtree_test_ptr_values.cc +++ b/test/phtree_test_ptr_values.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace phtree_test_ptr_values { + template using TestPoint = PhPoint; @@ -780,3 +782,5 @@ TEST(PhTreeTestPtr, TestKnnQuery) { } depopulate(values); } + +} // namespace phtree_test_ptr_values diff --git a/test/phtree_test_unique_ptr_values.cc b/test/phtree_test_unique_ptr_values.cc index 6a790304..1be2bc0a 100644 --- a/test/phtree_test_unique_ptr_values.cc +++ b/test/phtree_test_unique_ptr_values.cc @@ -20,6 +20,8 @@ using namespace improbable::phtree; +namespace 
phtree_test_unique_ptr_values { + template using TestPoint = PhPoint; @@ -295,3 +297,5 @@ TEST(PhTreeTestUniquePtr, TestUpdateWithRelocateIf) { ASSERT_EQ(1, (*tree.find(points[1]))->_i); ASSERT_EQ(1u, tree.size()); } + +} // namespace phtree_test_unique_ptr_values diff --git a/tools/bazel b/tools/bazel deleted file mode 100755 index 03324532..00000000 --- a/tools/bazel +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env bash - -TOOLS_DIR="$(dirname "$0")" - -source "${TOOLS_DIR}"/../ci/includes/os.sh -source "${TOOLS_DIR}"/../ci/includes/bazel.sh - -# All information required for the script to select or, if necessary, install bazel is contained -# in this code block. -# If a higher version of bazel is required, update `REQUIRED_BAZEL_VERSION` and the -# `REQUIRED_BAZEL_SHA256` values for each platform. -REQUIRED_BAZEL_VERSION="$(getBazelVersion)" -BAZEL_INSTALLATION_DIR="${HOME}/.bazel_installations/${REQUIRED_BAZEL_VERSION}" -if isLinux; then - DOWNLOAD_CMD="wget -q --no-clobber -O bazel" - BAZEL_EXE="bazel-${REQUIRED_BAZEL_VERSION}-linux-x86_64" - - if which clang-10 1>/dev/null; then - # We follow the symlink of clang-10 here to avoid a bug with the LLVM package when combined with -no-canonical-prefixes. - export CC="$(readlink -f "$(which clang-10)")" - else - echo -e "\033[0;33mWarning: You don't seem to have clang-9 correctly installed. Please check README.md to ensure your compiler is set up correctly. Continuing with whatever compiler bazel detects, your mileage might vary.\033[0m" - fi -elif isMacOS; then - DOWNLOAD_CMD="wget -q --no-clobber -O bazel" - BAZEL_EXE="bazel-${REQUIRED_BAZEL_VERSION}-darwin-x86_64" -else - DOWNLOAD_CMD="curl -L -s -o bazel.exe" - # Windows does not have an installer but retrieves the executable directly. - BAZEL_EXE="bazel-${REQUIRED_BAZEL_VERSION}-windows-x86_64.exe" - - export BAZEL_VC="C:\Program Files (x86)\Microsoft Visual Studio\2019\BuildTools\VC" - if [[ ! 
-d "$BAZEL_VC" ]]; then - export BAZEL_VC="C:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\VC" - fi - if [[ ! -d "$BAZEL_VC" ]]; then - echo -e "\033[0;33mWarning: You don't seem to have Visual Studio 2019 installed correctly. Continuing with whatever compiler bazel detects, your mileage might vary.\033[0m" - fi -fi - -BAZEL_TARGET_PATH="${BAZEL_INSTALLATION_DIR}/bin/bazel" - -# Check if correct version is already installed. -if [[ -f "${BAZEL_TARGET_PATH}" ]]; then - if [[ ! -x "${BAZEL_TARGET_PATH}" ]]; then - echo "ERROR: Bazel executable at '${BAZEL_TARGET_PATH}' does not have execute permission" - stat "${BAZEL_TARGET_PATH}" - exit 1 - fi - BAZEL_SUBCOMMAND="$1" - shift - exec -a "$0" "${BAZEL_TARGET_PATH}" "$BAZEL_SUBCOMMAND" "$@" -fi - -cat << EOM -================================================= -Bazel version ${REQUIRED_BAZEL_VERSION} is not -installed under ~/.bazel_installations - -Installing bazel ${REQUIRED_BAZEL_VERSION} now... -================================================= -EOM - -# Create root directory if needed. -if [[ ! -d "${BAZEL_INSTALLATION_DIR}" ]]; then - echo "Installation directory created." - mkdir -p "${BAZEL_INSTALLATION_DIR}" -fi - -# Install correct bazel version. -# If we don't have a local Bazel install at this point we need to retrieve the right version from GitHub. -mkdir -p "${BAZEL_INSTALLATION_DIR}/bin/tmp" -pushd "${BAZEL_INSTALLATION_DIR}/bin/tmp" -rm bazel 2>/dev/null || true # Remove bazel binary if already present in tmp dir - indicates previous failed download. -echo "Starting download of bazel ${REQUIRED_BAZEL_VERSION}..." -${DOWNLOAD_CMD} "https://github.com/bazelbuild/bazel/releases/download/${REQUIRED_BAZEL_VERSION}/${BAZEL_EXE}" -echo "Download finished." -# Mark downloaded file executable and move out of tmp directory. -chmod a+x "bazel" -mv bazel .. -popd - -echo "Executing downloaded bazel..." 
-BAZEL_SUBCOMMAND="$1" -shift -exec -a "$0" "${BAZEL_TARGET_PATH}" "$BAZEL_SUBCOMMAND" "$@" diff --git a/tools/build_rules/BUILD b/tools/build_rules/BUILD deleted file mode 100644 index e69de29b..00000000 diff --git a/tools/build_rules/http.bzl b/tools/build_rules/http.bzl deleted file mode 100644 index 26e5ba2f..00000000 --- a/tools/build_rules/http.bzl +++ /dev/null @@ -1,463 +0,0 @@ -# Copyright 2016 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Rules for downloading files and archives over HTTP. -### Setup -To use these rules, load them in your `WORKSPACE` file as follows: -```python -load( - "@bazel_tools//tools/build_defs/repo:http.bzl", - "http_archive", - "http_file", - "http_jar", -) -``` -These rules are improved versions of the native http rules and will eventually -replace the native rules. -""" - -load( - ":utils.bzl", - "patch", - "read_netrc", - "update_attrs", - "use_netrc", - "workspace_and_buildfile", -) - -# Shared between http_jar, http_file and http_archive. -_AUTH_PATTERN_DOC = """An optional dict mapping host names to custom authorization patterns. -If a URL's host name is present in this dict the value will be used as a pattern when -generating the authorization header for the http request. This enables the use of custom -authorization schemes used in a lot of common cloud storage providers. 
-The pattern currently supports 2 tokens: <login> and -<password>, which are replaced with their equivalent value -in the netrc file for the same host name. After formatting, the result is set -as the value for the Authorization field of the HTTP request. -Example attribute and netrc for a http download to an oauth2 enabled API using a bearer token: -
-auth_patterns = {
-    "storage.cloudprovider.com": "Bearer <password>"
-}
-
-netrc: -
-machine storage.cloudprovider.com
-        password RANDOM-TOKEN
-
-The final HTTP request would have the following header: -
-Authorization: Bearer RANDOM-TOKEN
-
-""" - -def _get_auth(ctx, urls): - """Given the list of URLs obtain the correct auth dict.""" - if ctx.attr.netrc: - netrc = read_netrc(ctx, ctx.attr.netrc) - return use_netrc(netrc, urls, ctx.attr.auth_patterns) - - if "HOME" in ctx.os.environ and not ctx.os.name.startswith("windows"): - netrcfile = "%s/.netrc" % (ctx.os.environ["HOME"]) - if ctx.execute(["test", "-f", netrcfile]).return_code == 0: - netrc = read_netrc(ctx, netrcfile) - return use_netrc(netrc, urls, ctx.attr.auth_patterns) - - if "USERPROFILE" in ctx.os.environ and ctx.os.name.startswith("windows"): - netrcfile = "%s/.netrc" % (ctx.os.environ["USERPROFILE"]) - if ctx.path(netrcfile).exists: - netrc = read_netrc(ctx, netrcfile) - return use_netrc(netrc, urls, ctx.attr.auth_patterns) - - return {} - -def _http_archive_impl(ctx): - """Implementation of the http_archive rule.""" - if not ctx.attr.url and not ctx.attr.urls: - fail("At least one of url and urls must be provided") - if ctx.attr.build_file and ctx.attr.build_file_content: - fail("Only one of build_file and build_file_content can be provided.") - - all_urls = [] - if ctx.attr.urls: - all_urls = ctx.attr.urls - if ctx.attr.url: - all_urls = [ctx.attr.url] + all_urls - - auth = _get_auth(ctx, all_urls) - - download_info = ctx.download_and_extract( - all_urls, - "", - ctx.attr.sha256, - ctx.attr.type, - ctx.attr.strip_prefix, - canonical_id = ctx.attr.canonical_id, - auth = auth, - ) - workspace_and_buildfile(ctx) - patch(ctx) - - return update_attrs(ctx.attr, _http_archive_attrs.keys(), {"sha256": download_info.sha256}) - -_HTTP_FILE_BUILD = """ -package(default_visibility = ["//visibility:public"]) -filegroup( - name = "file", - srcs = ["{}"], -) -""" - -def _http_file_impl(ctx): - """Implementation of the http_file rule.""" - repo_root = ctx.path(".") - forbidden_files = [ - repo_root, - ctx.path("WORKSPACE"), - ctx.path("BUILD"), - ctx.path("BUILD.bazel"), - ctx.path("file/BUILD"), - ctx.path("file/BUILD.bazel"), - ] - 
downloaded_file_path = ctx.attr.downloaded_file_path - download_path = ctx.path("file/" + downloaded_file_path) - if download_path in forbidden_files or not str(download_path).startswith(str(repo_root)): - fail("'%s' cannot be used as downloaded_file_path in http_file" % ctx.attr.downloaded_file_path) - auth = _get_auth(ctx, ctx.attr.urls) - download_info = ctx.download( - ctx.attr.urls, - "file/" + downloaded_file_path, - ctx.attr.sha256, - ctx.attr.executable, - canonical_id = ctx.attr.canonical_id, - auth = auth, - ) - ctx.file("WORKSPACE", "workspace(name = \"{name}\")".format(name = ctx.name)) - ctx.file("file/BUILD", _HTTP_FILE_BUILD.format(downloaded_file_path)) - - return update_attrs(ctx.attr, _http_file_attrs.keys(), {"sha256": download_info.sha256}) - -_HTTP_JAR_BUILD = """ -load("@rules_java//java:defs.bzl", "java_import") -package(default_visibility = ["//visibility:public"]) -java_import( - name = 'jar', - jars = ['downloaded.jar'], - visibility = ['//visibility:public'], -) -filegroup( - name = 'file', - srcs = ['downloaded.jar'], - visibility = ['//visibility:public'], -) -""" - -def _http_jar_impl(ctx): - """Implementation of the http_jar rule.""" - all_urls = [] - if ctx.attr.urls: - all_urls = ctx.attr.urls - if ctx.attr.url: - all_urls = [ctx.attr.url] + all_urls - auth = _get_auth(ctx, all_urls) - download_info = ctx.download( - all_urls, - "jar/downloaded.jar", - ctx.attr.sha256, - canonical_id = ctx.attr.canonical_id, - auth = auth, - ) - ctx.file("WORKSPACE", "workspace(name = \"{name}\")".format(name = ctx.name)) - ctx.file("jar/BUILD", _HTTP_JAR_BUILD) - return update_attrs(ctx.attr, _http_jar_attrs.keys(), {"sha256": download_info.sha256}) - -_http_archive_attrs = { - "url": attr.string( - doc = - """A URL to a file that will be made available to Bazel. -This must be a file, http or https URL. Redirections are followed. -Authentication is not supported. -This parameter is to simplify the transition from the native http_archive -rule. 
More flexibility can be achieved by the urls parameter that allows -to specify alternative URLs to fetch from. -""", - ), - "urls": attr.string_list( - doc = - """A list of URLs to a file that will be made available to Bazel. -Each entry must be a file, http or https URL. Redirections are followed. -Authentication is not supported.""", - ), - "sha256": attr.string( - doc = """The expected SHA-256 of the file downloaded. -This must match the SHA-256 of the file downloaded. _It is a security risk -to omit the SHA-256 as remote files can change._ At best omitting this -field will make your build non-hermetic. It is optional to make development -easier but should be set before shipping.""", - ), - "netrc": attr.string( - doc = "Location of the .netrc file to use for authentication", - ), - "auth_patterns": attr.string_dict( - doc = _AUTH_PATTERN_DOC, - ), - "canonical_id": attr.string( - doc = """A canonical id of the archive downloaded. -If specified and non-empty, bazel will not take the archive from cache, -unless it was added to the cache by a request with the same canonical id. -""", - ), - "strip_prefix": attr.string( - doc = """A directory prefix to strip from the extracted files. -Many archives contain a top-level directory that contains all of the useful -files in archive. Instead of needing to specify this prefix over and over -in the `build_file`, this field can be used to strip it from all of the -extracted files. -For example, suppose you are using `foo-lib-latest.zip`, which contains the -directory `foo-lib-1.2.3/` under which there is a `WORKSPACE` file and are -`src/`, `lib/`, and `test/` directories that contain the actual code you -wish to build. Specify `strip_prefix = "foo-lib-1.2.3"` to use the -`foo-lib-1.2.3` directory as your top-level directory. -Note that if there are files outside of this directory, they will be -discarded and inaccessible (e.g., a top-level license file). 
This includes -files/directories that start with the prefix but are not in the directory -(e.g., `foo-lib-1.2.3.release-notes`). If the specified prefix does not -match a directory in the archive, Bazel will return an error.""", - ), - "type": attr.string( - doc = """The archive type of the downloaded file. -By default, the archive type is determined from the file extension of the -URL. If the file has no extension, you can explicitly specify one of the -following: `"zip"`, `"jar"`, `"war"`, `"tar"`, `"tar.gz"`, `"tgz"`, -`"tar.xz"`, or `tar.bz2`.""", - ), - "patches": attr.label_list( - default = [], - doc = - "A list of files that are to be applied as patches after " + - "extracting the archive. By default, it uses the Bazel-native patch implementation " + - "which doesn't support fuzz match and binary patch, but Bazel will fall back to use " + - "patch command line tool if `patch_tool` attribute is specified or there are " + - "arguments other than `-p` in `patch_args` attribute.", - ), - "patch_tool": attr.string( - default = "", - doc = "The patch(1) utility to use. If this is specified, Bazel will use the specifed " + - "patch tool instead of the Bazel-native patch implementation.", - ), - "patch_args": attr.string_list( - default = ["-p0"], - doc = - "The arguments given to the patch tool. Defaults to -p0, " + - "however -p1 will usually be needed for patches generated by " + - "git. If multiple -p arguments are specified, the last one will take effect." + - "If arguments other than -p are specified, Bazel will fall back to use patch " + - "command line tool instead of the Bazel-native patch implementation. 
When falling " + - "back to patch command line tool and patch_tool attribute is not specified, " + - "`patch` will be used.", - ), - "patch_cmds": attr.string_list( - default = [], - doc = "Sequence of Bash commands to be applied on Linux/Macos after patches are applied.", - ), - "patch_cmds_win": attr.string_list( - default = [], - doc = "Sequence of Powershell commands to be applied on Windows after patches are " + - "applied. If this attribute is not set, patch_cmds will be executed on Windows, " + - "which requires Bash binary to exist.", - ), - "build_file": attr.label( - allow_single_file = True, - doc = - "The file to use as the BUILD file for this repository." + - "This attribute is an absolute label (use '@//' for the main " + - "repo). The file does not need to be named BUILD, but can " + - "be (something like BUILD.new-repo-name may work well for " + - "distinguishing it from the repository's actual BUILD files. " + - "Either build_file or build_file_content can be specified, but " + - "not both.", - ), - "build_file_content": attr.string( - doc = - "The content for the BUILD file for this repository. " + - "Either build_file or build_file_content can be specified, but " + - "not both.", - ), - "workspace_file": attr.label( - doc = - "The file to use as the `WORKSPACE` file for this repository. " + - "Either `workspace_file` or `workspace_file_content` can be " + - "specified, or neither, but not both.", - ), - "workspace_file_content": attr.string( - doc = - "The content for the WORKSPACE file for this repository. " + - "Either `workspace_file` or `workspace_file_content` can be " + - "specified, or neither, but not both.", - ), -} - -http_archive = repository_rule( - implementation = _http_archive_impl, - attrs = _http_archive_attrs, - doc = - """Downloads a Bazel repository as a compressed archive file, decompresses it, -and makes its targets available for binding. 
-It supports the following file extensions: `"zip"`, `"jar"`, `"war"`, `"tar"`, -`"tar.gz"`, `"tgz"`, `"tar.xz"`, and `tar.bz2`. -Examples: - Suppose the current repository contains the source code for a chat program, - rooted at the directory `~/chat-app`. It needs to depend on an SSL library - which is available from http://example.com/openssl.zip. This `.zip` file - contains the following directory structure: - ``` - WORKSPACE - src/ - openssl.cc - openssl.h - ``` - In the local repository, the user creates a `openssl.BUILD` file which - contains the following target definition: - ```python - cc_library( - name = "openssl-lib", - srcs = ["src/openssl.cc"], - hdrs = ["src/openssl.h"], - ) - ``` - Targets in the `~/chat-app` repository can depend on this target if the - following lines are added to `~/chat-app/WORKSPACE`: - ```python - load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") - http_archive( - name = "my_ssl", - urls = ["http://example.com/openssl.zip"], - sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - build_file = "@//:openssl.BUILD", - ) - ``` - Then targets would specify `@my_ssl//:openssl-lib` as a dependency. -""", -) - -_http_file_attrs = { - "executable": attr.bool( - doc = "If the downloaded file should be made executable.", - ), - "downloaded_file_path": attr.string( - default = "downloaded", - doc = "Path assigned to the file downloaded", - ), - "sha256": attr.string( - doc = """The expected SHA-256 of the file downloaded. -This must match the SHA-256 of the file downloaded. _It is a security risk -to omit the SHA-256 as remote files can change._ At best omitting this -field will make your build non-hermetic. It is optional to make development -easier but should be set before shipping.""", - ), - "canonical_id": attr.string( - doc = """A canonical id of the archive downloaded. 
-If specified and non-empty, bazel will not take the archive from cache, -unless it was added to the cache by a request with the same canonical id. -""", - ), - "urls": attr.string_list( - mandatory = True, - doc = """A list of URLs to a file that will be made available to Bazel. -Each entry must be a file, http or https URL. Redirections are followed. -Authentication is not supported.""", - ), - "netrc": attr.string( - doc = "Location of the .netrc file to use for authentication", - ), - "auth_patterns": attr.string_dict( - doc = _AUTH_PATTERN_DOC, - ), -} - -http_file = repository_rule( - implementation = _http_file_impl, - attrs = _http_file_attrs, - doc = - """Downloads a file from a URL and makes it available to be used as a file -group. -Examples: - Suppose you need to have a debian package for your custom rules. This package - is available from http://example.com/package.deb. Then you can add to your - WORKSPACE file: - ```python - load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file") - http_file( - name = "my_deb", - urls = ["http://example.com/package.deb"], - sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - ) - ``` - Targets would specify `@my_deb//file` as a dependency to depend on this file. -""", -) - -_http_jar_attrs = { - "sha256": attr.string( - doc = "The expected SHA-256 of the file downloaded.", - ), - "canonical_id": attr.string( - doc = """A canonical id of the archive downloaded. -If specified and non-empty, bazel will not take the archive from cache, -unless it was added to the cache by a request with the same canonical id. -""", - ), - "url": attr.string( - doc = - "The URL to fetch the jar from. It must end in `.jar`.", - ), - "urls": attr.string_list( - doc = - "A list of URLS the jar can be fetched from. 
They have to end " + - "in `.jar`.", - ), - "netrc": attr.string( - doc = "Location of the .netrc file to use for authentication", - ), - "auth_patterns": attr.string_dict( - doc = _AUTH_PATTERN_DOC, - ), -} - -http_jar = repository_rule( - implementation = _http_jar_impl, - attrs = _http_jar_attrs, - doc = - """Downloads a jar from a URL and makes it available as java_import -Downloaded files must have a .jar extension. -Examples: - Suppose the current repository contains the source code for a chat program, rooted at the - directory `~/chat-app`. It needs to depend on an SSL library which is available from - `http://example.com/openssl-0.2.jar`. - Targets in the `~/chat-app` repository can depend on this target if the following lines are - added to `~/chat-app/WORKSPACE`: - ```python - load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_jar") - http_jar( - name = "my_ssl", - url = "http://example.com/openssl-0.2.jar", - sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - ) - ``` - Targets would specify @my_ssl//jar as a dependency to depend on this jar. - You may also reference files on the current system (localhost) by using "file:///path/to/file" - if you are on Unix-based systems. If you're on Windows, use "file:///c:/path/to/file". In both - examples, note the three slashes (`/`) -- the first two slashes belong to `file://` and the third - one belongs to the absolute path to the file. -""", -) diff --git a/tools/build_rules/utils.bzl b/tools/build_rules/utils.bzl deleted file mode 100644 index b2a70051..00000000 --- a/tools/build_rules/utils.bzl +++ /dev/null @@ -1,322 +0,0 @@ -# Copyright 2018 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Utils for manipulating external repositories, once fetched. -### Setup -These utilities are intended to be used by other repository rules. They -can be loaded as follows. -```python -load( - "@bazel_tools//tools/build_defs/repo:utils.bzl", - "workspace_and_buildfile", - "patch", - "update_attrs", -) -``` -""" - -def workspace_and_buildfile(ctx): - """Utility function for writing WORKSPACE and, if requested, a BUILD file. - This rule is intended to be used in the implementation function of a - repository rule. - It assumes the parameters `name`, `build_file`, `build_file_content`, - `workspace_file`, and `workspace_file_content` to be - present in `ctx.attr`; the latter four possibly with value None. - Args: - ctx: The repository context of the repository rule calling this utility - function. 
- """ - if ctx.attr.build_file and ctx.attr.build_file_content: - ctx.fail("Only one of build_file and build_file_content can be provided.") - - if ctx.attr.workspace_file and ctx.attr.workspace_file_content: - ctx.fail("Only one of workspace_file and workspace_file_content can be provided.") - - if ctx.attr.workspace_file: - ctx.file("WORKSPACE", ctx.read(ctx.attr.workspace_file)) - elif ctx.attr.workspace_file_content: - ctx.file("WORKSPACE", ctx.attr.workspace_file_content) - else: - ctx.file("WORKSPACE", "workspace(name = \"{name}\")\n".format(name = ctx.name)) - - if ctx.attr.build_file: - ctx.file("BUILD.bazel", ctx.read(ctx.attr.build_file)) - elif ctx.attr.build_file_content: - ctx.file("BUILD.bazel", ctx.attr.build_file_content) - -def _is_windows(ctx): - return ctx.os.name.lower().find("windows") != -1 - -def _use_native_patch(patch_args): - """If patch_args only contains -p options, we can use the native patch implementation.""" - for arg in patch_args: - if not arg.startswith("-p"): - return False - return True - -def patch(ctx, patches = None, patch_cmds = None, patch_cmds_win = None, patch_tool = None, patch_args = None): - """Implementation of patching an already extracted repository. - This rule is intended to be used in the implementation function of - a repository rule. If the parameters `patches`, `patch_tool`, - `patch_args`, `patch_cmds` and `patch_cmds_win` are not specified - then they are taken from `ctx.attr`. - Args: - ctx: The repository context of the repository rule calling this utility - function. - patches: The patch files to apply. List of strings, Labels, or paths. - patch_cmds: Bash commands to run for patching, passed one at a - time to bash -c. List of strings - patch_cmds_win: Powershell commands to run for patching, passed - one at a time to powershell /c. List of strings. If the - boolean value of this parameter is false, patch_cmds will be - used and this parameter will be ignored. 
- patch_tool: Path of the patch tool to execute for applying - patches. String. - patch_args: Arguments to pass to the patch tool. List of strings. - """ - bash_exe = ctx.os.environ["BAZEL_SH"] if "BAZEL_SH" in ctx.os.environ else "bash" - powershell_exe = ctx.os.environ["BAZEL_POWERSHELL"] if "BAZEL_POWERSHELL" in ctx.os.environ else "powershell.exe" - - if patches == None and hasattr(ctx.attr, "patches"): - patches = ctx.attr.patches - if patches == None: - patches = [] - - if patch_cmds == None and hasattr(ctx.attr, "patch_cmds"): - patch_cmds = ctx.attr.patch_cmds - if patch_cmds == None: - patch_cmds = [] - - if patch_cmds_win == None and hasattr(ctx.attr, "patch_cmds_win"): - patch_cmds_win = ctx.attr.patch_cmds_win - if patch_cmds_win == None: - patch_cmds_win = [] - - if patch_tool == None and hasattr(ctx.attr, "patch_tool"): - patch_tool = ctx.attr.patch_tool - if not patch_tool: - patch_tool = "patch" - native_patch = True - else: - native_patch = False - - if patch_args == None and hasattr(ctx.attr, "patch_args"): - patch_args = ctx.attr.patch_args - if patch_args == None: - patch_args = [] - - if len(patches) > 0 or len(patch_cmds) > 0: - ctx.report_progress("Patching repository") - - if native_patch and _use_native_patch(patch_args): - if patch_args: - strip = int(patch_args[-1][2:]) - else: - strip = 0 - for patchfile in patches: - ctx.patch(patchfile, strip) - else: - for patchfile in patches: - command = "{patchtool} {patch_args} < {patchfile}".format( - patchtool = patch_tool, - patchfile = ctx.path(patchfile), - patch_args = " ".join([ - "'%s'" % arg - for arg in patch_args - ]), - ) - st = ctx.execute([bash_exe, "-c", command]) - if st.return_code: - fail("Error applying patch %s:\n%s%s" % - (str(patchfile), st.stderr, st.stdout)) - - if _is_windows(ctx) and patch_cmds_win: - for cmd in patch_cmds_win: - st = ctx.execute([powershell_exe, "/c", cmd]) - if st.return_code: - fail("Error applying patch command %s:\n%s%s" % - (cmd, st.stdout, 
st.stderr)) - else: - for cmd in patch_cmds: - st = ctx.execute([bash_exe, "-c", cmd]) - if st.return_code: - fail("Error applying patch command %s:\n%s%s" % - (cmd, st.stdout, st.stderr)) - -def update_attrs(orig, keys, override): - """Utility function for altering and adding the specified attributes to a particular repository rule invocation. - This is used to make a rule reproducible. - Args: - orig: dict of actually set attributes (either explicitly or implicitly) - by a particular rule invocation - keys: complete set of attributes defined on this rule - override: dict of attributes to override or add to orig - Returns: - dict of attributes with the keys from override inserted/updated - """ - result = {} - for key in keys: - if getattr(orig, key) != None: - result[key] = getattr(orig, key) - result["name"] = orig.name - result.update(override) - return result - -def maybe(repo_rule, name, **kwargs): - """Utility function for only adding a repository if it's not already present. - This is to implement safe repositories.bzl macro documented in - https://docs.bazel.build/versions/master/skylark/deploying.html#dependencies. - Args: - repo_rule: repository rule function. - name: name of the repository to create. - **kwargs: remaining arguments that are passed to the repo_rule function. - Returns: - Nothing, defines the repository when needed as a side-effect. - """ - if not native.existing_rule(name): - repo_rule(name = name, **kwargs) - -def read_netrc(ctx, filename): - """Utility function to parse at least a basic .netrc file. - Args: - ctx: The repository context of the repository rule calling this utility - function. - filename: the name of the .netrc file to read - Returns: - dict mapping a machine names to a dict with the information provided - about them - """ - contents = ctx.read(filename) - - # Parse the file. 
This is mainly a token-based update of a simple state - # machine, but we need to keep the line structure to correctly determine - # the end of a `macdef` command. - netrc = {} - currentmachinename = None - currentmachine = {} - macdef = None - currentmacro = "" - cmd = None - for line in contents.splitlines(): - if line.startswith("#"): - # Comments start with #. Ignore these lines. - continue - elif macdef: - # as we're in a macro, just determine if we reached the end. - if line: - currentmacro += line + "\n" - else: - # reached end of macro, add it - currentmachine[macdef] = currentmacro - macdef = None - currentmacro = "" - else: - # Essentially line.split(None) which starlark does not support. - tokens = [ - w.strip() - for w in line.split(" ") - if len(w.strip()) > 0 - ] - for token in tokens: - if cmd: - # we have a command that expects another argument - if cmd == "machine": - # a new machine definition was provided, so save the - # old one, if present - if not currentmachinename == None: - netrc[currentmachinename] = currentmachine - currentmachine = {} - currentmachinename = token - elif cmd == "macdef": - macdef = "macdef %s" % (token,) - # a new macro definition; the documentation says - # "its contents begin with the next .netrc line [...]", - # so should there really be tokens left in the current - # line, they're not part of the macro. - - else: - currentmachine[cmd] = token - cmd = None - elif token in [ - "machine", - "login", - "password", - "account", - "macdef", - ]: - # command takes one argument - cmd = token - elif token == "default": - # defines the default machine; again, store old machine - if not currentmachinename == None: - netrc[currentmachinename] = currentmachine - - # We use the empty string for the default machine, as that - # can never be a valid hostname ("default" could be, in the - # default search domain). 
- currentmachinename = "" - currentmachine = {} - else: - fail("Unexpected token '%s' while reading %s" % - (token, filename)) - if not currentmachinename == None: - netrc[currentmachinename] = currentmachine - return netrc - -def use_netrc(netrc, urls, patterns): - """Compute an auth dict from a parsed netrc file and a list of URLs. - Args: - netrc: a netrc file already parsed to a dict, e.g., as obtained from - read_netrc - urls: a list of URLs. - patterns: optional dict of url to authorization patterns - Returns: - dict suitable as auth argument for ctx.download; more precisely, the dict - will map all URLs where the netrc file provides login and password to a - dict containing the corresponding login, password and optional authorization pattern, - as well as the mapping of "type" to "basic" or "pattern". - """ - auth = {} - for url in urls: - schemerest = url.split("://", 1) - if len(schemerest) < 2: - continue - if not (schemerest[0] in ["http", "https"]): - # For other protocols, bazel currently does not support - # authentication. So ignore them. - continue - host = schemerest[1].split("/")[0].split(":")[0] - if not host in netrc: - continue - authforhost = netrc[host] - if host in patterns: - auth_dict = { - "type": "pattern", - "pattern": patterns[host], - } - - if "login" in authforhost: - auth_dict["login"] = authforhost["login"] - - if "password" in authforhost: - auth_dict["password"] = authforhost["password"] - - auth[url] = auth_dict - elif "login" in authforhost and "password" in authforhost: - auth[url] = { - "type": "basic", - "login": authforhost["login"], - "password": authforhost["password"], - } - - return auth