diff --git a/.bazelci/postsubmit.yml b/.bazelci/postsubmit.yml index 44f84e030323a6..9a8be2a69ffb2c 100644 --- a/.bazelci/postsubmit.yml +++ b/.bazelci/postsubmit.yml @@ -157,6 +157,7 @@ tasks: - build - test macos: + shards: 20 shell_commands: - sed -i.bak -e 's/^# android_sdk_repository/android_sdk_repository/' -e 's/^# android_ndk_repository/android_ndk_repository/' WORKSPACE.bzlmod @@ -173,7 +174,7 @@ tasks: test_flags: - "--config=ci-macos" # Fine tune the number of test jobs running in parallel to avoid timeout - - "--local_test_jobs=8" + - "--local_test_jobs=2" test_targets: - "//scripts/..." - "//src/main/starlark/tests/builtins_bzl/..." @@ -200,15 +201,21 @@ tasks: - "-//src/test/shell/bazel:jdeps_test" # https://github.com/bazelbuild/bazel/issues/21495 - "-//src/test/shell/bazel:srcs_test" - # Macs can't find python, so these fail: https://github.com/bazelbuild/bazel/issues/18776 - - "-//src/test/shell/bazel/android:android_instrumentation_test_integration_test" - - "-//src/test/shell/bazel/android:android_instrumentation_test_integration_test_with_head_android_tools" - - "-//src/test/shell/bazel/android:aapt_integration_test" - - "-//src/test/shell/bazel/android:aapt_integration_test_with_head_android_tools" + # Disable android tests since we are moving Android rules out of Bazel repo. + - "-//src/test/shell/bazel/android/..." + - "-//src/tools/android/java/com/google/devtools/build/android/..." + - "-//src/test/java/com/google/devtools/build/android/dexer:AllTests" + # ServerTests frequently runs into deadlocks on Intel Macs + - "-//src/test/java/com/google/devtools/build/lib/server:ServerTests" + # bazel_proto_library_test is timeout flaky on Intel Macs, which usually means a runtime of 2 hours or more + - "-//src/test/shell/bazel:bazel_proto_library_test" + # Takes too long on Intel VMs + - "-//src/test/shell/bazel:bazel_bootstrap_distfile_test" include_json_profile: - build - test macos_arm64: + shards: 5 shell_commands: - sed -i.bak -e 's/^# android_sdk_repository/android_sdk_repository/' -e 's/^# android_ndk_repository/android_ndk_repository/' WORKSPACE.bzlmod @@ -238,7 +245,7 @@ tasks: - "//tools/bash/..." # https://github.com/bazelbuild/bazel/issues/17410 - "-//src/test/java/com/google/devtools/build/lib/platform:SystemMemoryPressureEventTest" - # https://github.com/bazelbuild/bazel/issues/16521 & https://github.com/bazelbuild/bazel/issues/18776 + # Disable android tests since we are moving Android rules out of Bazel repo. - "-//src/test/shell/bazel/android/..." - "-//src/tools/android/java/com/google/devtools/build/android/..." - "-//src/test/java/com/google/devtools/build/android/dexer:AllTests" diff --git a/.bazelci/presubmit.yml b/.bazelci/presubmit.yml index 8d1af114adb3df..7ca87d910e6615 100644 --- a/.bazelci/presubmit.yml +++ b/.bazelci/presubmit.yml @@ -163,7 +163,7 @@ tasks: - build - test macos: - shards: 4 + shards: 20 shell_commands: - sed -i.bak -e 's/^# android_sdk_repository/android_sdk_repository/' -e 's/^# android_ndk_repository/android_ndk_repository/' WORKSPACE.bzlmod @@ -180,7 +180,7 @@ tasks: test_flags: - "--config=ci-macos" # Fine tune the number of test jobs running in parallel to avoid timeout - - "--local_test_jobs=8" + - "--local_test_jobs=2" test_targets: - "//scripts/..." - "//src/main/starlark/tests/builtins_bzl/..." 
@@ -208,18 +208,14 @@ tasks: - "-//src/test/shell/bazel:jdeps_test" # https://github.com/bazelbuild/bazel/issues/21495 - "-//src/test/shell/bazel:srcs_test" - # Macs can't find python, so these fail: https://github.com/bazelbuild/bazel/issues/18776 - - "-//src/test/shell/bazel/android:android_instrumentation_test_integration_test" - - "-//src/test/shell/bazel/android:android_instrumentation_test_integration_test_with_head_android_tools" - - "-//src/test/shell/bazel/android:aapt_integration_test" - - "-//src/test/shell/bazel/android:aapt_integration_test_with_head_android_tools" + # Disable android tests since we are moving Android rules out of Bazel repo. + - "-//src/test/shell/bazel/android/..." + - "-//src/tools/android/java/com/google/devtools/build/android/..." + - "-//src/test/java/com/google/devtools/build/android/dexer:AllTests" # Disable the top 50 most time-consuming tests on macOS Intel platform in presubmit. # Those tests are still covered in postsubmit and by macOS arm64 platform in presubmit. # To run any of the following test in presubmit, just comment out the corresponding line. - # TODO(pcloudy): Disable the android tests after enabling them on Apple Silicon platform. - "-//src/test/shell/bazel:bazel_bootstrap_distfile_test" - # - "-//src/test/shell/bazel/android:android_integration_test" - # - "-//src/test/shell/bazel/android:android_integration_test_with_head_android_tools" - "-//src/test/shell/bazel:bazel_proto_library_test" - "-//src/test/py/bazel:runfiles_test" - "-//src/test/shell/integration:loading_phase_tests" @@ -227,7 +223,6 @@ tasks: - "-//src/test/shell/integration:target_compatible_with_test" - "-//src/test/shell/integration:bazel_json_worker_test" - "-//src/test/shell/bazel:bazel_coverage_java_jdk21_toolchain_head_test" - # - "-//src/test/shell/bazel/android:aar_integration_test" - "-//src/test/shell/bazel:bazel_coverage_java_test" - "-//src/test/shell/bazel:bazel_java_test_jdk21_toolchain_head" - "-//src/test/shell/bazel:starlark_git_repository_test" @@ -244,11 +239,7 @@ tasks: - "-//src/test/shell/integration:sandboxing_test" - "-//src/test/shell/bazel:bazel_java_test" - "-//src/test/py/bazel:bzlmod_query_test" - # - "-//src/test/shell/bazel/android:desugarer_integration_test" - # - "-//src/test/shell/bazel/android:desugarer_integration_test_with_head_android_tools" - "-//src/test/py/bazel:bazel_external_repository_test" - # - "-//src/test/shell/bazel/android:resource_processing_integration_test" - # - "-//src/test/shell/bazel/android:aar_integration_test_with_head_android_tools" - "-//src/tools/singlejar:zip64_test" - "-//src/test/py/bazel:launcher_test" - "-//src/test/shell/integration:bazel_worker_test" @@ -258,14 +249,20 @@ tasks: - "-//src/test/shell/bazel:path_mapping_test" - "-//src/test/shell/integration:toolchain_test" - "-//src/test/shell/integration:execution_phase_tests" - # - "-//src/test/shell/bazel/android:resource_processing_integration_test_with_head_android_tools" - "-//src/test/shell/integration:aquery_test" - "-//src/test/py/bazel:mod_command_test" + # ServerTests frequently runs into deadlocks on Intel Macs + - "-//src/test/java/com/google/devtools/build/lib/server:ServerTests" + # bazel_proto_library_test is timeout flaky on Intel Macs, which usually means a runtime of 2 hours or more + - "-//src/test/shell/bazel:bazel_proto_library_test" + # Takes too long on Intel VMs + - "-//src/test/shell/bazel:bazel_bootstrap_distfile_test" + - "-//src/test/shell/bazel:bazel_bootstrap_distfile_tar_test" include_json_profile: - build - test 
macos_arm64: - shards: 2 + shards: 5 shell_commands: - sed -i.bak -e 's/^# android_sdk_repository/android_sdk_repository/' -e 's/^# android_ndk_repository/android_ndk_repository/' WORKSPACE.bzlmod @@ -296,7 +293,7 @@ tasks: - "//tools/bash/..." # https://github.com/bazelbuild/bazel/issues/17410 - "-//src/test/java/com/google/devtools/build/lib/platform:SystemMemoryPressureEventTest" - # https://github.com/bazelbuild/bazel/issues/16521 & https://github.com/bazelbuild/bazel/issues/18776 + # Disable android tests since we are moving Android rules out of Bazel repo. - "-//src/test/shell/bazel/android/..." - "-//src/tools/android/java/com/google/devtools/build/android/..." - "-//src/test/java/com/google/devtools/build/android/dexer:AllTests" diff --git a/.gitattributes b/.gitattributes index e2836a93745d1f..aa333751d1b86b 100644 --- a/.gitattributes +++ b/.gitattributes @@ -2,6 +2,13 @@ BUILD linguist-language=Python site/* linguist-documentation +# Github excludes files in 'build' directory from their search by default. +# This negates the exclusion. +# +# For more information, see: +# https://docs.github.com/en/search-github/searching-on-github/finding-files-on-github#customizing-excluded-files +**/build/** linguist-generated=false + # Files that should not use CRLF line endings, even on Windows. tools/genrule/genrule-setup.sh -text diff --git a/.github/ISSUE_TEMPLATE/incompatible_change.yml b/.github/ISSUE_TEMPLATE/incompatible_change.yml new file mode 100644 index 00000000000000..46ca69ad371609 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/incompatible_change.yml @@ -0,0 +1,92 @@ +name: Incompatible Change +description: Use this template to track the rollout of an incompatible change in Bazel +title: '[Incompatible Flag] OR [Incompatible Change Title]' +labels: ["untriaged", "incompatible-change"] +assignees: + - sgowroji + - iancha1992 + - satyanandak +body: + - type: markdown + attributes: + value: > + **Attention:** + Please read https://bazel.build/release/backward-compatibility + and https://bazel.build/contribute/breaking-changes for best practices of rolling out + incompatible changes in Bazel. + + - type: textarea + id: motivation + attributes: + label: Motivation + description: Describe the motivation behind this change. Why is this change needed? What problem does it solve? + validations: + required: true + + - type: textarea + id: description + attributes: + label: Description + description: Provide a clear and concise description of the incompatible change. Explain what the incompatible change is and what impact it will have. + validations: + required: true + + - type: input + id: incompatible_flag + attributes: + label: Incompatible Flag + description: The incompatible flag used to guard this change. If no incompatible flag is used, briefly explain why (e.g. minor incompatible change that's trivial to migrate for). + validations: + required: true + + - type: textarea + id: migration + attributes: + label: Migration Guide + description: Provide guidance on how users can migrate to the new behavior. Include examples, code snippets, or links to relevant documentation. + validations: + required: true + + - type: dropdown + id: bazel_version + attributes: + label: In which Bazel LTS version will this incompatible change be enabled? + multiple: false + options: + - Bazel 8 + - Bazel 9 + - Bazel 10 + + - type: textarea + id: additional_context + attributes: + label: Additional Context + description: Add any other context about the incompatible change here. 
+ + - type: textarea + id: todo_list + attributes: + label: TODO List + description: > + Choose a todo list based on whether incompatible flag is used, feel free to edit based on your needs. + + + With incompatible flag (highly recommend for incompatible changes having large impact): + + \- \[ \] Implement the incompatible change at Bazel HEAD and guard it behind a flag. + + \- \[ \] Backport the incompatible flag to the latest LTS release. + + \- \[ \] Test the incompatible change with [downstream projects](https://github.com/bazelbuild/continuous-integration/blob/master/docs/downstream-testing.md#bazel-downstream-testing) and inform broken projects. + + \- \[ \] Flip the incompatible flag at Bazel HEAD. + + \- \[ \] Delete the incompatible flag and the old behavior at Bazel HEAD. + + + Without incompatible flag (good for minor incompatible changes that are trivial to migrate for): + + \- \[ \] Implement the incompatible change at Bazel HEAD. + + \- \[ \] Check the [downstream pipeline](https://github.com/bazelbuild/continuous-integration/blob/master/docs/downstream-testing.md#bazel-downstream-testing) result and inform broken projects. + diff --git a/.github/workflows/cherry-picker.yml b/.github/workflows/cherry-picker.yml index 39ac0d991e7f11..c31f00a28a8d81 100644 --- a/.github/workflows/cherry-picker.yml +++ b/.github/workflows/cherry-picker.yml @@ -18,19 +18,19 @@ jobs: runs-on: ubuntu-latest steps: - name: Harden Runner - uses: step-security/harden-runner@f086349bfa2bd1361f7909c78558e816508cdc10 + uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 with: egress-policy: audit - if: github.event.pull_request name: Run cherrypicker on closed PR - uses: bazelbuild/continuous-integration/actions/cherry_picker@fcd3cdd216acf5b78449ae6c05da1c789f5ef76c + uses: bazelbuild/continuous-integration/actions/cherry_picker@ee5ea988681e086afabfe6677eef2dc1292f6b9d with: triggered-on: closed pr-number: ${{ github.event.number }} is-prod: True - if: github.event.issue name: Run cherrypicker on closed issue - uses: bazelbuild/continuous-integration/actions/cherry_picker@fcd3cdd216acf5b78449ae6c05da1c789f5ef76c + uses: bazelbuild/continuous-integration/actions/cherry_picker@ee5ea988681e086afabfe6677eef2dc1292f6b9d with: triggered-on: closed pr-number: ${{ github.event.issue.number }} @@ -40,12 +40,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Harden Runner - uses: step-security/harden-runner@f086349bfa2bd1361f7909c78558e816508cdc10 + uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 with: egress-policy: audit - if: startsWith(github.event.issue.body, 'Forked from') name: Run cherrypicker on comment - uses: bazelbuild/continuous-integration/actions/cherry_picker@fcd3cdd216acf5b78449ae6c05da1c789f5ef76c + uses: bazelbuild/continuous-integration/actions/cherry_picker@ee5ea988681e086afabfe6677eef2dc1292f6b9d with: triggered-on: commented pr-number: ${{ github.event.issue.body }} @@ -54,7 +54,7 @@ jobs: is-prod: True - if: startsWith(github.event.issue.body, '### Commit IDs') name: Run cherrypicker on demand - uses: bazelbuild/continuous-integration/actions/cherry_picker@fcd3cdd216acf5b78449ae6c05da1c789f5ef76c + uses: bazelbuild/continuous-integration/actions/cherry_picker@ee5ea988681e086afabfe6677eef2dc1292f6b9d with: triggered-on: ondemand milestone-title: ${{ github.event.milestone.title }} diff --git a/.github/workflows/issue-labeler.yml b/.github/workflows/issue-labeler.yml index 43064c2129453c..bb32c8287b2124 100644 --- 
a/.github/workflows/issue-labeler.yml +++ b/.github/workflows/issue-labeler.yml @@ -17,9 +17,9 @@ jobs: template: [ bug_report.yml, feature_request.yml ] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Harden Runner - uses: step-security/harden-runner@f086349bfa2bd1361f7909c78558e816508cdc10 # v2.8.0 + uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 # v2.8.1 with: egress-policy: audit diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index 4486c72928dd62..26ad422b327206 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Harden Runner - uses: step-security/harden-runner@f086349bfa2bd1361f7909c78558e816508cdc10 # v2.8.0 + uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 # v2.8.1 with: egress-policy: audit diff --git a/.github/workflows/release-helper.yml b/.github/workflows/release-helper.yml index abe600120da9ea..82f9a681da13ed 100644 --- a/.github/workflows/release-helper.yml +++ b/.github/workflows/release-helper.yml @@ -13,11 +13,11 @@ jobs: issues: write steps: - name: Harden Runner - uses: step-security/harden-runner@f086349bfa2bd1361f7909c78558e816508cdc10 # v2.8.0 + uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 # v2.8.1 with: egress-policy: audit - name: Run helper - uses: bazelbuild/continuous-integration/actions/release-helper@fcd3cdd216acf5b78449ae6c05da1c789f5ef76c # master + uses: bazelbuild/continuous-integration/actions/release-helper@ee5ea988681e086afabfe6677eef2dc1292f6b9d # master with: token: ${{ secrets.BAZEL_IO_TOKEN }} diff --git a/.github/workflows/remove-labels.yml b/.github/workflows/remove-labels.yml index e4fffc1ba702c5..c8dd206d6a2d86 100644 --- a/.github/workflows/remove-labels.yml +++ b/.github/workflows/remove-labels.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Harden Runner - uses: step-security/harden-runner@f086349bfa2bd1361f7909c78558e816508cdc10 # v2.8.0 + uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 # v2.8.1 with: egress-policy: audit diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 2589042f24139d..5f8ee821603d1c 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -32,12 +32,12 @@ jobs: steps: - name: Harden Runner - uses: step-security/harden-runner@f086349bfa2bd1361f7909c78558e816508cdc10 # v2.8.0 + uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 # v2.8.1 with: egress-policy: audit - name: "Checkout code" - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: persist-credentials: false @@ -72,6 +72,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. 
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@f079b8493333aace61c81488f8bd40919487bd9f # v3.25.7 + uses: github/codeql-action/upload-sarif@b611370bb5703a7efb587f9d136a52ea24c5c38c # v3.25.11 with: sarif_file: results.sarif diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index a2e31ce60b9364..e12d0265bc77f0 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Harden Runner - uses: step-security/harden-runner@f086349bfa2bd1361f7909c78558e816508cdc10 # v2.8.0 + uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 # v2.8.1 with: egress-policy: audit diff --git a/.github/workflows/update-lockfiles.yml b/.github/workflows/update-lockfiles.yml new file mode 100644 index 00000000000000..3a61fbfee79112 --- /dev/null +++ b/.github/workflows/update-lockfiles.yml @@ -0,0 +1,29 @@ +name: update-lockfiles + +on: + pull_request_target: + branches: + - "release-**" + types: + - closed + +permissions: + contents: read + +env: + GH_TOKEN: ${{ secrets.BAZEL_IO_TOKEN }} + +jobs: + update-lockfiles: + runs-on: ubuntu-latest + steps: + - name: Harden Runner + uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 + with: + egress-policy: audit + - name: Update lockfile(s) on closed PR + uses: bazelbuild/continuous-integration/actions/update-lockfile@122ce87694d0dd505a019321a04f8e64378bddbd + with: + release-branch: ${{ github.base_ref }} + is-prod: True + pr-number: ${{ github.event.number }} \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 9e81b5618d216e..506364b6b20a39 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,31 @@ +## Release 8.0.0-pre.20240701.1 (2024-07-12) + +``` +Baseline: 3f93d377d036d773fd505a18e084425a00fb94ea +``` + +Important changes: + + - Experimental support for path mapping `CppCompile` actions can be + enabled via + `--modify_execution_info=CppCompile=+supports-path-mapping`. + +This release contains contributions from many people at Google, as well as detailyang, Fabian Meumertzheim, Greg Roodt, hvd, Jason Schroeder, Laurent Le Brun, Simon Mavi Stewart, Xdng Yng. + +## Release 8.0.0-pre.20240618.2 (2024-07-02) + +``` +Baseline: 0c2ed165335327a2c5b534312324baa9ae6b6ebd + +Cherry picks: + + + 8d625289052b7d67fedbd50b1eb611deda4ea14a: + Automated rollback of commit + 17f6894346d2e200030dc08d131bf79a31a7c70c. +``` + +Initial release. 
+ ## Release 7.2.1 (2024-06-25) ``` diff --git a/MODULE.bazel b/MODULE.bazel index 9b66e5d037853f..147739eeeeafaf 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -24,7 +24,7 @@ bazel_dep(name = "blake3", version = "1.5.1.bcr.1") bazel_dep(name = "sqlite3", version = "3.42.0.bcr.1") bazel_dep(name = "zlib", version = "1.3") bazel_dep(name = "rules_cc", version = "0.0.9") -bazel_dep(name = "rules_java", version = "7.6.5") +bazel_dep(name = "rules_java", version = "7.7.0") bazel_dep(name = "rules_graalvm", version = "0.11.1") bazel_dep(name = "rules_proto", version = "6.0.0") bazel_dep(name = "rules_jvm_external", version = "6.0") diff --git a/MODULE.bazel.lock b/MODULE.bazel.lock index 90ebaff035bf2a..3144a9468da894 100644 --- a/MODULE.bazel.lock +++ b/MODULE.bazel.lock @@ -86,8 +86,8 @@ "https://bcr.bazel.build/modules/rules_java/6.4.0/MODULE.bazel": "e986a9fe25aeaa84ac17ca093ef13a4637f6107375f64667a15999f77db6c8f6", "https://bcr.bazel.build/modules/rules_java/7.3.2/MODULE.bazel": "50dece891cfdf1741ea230d001aa9c14398062f2b7c066470accace78e412bc2", "https://bcr.bazel.build/modules/rules_java/7.6.1/MODULE.bazel": "2f14b7e8a1aa2f67ae92bc69d1ec0fa8d9f827c4e17ff5e5f02e91caa3b2d0fe", - "https://bcr.bazel.build/modules/rules_java/7.6.5/MODULE.bazel": "481164be5e02e4cab6e77a36927683263be56b7e36fef918b458d7a8a1ebadb1", - "https://bcr.bazel.build/modules/rules_java/7.6.5/source.json": "a805b889531d1690e3c72a7a7e47a870d00323186a9904b36af83aa3d053ee8d", + "https://bcr.bazel.build/modules/rules_java/7.7.0/MODULE.bazel": "93a198f955574cdbb0e0b089faa3d3a76b6bfffc553c13bd8a96e162e179ac87", + "https://bcr.bazel.build/modules/rules_java/7.7.0/source.json": "529d8a0496f2fb3ca5d120011e66fb17bdc09017d95bed893768c98ff48c8bba", "https://bcr.bazel.build/modules/rules_jvm_external/4.4.2/MODULE.bazel": "a56b85e418c83eb1839819f0b515c431010160383306d13ec21959ac412d2fe7", "https://bcr.bazel.build/modules/rules_jvm_external/5.3/MODULE.bazel": "bf93870767689637164657731849fb887ad086739bd5d360d90007a581d5527d", "https://bcr.bazel.build/modules/rules_jvm_external/6.0/MODULE.bazel": "37c93a5a78d32e895d52f86a8d0416176e915daabd029ccb5594db422e87c495", diff --git a/site/en/docs/android-ndk.md b/site/en/docs/android-ndk.md index f386ff8ebeadcd..c3313c457fc393 100644 --- a/site/en/docs/android-ndk.md +++ b/site/en/docs/android-ndk.md @@ -191,7 +191,7 @@ OS constraint. To migrate the CPU constraint, check this chart: CPU Value | Platform ------------- | ------------------------------------------ -`armeabi-v7a` | `@platforms//cpu:arm` +`armeabi-v7a` | `@platforms//cpu:armv7` `arm64-v8a` | `@platforms//cpu:arm64` `x86` | `@platforms//cpu:x86_32` `x86_64` | `@platforms//cpu:x86_64` diff --git a/site/en/remote/creating.md b/site/en/remote/creating.md index 9469198f490e1e..dddf8307450671 100644 --- a/site/en/remote/creating.md +++ b/site/en/remote/creating.md @@ -70,11 +70,12 @@ The optional `sandbox_dir` field is used only by workers that support ### Work responses {:#work-responses} A `WorkResponse` contains a request id, a zero or nonzero exit code, and an -output string that describes any errors encountered in processing or executing -the request. The `output` field contains a short description; complete logs may -be written to the worker's `stderr`. Because workers may only write -`WorkResponses` to `stdout`, it's common for the worker to redirect the `stdout` -of any tools it uses to `stderr`. +output message describing any errors encountered in processing or executing +the request. 
A worker should capture the `stdout` and `stderr` of any tool it +calls and report them through the `WorkResponse`. Writing it to the `stdout` of +the worker process is unsafe, as it will interfere with the worker protocol. +Writing it to the `stderr` of the worker process is safe, but the result is +collected in a per-worker log file instead of ascribed to individual actions. ```json { diff --git a/site/en/versions/_project.yaml b/site/en/versions/_project.yaml deleted file mode 100644 index 8f6e5c6484092b..00000000000000 --- a/site/en/versions/_project.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Your product/API name (excluding "Google" when possible). This appears in the -# header for each page in your docset. -name: Bazel - -# An imperative sentence that says what developers can do with this product. -# This appears in the landing page header and search results. -description: > - Use the Bazel Open Source Project to scalably build and test massive, multi-language, multi-platform codebases. - -home_url: / -content_license: cc-apache -color: bazel-theme - -# Your Buganizer *component* where Googlers can file bugs about the docs. Not an -# individual bug ID. -buganizer_id: 1194991 # Dev Infrastructure > Build > Blaze > Open Source (Bazel) > Bazel Docs -keywords: -- product:Bazel diff --git a/src/MODULE.tools b/src/MODULE.tools index c8eafd40fec58e..0cd19e52d7ac6f 100644 --- a/src/MODULE.tools +++ b/src/MODULE.tools @@ -5,7 +5,7 @@ module(name = "bazel_tools") bazel_dep(name = "rules_cc", version = "0.0.9") -bazel_dep(name = "rules_java", version = "7.6.5") +bazel_dep(name = "rules_java", version = "7.7.0") bazel_dep(name = "rules_license", version = "0.0.3") bazel_dep(name = "rules_proto", version = "4.0.0") bazel_dep(name = "rules_python", version = "0.22.1") diff --git a/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/javac/plugins/dependency/StrictJavaDepsPlugin.java b/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/javac/plugins/dependency/StrictJavaDepsPlugin.java index dc3cd3da1c1961..cd1d1b81df75c9 100644 --- a/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/javac/plugins/dependency/StrictJavaDepsPlugin.java +++ b/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/javac/plugins/dependency/StrictJavaDepsPlugin.java @@ -262,7 +262,6 @@ private static class CheckingTreeScanner extends TreeScanner { private final Map> classpathOnlyDepPaths = new HashMap<>(); private final Name jspecifyAnnotationsPackage; - private final Name jspecifyNullnessPackage; public CheckingTreeScanner( DependencyModule dependencyModule, @@ -277,8 +276,7 @@ public CheckingTreeScanner( this.directDependenciesMap = dependencyModule.getExplicitDependenciesMap(); this.platformJars = platformJars; this.fileManager = fileManager; - jspecifyAnnotationsPackage = names.fromString("org.jspecify.annotations"); - jspecifyNullnessPackage = names.fromString("org.jspecify.nullness"); + this.jspecifyAnnotationsPackage = names.fromString("org.jspecify.annotations"); } Set getSeenClasses() { @@ -505,8 +503,7 @@ private NonPlatformJar getNonPlatformJar(ClassSymbol classSymbol, Set plat // Filter out classes from the system modules and bootclasspath if (path == null || platformJars.contains(path)) { // ...except the JSpecify annotations, which we treat specially. 
- if (classSymbol.packge().fullname.equals(jspecifyAnnotationsPackage) - || classSymbol.packge().fullname.equals(jspecifyNullnessPackage)) { + if (classSymbol.packge().fullname.equals(jspecifyAnnotationsPackage)) { Path classpathJar = findLookingOnlyInClasspath(classSymbol); return classpathJar != null ? NonPlatformJar.forClasspathJar(classpathJar) diff --git a/src/java_tools/import_deps_checker/java/com/google/devtools/build/importdeps/ClassCache.java b/src/java_tools/import_deps_checker/java/com/google/devtools/build/importdeps/ClassCache.java index dde1989f665f9e..447004b20b169a 100644 --- a/src/java_tools/import_deps_checker/java/com/google/devtools/build/importdeps/ClassCache.java +++ b/src/java_tools/import_deps_checker/java/com/google/devtools/build/importdeps/ClassCache.java @@ -357,9 +357,7 @@ private static ImmutableMap buildClassIndex( if (!name.endsWith(".class")) { return; // Not a class file. } - if (isBoot - && (name.startsWith("org/jspecify/annotations") - || name.startsWith("org/jspecify/nullness"))) { + if (isBoot && name.startsWith("org/jspecify/annotations")) { // For details on the JSpecify special case, see StrictJavaDepsPlugin. return; } diff --git a/src/main/java/com/google/devtools/build/lib/actions/Actions.java b/src/main/java/com/google/devtools/build/lib/actions/Actions.java index 3156abf8e04a8c..65c4b70f886638 100644 --- a/src/main/java/com/google/devtools/build/lib/actions/Actions.java +++ b/src/main/java/com/google/devtools/build/lib/actions/Actions.java @@ -40,12 +40,14 @@ public final class Actions { private static final GoogleLogger logger = GoogleLogger.forEnclosingClass(); - private static final Escaper PATH_ESCAPER = Escapers.builder() - .addEscape('_', "_U") - .addEscape('/', "_S") - .addEscape('\\', "_B") - .addEscape(':', "_C") - .build(); + private static final Escaper PATH_ESCAPER = + Escapers.builder() + .addEscape('_', "_U") + .addEscape('/', "_S") + .addEscape('\\', "_B") + .addEscape(':', "_C") + .addEscape('@', "_A") + .build(); private Actions() {} @@ -371,7 +373,11 @@ public static String escapedPath(String path) { * that no other label maps to this string. 
*/ public static String escapeLabel(Label label) { - return PATH_ESCAPER.escape(label.getPackageName() + ":" + label.getName()); + String path = label.getPackageName() + ":" + label.getName(); + if (!label.getRepository().isMain()) { + path = label.getRepository().getName() + "@" + path; + } + return PATH_ESCAPER.escape(path); } /** diff --git a/src/main/java/com/google/devtools/build/lib/actions/BUILD b/src/main/java/com/google/devtools/build/lib/actions/BUILD index 865b5b6961b2f6..66588843e33106 100644 --- a/src/main/java/com/google/devtools/build/lib/actions/BUILD +++ b/src/main/java/com/google/devtools/build/lib/actions/BUILD @@ -471,6 +471,7 @@ java_library( ":artifacts", "//src/main/java/com/google/devtools/build/lib/skyframe:detailed_exceptions", "//src/main/java/com/google/devtools/build/lib/util:detailed_exit_code", + "//src/main/java/com/google/devtools/build/lib/vfs", "//src/main/java/com/google/devtools/build/lib/vfs:pathfragment", "//src/main/protobuf:failure_details_java_proto", "//third_party:guava", diff --git a/src/main/java/com/google/devtools/build/lib/actions/FilesetOutputSymlink.java b/src/main/java/com/google/devtools/build/lib/actions/FilesetOutputSymlink.java index 4ef74d128d8b1c..2c118ffa48f069 100644 --- a/src/main/java/com/google/devtools/build/lib/actions/FilesetOutputSymlink.java +++ b/src/main/java/com/google/devtools/build/lib/actions/FilesetOutputSymlink.java @@ -53,11 +53,14 @@ public abstract class FilesetOutputSymlink { public abstract boolean isRelativeToExecRoot(); /** - * If this symlink points to a file inside a tree artifact, returns that file's {@linkplain - * Artifact#getParent parent} tree artifact. Otherwise, returns {@code null}. + * If this symlink points to a file inside a tree artifact, returns the exec path of that file's + * {@linkplain Artifact#getParent parent} tree artifact. Otherwise, returns {@code null}. + * + *
<p>
To simplify serialization, only the exec path is stored, not the whole {@link + * SpecialArtifact}. */ @Nullable - public abstract SpecialArtifact getEnclosingTreeArtifact(); + public abstract PathFragment getEnclosingTreeArtifactExecPath(); /** * Reconstitutes the original target path of this symlink. @@ -92,7 +95,11 @@ public static FilesetOutputSymlink createForTesting( public static FilesetOutputSymlink createAlreadyRelativizedForTesting( PathFragment name, PathFragment target, boolean isRelativeToExecRoot) { return createAlreadyRelativized( - name, target, HasDigest.EMPTY, isRelativeToExecRoot, /* enclosingTreeArtifact= */ null); + name, + target, + HasDigest.EMPTY, + isRelativeToExecRoot, + /* enclosingTreeArtifactExecPath= */ null); } /** @@ -124,8 +131,15 @@ public static FilesetOutputSymlink create( target = target.relativeTo(execRoot); isRelativeToExecRoot = true; } + PathFragment enclosingTreeArtifactExecPath; + if (enclosingTreeArtifact == null) { + enclosingTreeArtifactExecPath = null; + } else { + checkArgument(enclosingTreeArtifact.isTreeArtifact(), enclosingTreeArtifact); + enclosingTreeArtifactExecPath = enclosingTreeArtifact.getExecPath(); + } return createAlreadyRelativized( - name, target, metadata, isRelativeToExecRoot, enclosingTreeArtifact); + name, target, metadata, isRelativeToExecRoot, enclosingTreeArtifactExecPath); } /** @@ -137,11 +151,8 @@ public static FilesetOutputSymlink createAlreadyRelativized( PathFragment target, HasDigest metadata, boolean isRelativeToExecRoot, - @Nullable SpecialArtifact enclosingTreeArtifact) { - checkArgument( - enclosingTreeArtifact == null || enclosingTreeArtifact.isTreeArtifact(), - enclosingTreeArtifact); + @Nullable PathFragment enclosingTreeArtifactExecPath) { return new AutoValue_FilesetOutputSymlink( - name, target, metadata, isRelativeToExecRoot, enclosingTreeArtifact); + name, target, metadata, isRelativeToExecRoot, enclosingTreeArtifactExecPath); } } diff --git a/src/main/java/com/google/devtools/build/lib/actions/ImportantOutputHandler.java b/src/main/java/com/google/devtools/build/lib/actions/ImportantOutputHandler.java index 38dbb1098d1cbf..69ada7df5090eb 100644 --- a/src/main/java/com/google/devtools/build/lib/actions/ImportantOutputHandler.java +++ b/src/main/java/com/google/devtools/build/lib/actions/ImportantOutputHandler.java @@ -17,7 +17,10 @@ import com.google.devtools.build.lib.server.FailureDetails.FailureDetail; import com.google.devtools.build.lib.skyframe.DetailedException; import com.google.devtools.build.lib.util.DetailedExitCode; +import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; +import java.time.Duration; +import java.util.List; import java.util.Map; /** Context to be informed of top-level outputs and their runfiles. */ @@ -68,6 +71,27 @@ ImmutableMap processRunfilesAndGetLostArtifacts( InputMetadataProvider metadataProvider) throws ImportantOutputException, InterruptedException; + /** + * Informs this handler of outputs from a completed test attempt. + * + *
<p>
The given paths are under the exec root and are backed by an {@link + * com.google.devtools.build.lib.vfs.OutputService#createActionFileSystem action filesystem} if + * applicable. + * + *
<p>
Test outputs should never be lost. Test actions are not shareable across servers (see {@link + * Actions#dependsOnBuildId}), so outputs passed to this method come from a just-executed test + * action. + */ + void processTestOutputs(List testOutputs) + throws ImportantOutputException, InterruptedException; + + /** + * A threshold to pass to {@link + * com.google.devtools.build.lib.profiler.GoogleAutoProfilerUtils#logged(String, Duration)} for + * profiling {@link ImportantOutputHandler} operations. + */ + Duration LOG_THRESHOLD = Duration.ofMillis(100); + /** Represents an exception encountered during processing of important outputs. */ final class ImportantOutputException extends Exception implements DetailedException { private final FailureDetail failureDetail; diff --git a/src/main/java/com/google/devtools/build/lib/actions/ResourceManager.java b/src/main/java/com/google/devtools/build/lib/actions/ResourceManager.java index b164bb64ad478e..71e7c36ca12eca 100644 --- a/src/main/java/com/google/devtools/build/lib/actions/ResourceManager.java +++ b/src/main/java/com/google/devtools/build/lib/actions/ResourceManager.java @@ -488,7 +488,6 @@ void releaseResources(ResourceRequest request, @Nullable Worker worker) } } - // TODO (b/241066751) find better way to change resource ownership public void releaseResourceOwnership() { threadLocked.set(false); } diff --git a/src/main/java/com/google/devtools/build/lib/actions/cache/CompactPersistentActionCache.java b/src/main/java/com/google/devtools/build/lib/actions/cache/CompactPersistentActionCache.java index cb9ae97e453619..54bbe6471cd72f 100644 --- a/src/main/java/com/google/devtools/build/lib/actions/cache/CompactPersistentActionCache.java +++ b/src/main/java/com/google/devtools/build/lib/actions/cache/CompactPersistentActionCache.java @@ -268,20 +268,18 @@ private static CompactPersistentActionCache logAndThrowOrRecurse( if (message != null) { e = new IOException(message, e); } - logger.atWarning().withCause(e).log("Failed to load action cache"); + logger.atWarning().withCause(e).log( + "Failed to load action cache, corrupted files to %s/*.bad", cacheRoot); reporterForInitializationErrors.handle( Event.error( "Error during action cache initialization: " + e.getMessage() - + ". Corrupted files were renamed to '" - + cacheRoot - + "/*.bad'. " - + "Bazel will now reset action cache data, potentially causing rebuilds")); + + ". Data will be reset, potentially causing target rebuilds")); if (alreadyFoundCorruption) { throw e; } return create( - cacheRoot, clock, reporterForInitializationErrors, /*alreadyFoundCorruption=*/ true); + cacheRoot, clock, reporterForInitializationErrors, /* alreadyFoundCorruption= */ true); } /** @@ -310,6 +308,7 @@ private static void renameCorruptedFiles(Path cacheRoot) { } private static final String FAILURE_PREFIX = "Failed action cache referential integrity check: "; + /** Throws IOException if indexer contains no data or integrity check has failed. */ private static void validateIntegrity(int indexerSize, byte[] validationRecord) throws IOException { @@ -546,7 +545,9 @@ private static RemoteFileArtifactValue decodeRemoteMetadata( digest, size, locationIndex, expireAtEpochMilli, materializationExecPath); } - /** @return action data encoded as a byte[] array. */ + /** + * @return action data encoded as a byte[] array. 
+ */ private static byte[] encode(StringIndexer indexer, ActionCache.Entry entry) throws IOException { Preconditions.checkState(!entry.isCorrupted()); diff --git a/src/main/java/com/google/devtools/build/lib/analysis/AnalysisUtils.java b/src/main/java/com/google/devtools/build/lib/analysis/AnalysisUtils.java index aff6ab94683c78..f2dfa519191e4d 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/AnalysisUtils.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/AnalysisUtils.java @@ -138,16 +138,6 @@ public static Iterable f return Iterables.filter(prerequisites, target -> target.get(provider) != null); } - /** - * Returns the path of the associated manifest file for the path of a Fileset. Works for both - * exec paths and root relative paths. - */ - public static PathFragment getManifestPathFromFilesetPath(PathFragment filesetDir) { - PathFragment manifestDir = filesetDir.replaceName("_" + filesetDir.getBaseName()); - PathFragment outputManifestFrag = manifestDir.getRelative("MANIFEST"); - return outputManifestFrag; - } - /** * Returns a path fragment qualified by the rule name and unique fragment to disambiguate * artifacts produced from the source file appearing in multiple rules. diff --git a/src/main/java/com/google/devtools/build/lib/analysis/AspectResolutionHelpers.java b/src/main/java/com/google/devtools/build/lib/analysis/AspectResolutionHelpers.java index 220a9cab9fb073..b8ea56388c13df 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/AspectResolutionHelpers.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/AspectResolutionHelpers.java @@ -23,6 +23,7 @@ import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData; +import com.google.devtools.build.lib.skyframe.toolchains.UnloadedToolchainContext; import java.util.ArrayList; import javax.annotation.Nullable; @@ -48,7 +49,17 @@ private AspectResolutionHelpers() {} * #computeAspectCollection}. */ public static ImmutableList computePropagatingAspects( - DependencyKind kind, ImmutableList aspectsPath, Rule rule) { + DependencyKind kind, + ImmutableList aspectsPath, + Rule rule, + @Nullable ToolchainCollection baseTargetToolchainContext) { + if (DependencyKind.isBaseTargetToolchain(kind)) { + return computePropagatingAspectsToToolchainDep( + (DependencyKind.BaseTargetToolchainDependencyKind) kind, + aspectsPath, + baseTargetToolchainContext); + } + Attribute attribute = kind.getAttribute(); if (attribute == null) { return ImmutableList.of(); @@ -59,6 +70,42 @@ public static ImmutableList computePropagatingAspects( return aspectsBuilder.build(); } + /** + * Compute the set of aspects propagating to the given {@link BaseTargetToolchainDependencyKind} + * based on the {@code toolchains_aspects} of each aspect in the {@code aspectsPath}. + */ + private static ImmutableList computePropagatingAspectsToToolchainDep( + DependencyKind.BaseTargetToolchainDependencyKind kind, + ImmutableList aspectsPath, + @Nullable ToolchainCollection baseTargetToolchainContext) { + var toolchainContext = baseTargetToolchainContext.getToolchainContext(kind.getExecGroupName()); + var toolchainType = + toolchainContext.requestedLabelToToolchainType().get(kind.getToolchainType()); + + // Since the label of the toolchain type can be an alias, we need to get all the labels that + // point to the same toolchain type to compare them against the toolchain types that the aspects + // can propagate. 
+ var allToolchainTypelabels = + toolchainContext.requestedLabelToToolchainType().asMultimap().inverse().get(toolchainType); + + var filteredAspectPath = new ArrayList(); + + int aspectsCount = aspectsPath.size(); + for (int i = aspectsCount - 1; i >= 0; i--) { + Aspect aspect = aspectsPath.get(i); + if (allToolchainTypelabels.stream() + .anyMatch(label -> aspect.getDefinition().canPropagateToToolchainType(label)) + || isAspectRequired(aspect, filteredAspectPath)) { + // Adds the aspect if it propagates to the toolchain type or it is + // required by an aspect already in the {@code filteredAspectPath}. + filteredAspectPath.add(aspect); + } + } + reverse(filteredAspectPath); + + return ImmutableList.copyOf(filteredAspectPath); + } + /** * Computes the way aspects should be computed for the direct dependencies. * diff --git a/src/main/java/com/google/devtools/build/lib/analysis/BUILD b/src/main/java/com/google/devtools/build/lib/analysis/BUILD index 5713b4f0334d53..e0c100c1ce7984 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/BUILD +++ b/src/main/java/com/google/devtools/build/lib/analysis/BUILD @@ -420,6 +420,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib/skyframe/serialization:visible-for-serialization", "//src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec", "//src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec:serialization-constant", + "//src/main/java/com/google/devtools/build/lib/skyframe/toolchains:unloaded_toolchain_context", "//src/main/java/com/google/devtools/build/lib/starlarkbuildapi", "//src/main/java/com/google/devtools/build/lib/starlarkbuildapi/config:configuration_transition_api", "//src/main/java/com/google/devtools/build/lib/starlarkbuildapi/core", @@ -739,6 +740,7 @@ java_library( name = "dependency_kind", srcs = ["DependencyKind.java"], deps = [ + "//src/main/java/com/google/devtools/build/lib/cmdline", "//src/main/java/com/google/devtools/build/lib/packages", "//src/main/java/com/google/devtools/build/lib/packages:exec_group", "//third_party:auto_value", @@ -1355,6 +1357,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib/actions:artifacts", "//src/main/java/com/google/devtools/build/lib/collect/nestedset", "//src/main/java/com/google/devtools/build/lib/events", + "//src/main/java/com/google/devtools/build/lib/profiler", "//third_party:guava", "//third_party:jsr305", ], diff --git a/src/main/java/com/google/devtools/build/lib/analysis/ConfiguredTargetFactory.java b/src/main/java/com/google/devtools/build/lib/analysis/ConfiguredTargetFactory.java index df2eb720828abd..9339b1ca412848 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/ConfiguredTargetFactory.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/ConfiguredTargetFactory.java @@ -602,6 +602,7 @@ public ConfiguredAspect createAspect( OrderedSetMultimap prerequisiteMap, ConfigConditions configConditions, @Nullable ToolchainCollection toolchainContexts, + @Nullable ToolchainCollection baseTargetToolchainContexts, @Nullable ExecGroupCollection.Builder execGroupCollectionBuilder, BuildConfigurationValue aspectConfiguration, @Nullable NestedSet transitivePackages, diff --git a/src/main/java/com/google/devtools/build/lib/analysis/DependencyKind.java b/src/main/java/com/google/devtools/build/lib/analysis/DependencyKind.java index eb5bbd3b99fb4c..2e8a1b55f972dd 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/DependencyKind.java +++ 
b/src/main/java/com/google/devtools/build/lib/analysis/DependencyKind.java @@ -15,6 +15,7 @@ import com.google.auto.value.AutoValue; import com.google.common.base.Preconditions; +import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.packages.AspectClass; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.ExecGroup; @@ -76,25 +77,44 @@ public String toString() { } } - /** A dependency for a toolchain context, identified by the execution group name. */ - @AutoValue - abstract class ToolchainDependencyKind implements DependencyKind { + /** + * Represents a dependency on toolchain context whether it's the entity (target or aspect) owned + * toolchain or the base target toolchain in case of aspects. + */ + interface ToolchainDependencyKind extends DependencyKind { @Override - public Attribute getAttribute() { + public default Attribute getAttribute() { return null; } @Nullable @Override - public AspectClass getOwningAspect() { + public default AspectClass getOwningAspect() { throw new IllegalStateException(); } /** The name of the execution group represented by this dependency kind. */ - public abstract String getExecGroupName(); + public String getExecGroupName(); /** Returns true if this toolchain dependency is for the default exec group. */ - public abstract boolean isDefaultExecGroup(); + public boolean isDefaultExecGroup(); + } + + /** + * A dependency of an entity (target or aspect) on a toolchain context, identified by the + * execution group name. + */ + @AutoValue + abstract class ToolchainDependencyKindImpl implements ToolchainDependencyKind {} + + /** + * A dependency for the aspect on its target's toolchain context, used for aspects propagating to + * toolchains, identified by the execution group name and the toolchain type. + */ + @AutoValue + abstract class BaseTargetToolchainDependencyKind implements ToolchainDependencyKind { + /** The toolchain type of the toolchain dependency. */ + public abstract Label getToolchainType(); } /** Returns a {@link DependencyKind} for the given execution group. */ @@ -102,15 +122,26 @@ static DependencyKind forExecGroup(String execGroupName) { if (ExecGroup.DEFAULT_EXEC_GROUP_NAME.equals(execGroupName)) { return defaultExecGroupToolchain(); } - return new AutoValue_DependencyKind_ToolchainDependencyKind(execGroupName, false); + return new AutoValue_DependencyKind_ToolchainDependencyKindImpl(execGroupName, false); } /** Returns a {@link DependencyKind} for the default execution group. */ static DependencyKind defaultExecGroupToolchain() { - return new AutoValue_DependencyKind_ToolchainDependencyKind( + return new AutoValue_DependencyKind_ToolchainDependencyKindImpl( ExecGroup.DEFAULT_EXEC_GROUP_NAME, true); } + /** Returns a {@link DependencyKind} for the given execution group. */ + static DependencyKind forBaseTargetExecGroup(String execGroupName, Label toolchainType) { + return new AutoValue_DependencyKind_BaseTargetToolchainDependencyKind( + execGroupName, execGroupName.equals(ExecGroup.DEFAULT_EXEC_GROUP_NAME), toolchainType); + } + + /** Predicate to check if a dependency represents an aspect's base target toolchain. */ + static boolean isBaseTargetToolchain(DependencyKind dependencyKind) { + return dependencyKind instanceof BaseTargetToolchainDependencyKind; + } + /** Predicate to check if a dependency represents a toolchain. 
*/ static boolean isToolchain(DependencyKind dependencyKind) { return dependencyKind instanceof ToolchainDependencyKind; diff --git a/src/main/java/com/google/devtools/build/lib/analysis/DependencyResolutionHelpers.java b/src/main/java/com/google/devtools/build/lib/analysis/DependencyResolutionHelpers.java index 916cf03e3a876b..7a5f7e6a1f5e9d 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/DependencyResolutionHelpers.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/DependencyResolutionHelpers.java @@ -48,6 +48,7 @@ import com.google.devtools.build.lib.packages.RuleClass; import com.google.devtools.build.lib.packages.Target; import com.google.devtools.build.lib.packages.Type; +import com.google.devtools.build.lib.skyframe.toolchains.UnloadedToolchainContext; import com.google.devtools.build.lib.util.OrderedSetMultimap; import java.util.HashSet; import java.util.List; @@ -91,7 +92,8 @@ public static DependencyLabels computeDependencyLabels( TargetAndConfiguration node, ImmutableList aspects, ImmutableMap configConditions, - @Nullable ToolchainCollection toolchainContexts) + @Nullable ToolchainCollection toolchainContexts, + @Nullable ToolchainCollection baseTargetUnloadedToolchainContexts) throws Failure { Target target = node.getTarget(); BuildConfigurationValue config = node.getConfiguration(); @@ -115,7 +117,13 @@ public static DependencyLabels computeDependencyLabels( } else if (target instanceof Rule rule) { fromRule = rule; attributeMap = ConfiguredAttributeMapper.of(fromRule, configConditions, config); - visitRule(node, aspects, attributeMap, toolchainContexts, outgoingLabels); + visitRule( + node, + aspects, + attributeMap, + toolchainContexts, + baseTargetUnloadedToolchainContexts, + outgoingLabels); } else if (target instanceof PackageGroup packageGroup) { outgoingLabels.putAll(VISIBILITY_DEPENDENCY, packageGroup.getIncludes()); } else { @@ -244,6 +252,7 @@ private static void visitRule( ImmutableList aspects, ConfiguredAttributeMapper attributeMap, @Nullable ToolchainCollection toolchainContexts, + @Nullable ToolchainCollection baseTargetUnloadedToolchainContexts, OrderedSetMultimap outgoingLabels) throws Failure { Preconditions.checkArgument(node.getTarget() instanceof Rule, node); @@ -302,6 +311,7 @@ private static void visitRule( } addToolchainDeps(toolchainContexts, outgoingLabels); + addBaseTargetToolchainDeps(baseTargetUnloadedToolchainContexts, outgoingLabels); } private static void addToolchainDeps( @@ -317,6 +327,29 @@ private static void addToolchainDeps( } } + private static void addBaseTargetToolchainDeps( + @Nullable ToolchainCollection toolchainContexts, + OrderedSetMultimap outgoingLabels) { + if (toolchainContexts == null) { + return; + } + for (Map.Entry execGroup : + toolchainContexts.getContextMap().entrySet()) { + for (var toolchainTypeToResolved : + execGroup.getValue().toolchainTypeToResolved().asMap().entrySet()) { + // map entries from (exec group, toolchain type) to resolved toolchain labels. We need to + // distinguish the resolved toolchains per type because aspects propagate on toolchains + // based on the types specified in `toolchains_aspects`. So even if 2 types resolved to the + // same toolchain target, their CT will be different if an aspect propagates to one type but + // not the other. 
+ outgoingLabels.putAll( + DependencyKind.forBaseTargetExecGroup( + execGroup.getKey(), toolchainTypeToResolved.getKey().typeLabel()), + toolchainTypeToResolved.getValue()); + } + } + } + private static void resolveAttributes( Iterable attributeDependencyKinds, OrderedSetMultimap outgoingLabels, diff --git a/src/main/java/com/google/devtools/build/lib/analysis/PrerequisiteArtifacts.java b/src/main/java/com/google/devtools/build/lib/analysis/PrerequisiteArtifacts.java index ebcfaaeaf1c0c1..e347eb09651b6e 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/PrerequisiteArtifacts.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/PrerequisiteArtifacts.java @@ -61,10 +61,6 @@ static PrerequisiteArtifacts get(RuleContext ruleContext, String attributeName) return new PrerequisiteArtifacts(ruleContext, attributeName, result.build().asList()); } - public static NestedSet nestedSet(RuleContext ruleContext, String attributeName) { - return nestedSet(ruleContext.getOwningPrerequisitesCollection(attributeName), attributeName); - } - public static NestedSet nestedSet( PrerequisitesCollection prerequisitesCollection, String attributeName) { NestedSetBuilder result = NestedSetBuilder.stableOrder(); diff --git a/src/main/java/com/google/devtools/build/lib/analysis/RuleConfiguredTargetBuilder.java b/src/main/java/com/google/devtools/build/lib/analysis/RuleConfiguredTargetBuilder.java index 2fdd7e4f6b1692..1b1b69d589b317 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/RuleConfiguredTargetBuilder.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/RuleConfiguredTargetBuilder.java @@ -42,12 +42,14 @@ import com.google.devtools.build.lib.analysis.test.TestProvider.TestParams; import com.google.devtools.build.lib.analysis.test.TestTagsProvider; import com.google.devtools.build.lib.cmdline.Label; +import com.google.devtools.build.lib.cmdline.RepositoryMapping; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.packages.AllowlistChecker; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.BuildSetting; +import com.google.devtools.build.lib.packages.BuiltinRestriction; import com.google.devtools.build.lib.packages.Info; import com.google.devtools.build.lib.packages.Provider; import com.google.devtools.build.lib.packages.TargetUtils; @@ -349,7 +351,11 @@ private void propagateTransitiveValidationOutputGroups() { Label rdeLabel = ruleContext.getRule().getRuleClassObject().getRuleDefinitionEnvironmentLabel(); // only allow native and builtins to override transitive validation propagation - if (rdeLabel != null && !rdeLabel.getRepository().getName().equals("_builtins")) { + if (rdeLabel != null + && BuiltinRestriction.isNotAllowed( + rdeLabel, + RepositoryMapping.ALWAYS_FALLBACK, + BuiltinRestriction.INTERNAL_STARLARK_API_ALLOWLIST)) { ruleContext.ruleError(rdeLabel + " cannot access the _transitive_validation private API"); return; } diff --git a/src/main/java/com/google/devtools/build/lib/analysis/RuleContext.java b/src/main/java/com/google/devtools/build/lib/analysis/RuleContext.java index 4d308e6e809c34..33616dffae6ea3 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/RuleContext.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/RuleContext.java @@ -1091,7 +1091,8 @@ private 
ResolvedToolchainContext getToolchainContext(String execGroup) { return toolchainContexts == null ? null : toolchainContexts.getToolchainContext(execGroup); } - private boolean isAutomaticExecGroup(String execGroupName) { + /** Returns true if the given exec group is an automatic exec group. */ + public boolean isAutomaticExecGroup(String execGroupName) { return !Identifier.isValid(execGroupName) && !execGroupName.equals(DEFAULT_EXEC_GROUP_NAME); } diff --git a/src/main/java/com/google/devtools/build/lib/analysis/RunfilesSupport.java b/src/main/java/com/google/devtools/build/lib/analysis/RunfilesSupport.java index cf73dadab76441..9d84183424ac7c 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/RunfilesSupport.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/RunfilesSupport.java @@ -14,8 +14,10 @@ package com.google.devtools.build.lib.analysis; +import static com.google.common.base.Preconditions.checkNotNull; +import static com.google.common.base.Preconditions.checkState; + import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -217,7 +219,7 @@ private static RunfilesSupport create( Runfiles runfiles, CommandLine args, ActionEnvironment actionEnvironment) { - Artifact owningExecutable = Preconditions.checkNotNull(executable); + checkNotNull(executable); RunfileSymlinksMode runfileSymlinksMode = ruleContext.getConfiguration().getRunfileSymlinksMode(); boolean buildRunfileManifests = ruleContext.getConfiguration().buildRunfileManifests(); @@ -238,15 +240,15 @@ private static RunfilesSupport create( .merge(runfiles) .build(); } - Preconditions.checkState(!runfiles.isEmpty()); + checkState(!runfiles.isEmpty(), "Empty runfiles"); Artifact repoMappingManifest = - createRepoMappingManifestAction(ruleContext, runfiles, owningExecutable); + createRepoMappingManifestAction(ruleContext, runfiles, executable); Artifact runfilesInputManifest; Artifact runfilesManifest; if (buildRunfileManifests) { - runfilesInputManifest = createRunfilesInputManifestArtifact(ruleContext, owningExecutable); + runfilesInputManifest = createRunfilesInputManifestArtifact(ruleContext, executable); runfilesManifest = createRunfilesAction( ruleContext, runfiles, buildRunfileLinks, runfilesInputManifest, repoMappingManifest); @@ -255,13 +257,9 @@ private static RunfilesSupport create( runfilesManifest = null; } - PathFragment executablePath = owningExecutable.getExecPath(); - PathFragment runfilesExecPath = - executablePath.replaceName(executablePath.getBaseName() + RUNFILES_DIR_EXT); - RunfilesTreeImpl runfilesTree = new RunfilesTreeImpl( - runfilesExecPath, + runfilesDirExecPath(executable), runfiles, repoMappingManifest, buildRunfileLinks, @@ -270,14 +268,14 @@ private static RunfilesSupport create( Artifact runfilesMiddleman = createRunfilesMiddleman( - ruleContext, owningExecutable, runfilesTree, runfilesManifest, repoMappingManifest); + ruleContext, executable, runfilesTree, runfilesManifest, repoMappingManifest); return new RunfilesSupport( runfilesTree, runfilesInputManifest, runfilesManifest, runfilesMiddleman, - owningExecutable, + executable, args, actionEnvironment); } @@ -342,12 +340,9 @@ public Artifact getRunfilesInputManifest() { private static Artifact createRunfilesInputManifestArtifact( RuleContext context, Artifact owningExecutable) { - // The executable may be null for emptyRunfiles 
PathFragment relativePath = - (owningExecutable != null) - ? owningExecutable.getOutputDirRelativePath( - context.getConfiguration().isSiblingRepositoryLayout()) - : context.getPackageDirectory().getRelative(context.getLabel().getName()); + owningExecutable.getOutputDirRelativePath( + context.getConfiguration().isSiblingRepositoryLayout()); String basename = relativePath.getBaseName(); PathFragment inputManifestPath = relativePath.replaceName(basename + INPUT_MANIFEST_EXT); return context.getDerivedArtifact(inputManifestPath, context.getBinDirectory()); @@ -561,14 +556,33 @@ private static ActionEnvironment computeActionEnvironment(RuleContext ruleContex ImmutableMap.copyOf(fixedEnv), ImmutableSet.copyOf(inheritedEnv)); } - /** Returns the path of the input manifest of {@code runfilesDir}. */ - public static Path inputManifestPath(Path runfilesDir) { - return FileSystemUtils.replaceExtension(runfilesDir, INPUT_MANIFEST_EXT); + /** Returns the exec path of the {@code .runfiles} directory for the given executable. */ + public static PathFragment runfilesDirExecPath(Artifact executable) { + PathFragment executablePath = executable.getExecPath(); + return executablePath.replaceName(executablePath.getBaseName() + RUNFILES_DIR_EXT); } - /** Returns the path of the output manifest of {@code runfilesDir}. */ - public static Path outputManifestPath(Path runfilesDir) { - return runfilesDir.getRelative(OUTPUT_MANIFEST_BASENAME); + /** + * Returns the exec path of the corresponding {@code .runfiles_manifest} file for the given {@code + * .runfiles} directory. + * + *
The input manifest is produced by {@link SourceManifestAction} and is an input to {@link + * SymlinkTreeAction}. + */ + public static PathFragment inputManifestExecPath(PathFragment runfilesDirExecPath) { + return FileSystemUtils.replaceExtension(runfilesDirExecPath, INPUT_MANIFEST_EXT); + } + + /** + * Returns the exec path of the corresponding {@code MANIFEST} file for the given {@code + * .runfiles} directory. + * + *
The output manifest is a symlink to the {@linkplain #inputManifestExecPath input manifest}. + * It is located in the {@code .runfiles} directory and is the output of {@link + * SymlinkTreeAction}. + */ + public static PathFragment outputManifestExecPath(PathFragment runfilesDirExecPath) { + return runfilesDirExecPath.getRelative(OUTPUT_MANIFEST_BASENAME); } @Nullable diff --git a/src/main/java/com/google/devtools/build/lib/analysis/SourceManifestAction.java b/src/main/java/com/google/devtools/build/lib/analysis/SourceManifestAction.java index f359e6cb9ec27a..808d4bdd12aaa8 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/SourceManifestAction.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/SourceManifestAction.java @@ -68,7 +68,6 @@ public final class SourceManifestAction extends AbstractFileWriteAction * Interface for defining manifest formatting and reporting specifics. Implementations must be * immutable. */ - @VisibleForTesting interface ManifestWriter { /** diff --git a/src/main/java/com/google/devtools/build/lib/analysis/actions/AbstractFileWriteAction.java b/src/main/java/com/google/devtools/build/lib/analysis/actions/AbstractFileWriteAction.java index 90679951d02ab5..0528e6ec1ba8dd 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/actions/AbstractFileWriteAction.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/actions/AbstractFileWriteAction.java @@ -27,6 +27,8 @@ import com.google.devtools.build.lib.actions.SpawnResult; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.events.EventHandler; +import com.google.devtools.build.lib.profiler.Profiler; +import com.google.devtools.build.lib.profiler.SilentCloseable; import java.io.IOException; import javax.annotation.Nullable; @@ -54,7 +56,10 @@ public boolean makeExecutable() { public final ActionResult execute(ActionExecutionContext actionExecutionContext) throws ActionExecutionException, InterruptedException { try { - DeterministicWriter deterministicWriter = newDeterministicWriter(actionExecutionContext); + DeterministicWriter deterministicWriter; + try (SilentCloseable c = Profiler.instance().profile("setupDeterministicWriter")) { + deterministicWriter = newDeterministicWriter(actionExecutionContext); + } FileWriteActionContext context = actionExecutionContext.getContext(FileWriteActionContext.class); ImmutableList result = diff --git a/src/main/java/com/google/devtools/build/lib/analysis/config/BuildOptions.java b/src/main/java/com/google/devtools/build/lib/analysis/config/BuildOptions.java index 0ee87396fba12d..ed4b96caebe0ac 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/config/BuildOptions.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/config/BuildOptions.java @@ -21,6 +21,7 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableCollection; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; @@ -94,6 +95,7 @@ public static BuildOptions of( } return builder .addStarlarkOptions(labelizeStarlarkOptions(provider.getStarlarkOptions())) + .addUserOptions(provider.getUserOptions()) .build(); } @@ -211,6 +213,14 @@ public ImmutableMap getStarlarkOptions() { return starlarkOptionsMap; } + /** + * Returns the list of options that were parsed from either a user 
blazerc file or the command + * line. + */ + public ImmutableList getUserOptions() { + return userOptions; + } + /** * Creates a copy of the BuildOptions object that contains copies of the FragmentOptions and * Starlark options. @@ -226,7 +236,8 @@ public BuildOptions clone() { e -> e.getValue().clone())); // Note that this assumes that starlark option values are immutable. ImmutableMap starlarkOptions = ImmutableMap.copyOf(starlarkOptionsMap); - return new BuildOptions(nativeOptions, starlarkOptions); + ImmutableList userOptions = this.userOptions; + return new BuildOptions(nativeOptions, starlarkOptions, userOptions); } @Override @@ -247,9 +258,13 @@ public int hashCode() { /** Maps options class definitions to FragmentOptions objects. */ private final ImmutableMap, FragmentOptions> fragmentOptionsMap; + /** Maps Starlark options names to Starlark options values. */ private final ImmutableMap starlarkOptionsMap; + /** The list of options that were parsed from either a user blazerc file or the command line. */ + private final ImmutableList userOptions; + // Lazily initialized both for performance and correctness - BuildOptions instances may be mutated // after construction but before consumption. Access via checksum() to ensure initialization. This // field is volatile as per https://errorprone.info/bugpattern/DoubleCheckedLocking, which @@ -258,9 +273,11 @@ public int hashCode() { private BuildOptions( ImmutableMap, FragmentOptions> fragmentOptionsMap, - ImmutableMap starlarkOptionsMap) { + ImmutableMap starlarkOptionsMap, + ImmutableList userOptions) { this.fragmentOptionsMap = fragmentOptionsMap; this.starlarkOptionsMap = starlarkOptionsMap; + this.userOptions = userOptions; } /** @@ -301,6 +318,11 @@ public BuildOptions applyParsingResult(OptionsParsingResult parsingResult) { builder.addStarlarkOption(starlarkOption.getKey(), starlarkOption.getValue()); } + // And update options set in user blazerc or command line + builder.userOptions.addAll( + parsingResult.getUserOptions() == null + ? 
ImmutableList.of() + : parsingResult.getUserOptions()); return builder.build(); } @@ -391,6 +413,7 @@ public Builder merge(BuildOptions options) { this.addFragmentOptions(fragment); } this.addStarlarkOptions(options.getStarlarkOptions()); + this.addUserOptions(options.getUserOptions()); return this; } @@ -447,15 +470,23 @@ public Builder removeStarlarkOption(Label key) { return this; } + @CanIgnoreReturnValue + public Builder addUserOptions(ImmutableList options) { + userOptions.addAll(options); + return this; + } + public BuildOptions build() { return new BuildOptions( ImmutableSortedMap.copyOf(fragmentOptions, LEXICAL_FRAGMENT_OPTIONS_COMPARATOR), - ImmutableSortedMap.copyOf(starlarkOptions)); + ImmutableSortedMap.copyOf(starlarkOptions), + userOptions.build()); } private final Map, FragmentOptions> fragmentOptions = new HashMap<>(); private final Map starlarkOptions = new HashMap<>(); + private final ImmutableList.Builder userOptions = new ImmutableList.Builder<>(); private Builder() {} } diff --git a/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyMapProducer.java b/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyMapProducer.java index aecb94e0dd2d65..e07782e24b3a40 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyMapProducer.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyMapProducer.java @@ -92,7 +92,11 @@ public StateMachine step(Tasks tasks) { // The list of aspects is evaluated here to be done once per attribute, rather than once per // dependency. ImmutableList aspects = - computePropagatingAspects(kind, parameters.aspects(), parameters.associatedRule()); + computePropagatingAspects( + kind, + parameters.aspects(), + parameters.associatedRule(), + parameters.baseTargetToolchainContexts()); for (var label : entry.getValue()) { tasks.enqueue( new DependencyProducer( diff --git a/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyProducer.java b/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyProducer.java index 6e98e52865ca27..7c81324808908d 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyProducer.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyProducer.java @@ -143,7 +143,8 @@ public StateMachine step(Tasks tasks) { return computePrerequisites( AttributeConfiguration.ofUnary(configurationKey), parameters.getExecutionPlatformLabel( - ((ToolchainDependencyKind) kind).getExecGroupName())); + ((ToolchainDependencyKind) kind).getExecGroupName(), + DependencyKind.isBaseTargetToolchain(kind))); } if (kind == OUTPUT_FILE_RULE_DEPENDENCY) { @@ -295,6 +296,10 @@ private boolean useBaseTargetPrerequisitesSupplier() { return false; } + if (DependencyKind.isBaseTargetToolchain(kind)) { + return true; + } + if (DependencyKind.isAttribute(kind)) { if (kind.getOwningAspect() == null) { return true; diff --git a/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisiteParameters.java b/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisiteParameters.java index 9c220753186684..eb79ff02275d58 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisiteParameters.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisiteParameters.java @@ -31,6 +31,7 @@ import com.google.devtools.build.lib.skyframe.BaseTargetPrerequisitesSupplier; import 
com.google.devtools.build.lib.skyframe.ConfiguredTargetKey; import com.google.devtools.build.lib.skyframe.config.BuildConfigurationKey; +import com.google.devtools.build.lib.skyframe.toolchains.UnloadedToolchainContext; import javax.annotation.Nullable; import net.starlark.java.syntax.Location; @@ -56,6 +57,14 @@ public final class PrerequisiteParameters { */ @Nullable private final BaseTargetPrerequisitesSupplier baseTargetPrerequisitesSupplier; + /** + * The {@link UnloadedToolchainContext}s for the base target of the aspect under evaluation. + * + *
This is only non-null during aspect evaluation if the aspects path can propagate to + * toolchains. + */ + @Nullable private final ToolchainCollection baseTargetToolchainContexts; + public PrerequisiteParameters( ConfiguredTargetKey configuredTargetKey, Target target, @@ -66,7 +75,8 @@ public PrerequisiteParameters( @Nullable ConfiguredAttributeMapper attributeMap, TransitiveDependencyState transitiveState, ExtendedEventHandler eventHandler, - @Nullable BaseTargetPrerequisitesSupplier baseTargetPrerequisitesSupplier) { + @Nullable BaseTargetPrerequisitesSupplier baseTargetPrerequisitesSupplier, + @Nullable ToolchainCollection baseTargetToolchainContexts) { this.configuredTargetKey = configuredTargetKey; this.target = target; this.aspects = ImmutableList.copyOf(aspects); @@ -77,6 +87,12 @@ public PrerequisiteParameters( this.transitiveState = transitiveState; this.eventHandler = eventHandler; this.baseTargetPrerequisitesSupplier = baseTargetPrerequisitesSupplier; + this.baseTargetToolchainContexts = baseTargetToolchainContexts; + } + + @Nullable + public ToolchainCollection baseTargetToolchainContexts() { + return baseTargetToolchainContexts; } @Nullable @@ -134,8 +150,10 @@ public BuildEventId eventId() { } @Nullable - public Label getExecutionPlatformLabel(String execGroup) { - var platform = toolchainContexts.getToolchainContext(execGroup).executionPlatform(); + public Label getExecutionPlatformLabel(String execGroup, boolean isBaseTargetToolchain) { + var context = isBaseTargetToolchain ? baseTargetToolchainContexts : toolchainContexts; + + var platform = context.getToolchainContext(execGroup).executionPlatform(); if (platform == null) { return null; } diff --git a/src/main/java/com/google/devtools/build/lib/analysis/producers/UnloadedToolchainContextsProducer.java b/src/main/java/com/google/devtools/build/lib/analysis/producers/UnloadedToolchainContextsProducer.java index bf6b4afdb1c676..a12473f12f111b 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/producers/UnloadedToolchainContextsProducer.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/producers/UnloadedToolchainContextsProducer.java @@ -17,6 +17,7 @@ import com.google.devtools.build.lib.analysis.ToolchainCollection; import com.google.devtools.build.lib.packages.ExecGroup; +import com.google.devtools.build.lib.skyframe.BaseTargetPrerequisitesSupplier; import com.google.devtools.build.lib.skyframe.toolchains.NoMatchingPlatformException; import com.google.devtools.build.lib.skyframe.toolchains.ToolchainContextKey; import com.google.devtools.build.lib.skyframe.toolchains.ToolchainException; @@ -26,8 +27,13 @@ import java.util.Map; import javax.annotation.Nullable; -final class UnloadedToolchainContextsProducer implements StateMachine { - interface ResultSink { +/** + * Determines {@code ToolchainCollection} from {@link + * UnloadedToolchainContextsInputs}. + */ +public final class UnloadedToolchainContextsProducer implements StateMachine { + /** Interface for accepting values produced by this class. */ + public interface ResultSink { void acceptUnloadedToolchainContexts( @Nullable ToolchainCollection unloadedToolchainContexts); @@ -37,6 +43,13 @@ void acceptUnloadedToolchainContexts( // -------------------- Input -------------------- private final UnloadedToolchainContextsInputs unloadedToolchainContextsInputs; + /** + * Cache for {@link UnloadedToolchainContext}. Not null only for aspects evaluation. + * + *
Check {@link AspectFunction#baseTargetPrerequisitesSupplier} for more details + */ + @Nullable private final BaseTargetPrerequisitesSupplier baseTargetPrerequisitesSupplier; + // -------------------- Output -------------------- private final ResultSink sink; @@ -54,10 +67,26 @@ void acceptUnloadedToolchainContexts( this.unloadedToolchainContextsInputs = unloadedToolchainContextsInputs; this.sink = sink; this.runAfter = runAfter; + this.baseTargetPrerequisitesSupplier = null; + } + + /** + * Constructor for {@link UnloadedToolchainContextsProducer} with {@code + * baseTargetPrerequisitesSupplier} used by {@link AspectFunction}. + */ + public UnloadedToolchainContextsProducer( + UnloadedToolchainContextsInputs unloadedToolchainContextsInputs, + BaseTargetPrerequisitesSupplier baseTargetPrerequisitesSupplier, + ResultSink sink, + StateMachine runAfter) { + this.unloadedToolchainContextsInputs = unloadedToolchainContextsInputs; + this.baseTargetPrerequisitesSupplier = baseTargetPrerequisitesSupplier; + this.sink = sink; + this.runAfter = runAfter; } @Override - public StateMachine step(Tasks tasks) { + public StateMachine step(Tasks tasks) throws InterruptedException { var defaultToolchainContextKey = unloadedToolchainContextsInputs.targetToolchainContextKey(); if (defaultToolchainContextKey == null) { // Doesn't use toolchain resolution and short-circuits. @@ -69,10 +98,11 @@ public StateMachine step(Tasks tasks) { ToolchainCollection.builderWithExpectedSize( unloadedToolchainContextsInputs.execGroups().size() + 1); - tasks.lookUp( + lookupToolchainContext( + baseTargetPrerequisitesSupplier, defaultToolchainContextKey, - ToolchainException.class, - new ToolchainContextLookupCallback(DEFAULT_EXEC_GROUP_NAME)); + DEFAULT_EXEC_GROUP_NAME, + tasks); var keyBuilder = ToolchainContextKey.key() @@ -82,18 +112,37 @@ public StateMachine step(Tasks tasks) { for (Map.Entry entry : unloadedToolchainContextsInputs.execGroups().entrySet()) { var execGroup = entry.getValue(); - tasks.lookUp( + var key = keyBuilder .toolchainTypes(execGroup.toolchainTypes()) .execConstraintLabels(execGroup.execCompatibleWith()) - .build(), - ToolchainException.class, - new ToolchainContextLookupCallback(entry.getKey())); + .build(); + lookupToolchainContext(baseTargetPrerequisitesSupplier, key, entry.getKey(), tasks); } return this::buildToolchainContexts; } + private void lookupToolchainContext( + @Nullable BaseTargetPrerequisitesSupplier baseTargetPrerequisitesSupplier, + ToolchainContextKey key, + String execGroupName, + Tasks tasks) + throws InterruptedException { + var toolchainContext = + baseTargetPrerequisitesSupplier == null + ? 
null + : baseTargetPrerequisitesSupplier.getUnloadedToolchainContext(key); + + if (toolchainContext != null) { + new ToolchainContextLookupCallback(execGroupName) + .acceptValueOrException(toolchainContext, null); + } else { + tasks.lookUp( + key, ToolchainException.class, new ToolchainContextLookupCallback(execGroupName)); + } + } + private class ToolchainContextLookupCallback implements StateMachine.ValueOrExceptionSink { private final String execGroupName; diff --git a/src/main/java/com/google/devtools/build/lib/analysis/starlark/StarlarkRuleContext.java b/src/main/java/com/google/devtools/build/lib/analysis/starlark/StarlarkRuleContext.java index 1ca3d120d26978..2b61b5fcf62bfc 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/starlark/StarlarkRuleContext.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/starlark/StarlarkRuleContext.java @@ -835,6 +835,7 @@ ImmutableSet

When building incrementally, any change to the value of the variable named by " - + "name will cause this repository to be re-fetched.", + """ + Returns the value of an environment variable name as a string if it exists, \ + or default if it doesn't. \ +
When building incrementally, any change to the value of the variable named by \ + name will cause this repository to be re-fetched. + """, parameters = { @Param( name = "name", - doc = "name of desired environment variable", + doc = "Name of desired environment variable.", allowedTypes = {@ParamType(type = String.class)}), @Param( name = "default", - doc = "Default value to return if `name` is not found", + doc = "Default value to return if name is not found.", allowedTypes = {@ParamType(type = String.class), @ParamType(type = NoneType.class)}, defaultValue = "None") }, @@ -1292,12 +1326,14 @@ public String getEnvironmentValue(String name, Object defaultValue) @StarlarkMethod( name = "path", doc = - "Returns a path from a string, label or path. If the path is relative, it will resolve " - + "relative to the repository directory. If the path is a label, it will resolve to " - + "the path of the corresponding file. Note that remote repositories are executed " - + "during the analysis phase and thus cannot depends on a target result (the " - + "label should point to a non-generated file). If path is a path, it will return " - + "that path as is.", + """ + Returns a path from a string, label or path. If the path is relative, it will resolve \ + relative to the repository directory. If the path is a label, it will resolve to \ + the path of the corresponding file. Note that remote repositories are executed \ + during the analysis phase and thus cannot depends on a target result (the \ + label should point to a non-generated file). If path is a path, it will return \ + that path as is. + """, parameters = { @Param( name = "path", @@ -1306,7 +1342,9 @@ public String getEnvironmentValue(String name, Object defaultValue) @ParamType(type = Label.class), @ParamType(type = StarlarkPath.class) }, - doc = "string, label or path from which to create a path from") + doc = + "string, Label or path from which to create" + + " a path from.") }) public StarlarkPath path(Object path) throws EvalException, InterruptedException { return getPath("path()", path); @@ -1338,19 +1376,21 @@ protected StarlarkPath getPath(String method, Object path) @ParamType(type = Label.class), @ParamType(type = StarlarkPath.class) }, - doc = "path of the file to read from."), + doc = "Path of the file to read from."), @Param( name = "watch", defaultValue = "'auto'", positional = false, named = true, doc = - "whether to watch the file. Can be the string 'yes', 'no', " - + "or 'auto'. Passing 'yes' is equivalent to immediately invoking the " - + "watch() method; passing 'no' does not " - + "attempt to watch the file; passing 'auto' will only attempt to watch the " - + "file when it is legal to do so (see watch() docs for more " - + "information.") + """ + Whether to watch the file. Can be the string 'yes', 'no', \ + or 'auto'. Passing 'yes' is equivalent to immediately invoking the \ + watch() method; passing 'no' does not \ + attempt to watch the file; passing 'auto' will only attempt to watch the \ + file when it is legal to do so (see watch() docs for more \ + information. + """) }) public String readFile(Object path, String watch, StarlarkThread thread) throws RepositoryFunctionException, EvalException, InterruptedException { @@ -1483,18 +1523,20 @@ protected void maybeWatchDirents(Path path, ShouldWatch shouldWatch) @StarlarkMethod( name = "watch", doc = - "Tells Bazel to watch for changes to the given path, whether or not it exists, or " - + "whether it's a file or a directory. 
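For illustration of the getenv and read behaviour documented above, here is a minimal repository rule sketch; the rule name, label, and file contents are hypothetical and not part of this change. Any change to the environment variable or to the watched settings file causes the repository to be re-fetched on the next incremental build.

# Illustrative sketch only; all names and labels are hypothetical.
def _mirror_impl(rctx):
    # getenv(name, default): returns the variable's value (or the default) and
    # causes a re-fetch when the variable changes between incremental builds.
    mode = rctx.getenv("MIRROR_MODE", "release")

    # read(path, watch = ...): "auto" only watches the file when it is legal
    # to do so; "yes" is equivalent to calling watch() first.
    settings = rctx.read(Label("//:mirror_settings.txt"), watch = "auto")

    rctx.file("BUILD", 'exports_files(["mode.txt"])')
    rctx.file("mode.txt", mode + "\n" + settings)

mirror_repo = repository_rule(implementation = _mirror_impl)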
Any changes to the file or directory will " - + "invalidate this repository or module extension, and cause it to be refetched or " - + "re-evaluated next time.
\"Changes\" include changes to the contents of the file " - + "(if the path is a file); if the path was a file but is now a directory, or vice " - + "versa; and if the path starts or stops existing. Notably, this does not " - + "include changes to any files under the directory if the path is a directory. For " - + "that, use path.readdir() " - + "instead.
Note that attempting to watch paths inside the repo currently being " - + "fetched, or inside the working directory of the current module extension, will " - + "result in an error. A module extension attempting to watch a path outside the " - + "current Bazel workspace will also result in an error.", + """ + Tells Bazel to watch for changes to the given path, whether or not it exists, or \ + whether it's a file or a directory. Any changes to the file or directory will \ + invalidate this repository or module extension, and cause it to be refetched or \ + re-evaluated next time.
"Changes" include changes to the contents of the file \ + (if the path is a file); if the path was a file but is now a directory, or vice \ + versa; and if the path starts or stops existing. Notably, this does not \ + include changes to any files under the directory if the path is a directory. For \ + that, use path.readdir() \ + instead.
Note that attempting to watch paths inside the repo currently being \ + fetched, or inside the working directory of the current module extension, will \ + result in an error. A module extension attempting to watch a path outside the \ + current Bazel workspace will also result in an error. + """, parameters = { @Param( name = "path", @@ -1503,7 +1545,7 @@ protected void maybeWatchDirents(Path path, ShouldWatch shouldWatch) @ParamType(type = Label.class), @ParamType(type = StarlarkPath.class) }, - doc = "path of the file to watch."), + doc = "Path of the file to watch."), }) public void watchForStarlark(Object path) throws RepositoryFunctionException, EvalException, InterruptedException { @@ -1520,13 +1562,13 @@ protected static void makeDirectories(Path path) throws IOException { @StarlarkMethod( name = "report_progress", - doc = "Updates the progress status for the fetching of this repository or module extension", + doc = "Updates the progress status for the fetching of this repository or module extension.", parameters = { @Param( name = "status", defaultValue = "''", allowedTypes = {@ParamType(type = String.class)}, - doc = "string describing the current status of the fetch progress") + doc = "string describing the current status of the fetch progress.") }) public void reportProgress(String status) { env.getListener() @@ -1694,28 +1736,32 @@ private static String profileArgsDesc(String method, List args) { @StarlarkMethod( name = "execute", doc = - "Executes the command given by the list of arguments. The execution time of the command" - + " is limited by timeout (in seconds, default 600 seconds). This method" - + " returns an exec_result structure containing the output of the" - + " command. The environment map can be used to override some" - + " environment variables to be passed to the process.", + """ + Executes the command given by the list of arguments. The execution time of the command \ + is limited by timeout (in seconds, default 600 seconds). This method \ + returns an exec_result structure containing the output of the \ + command. The environment map can be used to override some \ + environment variables to be passed to the process. + """, useStarlarkThread = true, parameters = { @Param( name = "arguments", doc = - "List of arguments, the first element should be the path to the program to " - + "execute."), + """ + List of arguments, the first element should be the path to the program to \ + execute. + """), @Param( name = "timeout", named = true, defaultValue = "600", - doc = "maximum duration of the command in seconds (default is 600 seconds)."), + doc = "Maximum duration of the command in seconds (default is 600 seconds)."), @Param( name = "environment", defaultValue = "{}", named = true, - doc = "force some environment variables to be set to be passed to the process."), + doc = "Force some environment variables to be set to be passed to the process."), @Param( name = "quiet", defaultValue = "True", @@ -1726,9 +1772,11 @@ private static String profileArgsDesc(String method, List args) { defaultValue = "\"\"", named = true, doc = - "Working directory for command execution.\n" - + "Can be relative to the repository root or absolute.\n" - + "The default is the repository root."), + """ + Working directory for command execution. + Can be relative to the repository root or absolute. + The default is the repository root. + """), }) public StarlarkExecutionResult execute( Sequence arguments, // or or
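As a hedged sketch of the watch(), report_progress(), and execute() APIs described above; the probed command and file names are hypothetical, and return_code, stdout, and stderr are the standard exec_result fields.

# Illustrative sketch only; names and the probed command are hypothetical.
def _probe_impl(rctx):
    # watch(): any change to this path, including it starting or stopping to
    # exist, invalidates the repository and re-runs the fetch.
    rctx.watch(rctx.workspace_root.get_child("version.txt"))

    rctx.report_progress("Probing the local system")

    # execute(): runs the command with a timeout (in seconds) and an
    # environment override; returns an exec_result with return_code,
    # stdout, and stderr.
    result = rctx.execute(
        ["uname", "-sm"],
        timeout = 60,
        environment = {"LC_ALL": "C"},
    )
    if result.return_code != 0:
        fail("probe failed: " + result.stderr)

    rctx.file("BUILD")
    rctx.file("probe.txt", result.stdout)

probe_repo = repository_rule(implementation = _probe_impl)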

NOTE: Retrieving an environment variable from this dictionary does not " - + "establish a dependency from a repository rule or module extension to the " - + "environment variable. To establish a dependency when looking up an " - + "environment variable, use either repository_ctx.getenv or " - + "module_ctx.getenv instead.") + """ + The dictionary of environment variables. \ +
NOTE: Retrieving an environment variable from this dictionary does not \ + establish a dependency from a repository rule or module extension to the \ + environment variable. To establish a dependency when looking up an \ + environment variable, use either repository_ctx.getenv or \ + module_ctx.getenv instead. + """) public ImmutableMap getEnvironmentVariables() { return environ; } @@ -60,8 +62,10 @@ public ImmutableMap getEnvironmentVariables() { name = "name", structField = true, doc = - "A string identifying the operating system Bazel is running on (the value of the" - + " \"os.name\" Java property converted to lower case).") + """ + A string identifying the operating system Bazel is running on (the value of the \ + "os.name" Java property converted to lower case). + """) public String getName() { return System.getProperty("os.name").toLowerCase(Locale.ROOT); } @@ -70,8 +74,10 @@ public String getName() { name = "arch", structField = true, doc = - "A string identifying the architecture Bazel is running on (the value of the \"os.arch\"" - + " Java property converted to lower case).") + """ + A string identifying the architecture Bazel is running on (the value of the \ + "os.arch" Java property converted to lower case). + """) public String getArch() { return System.getProperty("os.arch").toLowerCase(Locale.ROOT); } diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkPath.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkPath.java index 3e676ecfdb8d78..0fda30e10f1911 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkPath.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkPath.java @@ -84,8 +84,10 @@ public String getBasename() { @StarlarkMethod( name = "readdir", doc = - "Returns the list of entries in the directory denoted by this path. Each entry is a " - + "path object itself.", + """ + Returns the list of entries in the directory denoted by this path. Each entry is a \ + path object itself. + """, parameters = { @Param( name = "watch", @@ -93,15 +95,16 @@ public String getBasename() { positional = false, named = true, doc = - "whether Bazel should watch the list of entries in this directory and refetch the " - + "repository or re-evaluate the module extension next time when any changes " - + "are detected. Changes to detect include entry creation, deletion, and " - + "renaming. Note that this doesn't watch the contents of any entries " - + "in the directory.
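A small sketch of how the os.name and os.arch properties documented above are typically consumed in a repository rule; the derived values and generated file names are hypothetical.

# Illustrative sketch only; the generated files are placeholders.
def _per_os_impl(rctx):
    os_name = rctx.os.name  # e.g. "linux", "mac os x", "windows 10"
    cpu = rctx.os.arch      # e.g. "amd64", "aarch64"

    if os_name.startswith("windows"):
        ext = ".zip"
    else:
        ext = ".tar.gz"

    rctx.file("BUILD")
    rctx.file("platform.bzl", "OS = %r\nCPU = %r\nEXT = %r\n" % (os_name, cpu, ext))

per_os_repo = repository_rule(implementation = _per_os_impl)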
Can be the string 'yes', 'no', or 'auto'. If set to " - + "'auto', Bazel will only watch this directory when it is legal to do so (see " - + "repository_ctx.watch() " - + "docs for more information)."), + """ + Whether Bazel should watch the list of entries in this directory and refetch the \ + repository or re-evaluate the module extension next time when any changes \ + are detected. Changes to detect include entry creation, deletion, and \ + renaming. Note that this doesn't watch the contents of any entries \ + in the directory.
Can be the string 'yes', 'no', or 'auto'. If set to \ + 'auto', Bazel will only watch this directory when it is legal to do so (see \ + repository_ctx.watch() \ + docs for more information). + """), }) public ImmutableList readdir(String watch) throws EvalException, RepositoryFunctionException, InterruptedException { @@ -138,8 +141,10 @@ public StarlarkPath getDirname() { @Param( name = "relative_paths", doc = - "Zero or more relative path strings to append to this path with path separators" - + "added as needed.")) + """ + Zero or more relative path strings to append to this path with path separators \ + added as needed. + """)) public StarlarkPath getChild(Tuple relativePaths) throws EvalException { return new StarlarkPath( ctx, @@ -153,10 +158,12 @@ public StarlarkPath getChild(Tuple relativePaths) throws EvalException { name = "exists", structField = true, doc = - "Returns true if the file or directory denoted by this path exists.
Note that " - + "accessing this field does not cause the path to be watched. If you'd " - + "like the repo rule or module extension to be sensitive to the path's existence, " - + "use the watch() method on the context object.") + """ + Returns true if the file or directory denoted by this path exists.
Note that \ + accessing this field does not cause the path to be watched. If you'd \ + like the repo rule or module extension to be sensitive to the path's existence, \ + use the watch() method on the context object. + """) public boolean exists() { return path.exists(); } @@ -165,10 +172,12 @@ public boolean exists() { name = "is_dir", structField = true, doc = - "Returns true if this path points to a directory.
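To make the path semantics above concrete, here is a minimal sketch using exists, is_dir, readdir(watch = ...), and watch(); the vendored directory layout is hypothetical. Note that exists and is_dir alone do not establish a dependency on the path.

# Illustrative sketch only; the directory layout is hypothetical.
def _vendor_impl(rctx):
    vendor_dir = rctx.workspace_root.get_child("third_party", "vendored")

    # exists and is_dir do not watch the path by themselves; watch() makes the
    # rule re-run when the directory starts or stops existing.
    rctx.watch(vendor_dir)
    if not vendor_dir.exists or not vendor_dir.is_dir:
        fail("expected a directory at " + str(vendor_dir))

    # readdir(watch = "auto") re-fetches when entries are created, deleted,
    # or renamed, but not when an existing entry's contents change.
    entries = [e.basename for e in vendor_dir.readdir(watch = "auto")]

    rctx.file("BUILD")
    rctx.file("entries.txt", "\n".join(entries) + "\n")

vendor_repo = repository_rule(implementation = _vendor_impl)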
Note that accessing this field does " - + "not cause the path to be watched. If you'd like the repo rule or module " - + "extension to be sensitive to whether the path is a directory or a file, use the " - + "watch() method on the context object.") + """ + Returns true if this path points to a directory.
Note that accessing this field does \ + not cause the path to be watched. If you'd like the repo rule or module \ + extension to be sensitive to whether the path is a directory or a file, use the \ + watch() method on the context object. + """) public boolean isDir() { return path.isDirectory(); } @@ -177,8 +186,10 @@ public boolean isDir() { name = "realpath", structField = true, doc = - "Returns the canonical path for this path by repeatedly replacing all symbolic links " - + "with their referents.") + """ + Returns the canonical path for this path by repeatedly replacing all symbolic links \ + with their referents. + """) public StarlarkPath realpath() throws IOException { return new StarlarkPath(ctx, path.resolveSymbolicLinks()); } diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java index 6be53b3218e418..7baa95a878a6cc 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java @@ -69,10 +69,12 @@ name = "repository_ctx", category = DocCategory.BUILTIN, doc = - "The context of the repository rule containing" - + " helper functions and information about attributes. You get a repository_ctx object" - + " as an argument to the implementation function when you create a" - + " repository rule.") + """ + The context of the repository rule containing \ + helper functions and information about attributes. You get a repository_ctx object \ + as an argument to the implementation function when you create a \ + repository rule. + """) public class StarlarkRepositoryContext extends StarlarkBaseExternalContext { private final Rule rule; private final PathPackageLocator packageLocator; @@ -158,8 +160,10 @@ public StarlarkPath getWorkspaceRoot() { name = "attr", structField = true, doc = - "A struct to access the values of the attributes. The values are provided by " - + "the user (if not, a default value is used).") + """ + A struct to access the values of the attributes. The values are provided by \ + the user (if not, a default value is used). + """) public StructImpl getAttr() { return attrObject; } @@ -243,11 +247,13 @@ public void symlink(Object target, Object linkName, StarlarkThread thread) @StarlarkMethod( name = "template", doc = - "Generates a new file using a template. Every occurrence in " - + "template of a key of substitutions will be replaced by " - + "the corresponding value. The result is written in path. An optional" - + "executable argument (default to true) can be set to turn on or off" - + "the executable bit.", + """ + Generates a new file using a template. Every occurrence in \ + template of a key of substitutions will be replaced by \ + the corresponding value. The result is written in path. An optional \ + executable argument (default to true) can be set to turn on or off \ + the executable bit. 
+ """, useStarlarkThread = true, parameters = { @Param( @@ -257,7 +263,7 @@ public void symlink(Object target, Object linkName, StarlarkThread thread) @ParamType(type = Label.class), @ParamType(type = StarlarkPath.class) }, - doc = "path of the file to create, relative to the repository directory."), + doc = "Path of the file to create, relative to the repository directory."), @Param( name = "template", allowedTypes = { @@ -265,29 +271,31 @@ public void symlink(Object target, Object linkName, StarlarkThread thread) @ParamType(type = Label.class), @ParamType(type = StarlarkPath.class) }, - doc = "path to the template file."), + doc = "Path to the template file."), @Param( name = "substitutions", defaultValue = "{}", named = true, - doc = "substitutions to make when expanding the template."), + doc = "Substitutions to make when expanding the template."), @Param( name = "executable", defaultValue = "True", named = true, - doc = "set the executable flag on the created file, true by default."), + doc = "Set the executable flag on the created file, true by default."), @Param( name = "watch_template", defaultValue = "'auto'", positional = false, named = true, doc = - "whether to watch the template file. Can be the string " - + "'yes', 'no', or 'auto'. Passing 'yes' is equivalent to immediately invoking " - + "the watch() method; passing 'no' does " - + "not attempt to watch the file; passing 'auto' will only attempt to watch " - + "the file when it is legal to do so (see watch() docs for more " - + "information."), + """ + Whether to watch the template file. Can be the string \ + 'yes', 'no', or 'auto'. Passing 'yes' is equivalent to immediately invoking \ + the watch() method; passing 'no' does \ + not attempt to watch the file; passing 'auto' will only attempt to watch \ + the file when it is legal to do so (see watch() docs for more \ + information. + """), }) public void createFileFromTemplate( Object path, @@ -356,16 +364,20 @@ protected ImmutableMap getRemoteExecProperties() throws EvalExce @StarlarkMethod( name = "delete", doc = - "Deletes a file or a directory. Returns a bool, indicating whether the file or directory" - + " was actually deleted by this call.", + """ + Deletes a file or a directory. Returns a bool, indicating whether the file or directory \ + was actually deleted by this call. + """, useStarlarkThread = true, parameters = { @Param( name = "path", allowedTypes = {@ParamType(type = String.class), @ParamType(type = StarlarkPath.class)}, doc = - "Path of the file to delete, relative to the repository directory, or absolute." - + " Can be a path or a string."), + """ + Path of the file to delete, relative to the repository directory, or absolute. \ + Can be a path or a string. + """), }) public boolean delete(Object pathObject, StarlarkThread thread) throws EvalException, RepositoryFunctionException, InterruptedException { @@ -386,12 +398,14 @@ public boolean delete(Object pathObject, StarlarkThread thread) @StarlarkMethod( name = "patch", doc = - "Apply a patch file to the root directory of external repository. " - + "The patch file should be a standard " - + "" - + "unified diff format file. " - + "The Bazel-native patch implementation doesn't support fuzz match and binary patch " - + "like the patch command line tool.", + """ + Apply a patch file to the root directory of external repository. \ + The patch file should be a standard \ + \ + unified diff format file. 
\ + The Bazel-native patch implementation doesn't support fuzz match and binary patch \ + like the patch command line tool. + """, useStarlarkThread = true, parameters = { @Param( @@ -402,25 +416,29 @@ public boolean delete(Object pathObject, StarlarkThread thread) @ParamType(type = StarlarkPath.class) }, doc = - "The patch file to apply, it can be label, relative path or absolute path. " - + "If it's a relative path, it will resolve to the repository directory."), + """ + The patch file to apply, it can be label, relative path or absolute path. \ + If it's a relative path, it will resolve to the repository directory. + """), @Param( name = "strip", named = true, defaultValue = "0", - doc = "strip the specified number of leading components from file names."), + doc = "Strip the specified number of leading components from file names."), @Param( name = "watch_patch", defaultValue = "'auto'", positional = false, named = true, doc = - "whether to watch the patch file. Can be the string " - + "'yes', 'no', or 'auto'. Passing 'yes' is equivalent to immediately invoking " - + "the watch() method; passing 'no' does " - + "not attempt to watch the file; passing 'auto' will only attempt to watch " - + "the file when it is legal to do so (see watch() docs for more " - + "information."), + """ + Whether to watch the patch file. Can be the string \ + 'yes', 'no', or 'auto'. Passing 'yes' is equivalent to immediately invoking \ + the watch() method; passing 'no' does \ + not attempt to watch the file; passing 'auto' will only attempt to watch \ + the file when it is legal to do so (see watch() docs for more \ + information. + """), }) public void patch(Object patchFile, StarlarkInt stripI, String watchPatch, StarlarkThread thread) throws EvalException, RepositoryFunctionException, InterruptedException { @@ -451,11 +469,13 @@ public void patch(Object patchFile, StarlarkInt stripI, String watchPatch, Starl @StarlarkMethod( name = "watch_tree", doc = - "Tells Bazel to watch for changes to any files or directories transitively under the " - + "given path. Any changes to the contents of files, the existence of files or " - + "directories, file names or directory names, will cause this repo to be " - + "refetched.
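A hedged sketch combining template() and patch() as documented above; the archive URL, template label, patch file, and substitution keys are all hypothetical placeholders.

# Illustrative sketch only; URLs, labels, and substitutions are placeholders.
def _patched_lib_impl(rctx):
    rctx.download_and_extract(
        url = "https://example.com/somelib-1.2.3.tar.gz",
        stripPrefix = "somelib-1.2.3",
    )

    # template(): every occurrence of a substitution key in the template is
    # replaced, and the result is written to the given path.
    rctx.template(
        "BUILD",
        Label("//third_party:somelib.BUILD.tmpl"),
        substitutions = {"{VERSION}": "1.2.3"},
        executable = False,
    )

    # patch(): applies a unified-diff patch relative to the repository root;
    # strip removes leading path components, as with `patch -p1`.
    rctx.patch(Label("//third_party:somelib-fix.patch"), strip = 1)

patched_lib = repository_rule(implementation = _patched_lib_impl)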
Note that attempting to watch paths inside the repo currently being " - + "fetched will result in an error. ", + """ + Tells Bazel to watch for changes to any files or directories transitively under the \ + given path. Any changes to the contents of files, the existence of files or \ + directories, file names or directory names, will cause this repo to be \ + refetched.
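And a minimal sketch of watch_tree() as described above; the mirrored directory is hypothetical. Unlike watch(), this re-fetches on any change anywhere under the given path.

# Illustrative sketch only; the mirrored source directory is hypothetical.
def _local_tree_impl(rctx):
    src = rctx.workspace_root.get_child("vendor", "protos")

    # watch_tree(): any change to file contents, file or directory names, or
    # existence anywhere under this path causes the repository to be re-fetched.
    rctx.watch_tree(src)

    rctx.symlink(src, "protos")
    rctx.file("BUILD", 'filegroup(name = "protos", srcs = glob(["protos/**"]))')

local_tree_repo = repository_rule(implementation = _local_tree_impl)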
Note that attempting to watch paths inside the repo currently being \ + fetched will result in an error. + """, parameters = { @Param( name = "path", @@ -464,7 +484,7 @@ public void patch(Object patchFile, StarlarkInt stripI, String watchPatch, Starl @ParamType(type = Label.class), @ParamType(type = StarlarkPath.class) }, - doc = "path of the directory tree to watch."), + doc = "Path of the directory tree to watch."), }) public void watchTree(Object path) throws EvalException, InterruptedException, RepositoryFunctionException { diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryModule.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryModule.java index bc370372242a41..bf9c1a9910f0e5 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryModule.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryModule.java @@ -137,9 +137,11 @@ public StarlarkCallable repositoryRule( name = "repository_rule", category = DocCategory.BUILTIN, doc = - "A callable value that may be invoked during evaluation of the WORKSPACE file or within" - + " the implementation function of a module extension to instantiate and return a" - + " repository rule.") + """ + A callable value that may be invoked during evaluation of the WORKSPACE file or within \ + the implementation function of a module extension to instantiate and return a \ + repository rule. + """) public static final class RepositoryRuleFunction implements StarlarkCallable, StarlarkExportable, RuleFunction { private final RuleClass.Builder builder; @@ -253,11 +255,7 @@ private Object createRuleLegacy(StarlarkThread thread, Dict kwar try { RuleClass ruleClass = builder.build(ruleClassName, ruleClassName); Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingSymbolicMacros(thread, "repository rules"); - if (!pkgBuilder.isRepoRulePackage()) { - throw Starlark.errorf( - "repo rules may only be called from a WORKSPACE file or a macro loaded from there"); - } + Package.Builder.fromOrFailAllowWorkspaceOnly(thread, "repository rules"); // TODO(adonovan): is this cast safe? Check. String name = (String) kwargs.get("name"); diff --git a/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequest.java b/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequest.java index 2a8b2930241ecf..5496ae55d6d817 100644 --- a/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequest.java +++ b/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequest.java @@ -51,10 +51,9 @@ import javax.annotation.Nullable; /** - * A BuildRequest represents a single invocation of the build tool by a user. - * A request specifies a list of targets to be built for a single - * configuration, a pair of output/error streams, and additional options such - * as --keep_going, --jobs, etc. + * A BuildRequest represents a single invocation of the build tool by a user. A request specifies a + * list of targets to be built for a single configuration, a pair of output/error streams, and + * additional options such as --keep_going, --jobs, etc. */ public class BuildRequest implements OptionsProvider { public static final String VALIDATION_ASPECT_NAME = "ValidateTarget"; @@ -189,10 +188,7 @@ public BuildRequest build() { /** A human-readable description of all the non-default option settings. 
*/ private final String optionsDescription; - /** - * The name of the Blaze command that the user invoked. - * Used for --announce. - */ + /** The name of the Blaze command that the user invoked. Used for --announce. */ private final String commandName; private final OutErr outErr; @@ -205,6 +201,7 @@ public BuildRequest build() { private final boolean runTests; private final boolean checkForActionConflicts; private final boolean reportIncompatibleTargets; + private final ImmutableList userOptions; private BuildRequest( String commandName, @@ -224,6 +221,8 @@ private BuildRequest( this.targets = targets; this.id = id; this.startTimeMillis = startTimeMillis; + this.userOptions = + options.getUserOptions() == null ? ImmutableList.of() : options.getUserOptions(); this.optionsCache = Caffeine.newBuilder() .build( @@ -278,15 +277,20 @@ public Map getExplicitStarlarkOptions( } /** - * Returns a unique identifier that universally identifies this build. + * Returns the list of options that were parsed from either a user blazerc file or the command + * line. */ + @Override + public ImmutableList getUserOptions() { + return userOptions; + } + + /** Returns a unique identifier that universally identifies this build. */ public UUID getId() { return id; } - /** - * Returns the name of the Blaze command that the user invoked. - */ + /** Returns the name of the Blaze command that the user invoked. */ public String getCommandName() { return commandName; } @@ -295,24 +299,19 @@ boolean isRunningInEmacs() { return runningInEmacs; } - /** - * Returns true if tests should be run by the build tool. - */ + /** Returns true if tests should be run by the build tool. */ public boolean shouldRunTests() { return runTests; } - /** - * Returns the (immutable) list of targets to build in commandline - * form. - */ + /** Returns the (immutable) list of targets to build in commandline form. */ public List getTargets() { return targets; } /** - * Returns the output/error streams to which errors and progress messages - * should be sent during the fulfillment of this request. + * Returns the output/error streams to which errors and progress messages should be sent during + * the fulfillment of this request. */ public OutErr getOutErr() { return outErr; @@ -324,10 +323,7 @@ public T getOptions(Class clazz) { return (T) optionsCache.get(clazz).orNull(); } - - /** - * Returns the set of command-line options specified for this request. - */ + /** Returns the set of command-line options specified for this request. */ public BuildRequestOptions getBuildOptions() { return getOptions(BuildRequestOptions.class); } @@ -337,17 +333,12 @@ public PackageOptions getPackageOptions() { return getOptions(PackageOptions.class); } - /** - * Returns the set of options related to the loading phase. - */ + /** Returns the set of options related to the loading phase. */ public LoadingOptions getLoadingOptions() { return getOptions(LoadingOptions.class); } - /** - * Returns the set of command-line options related to the view specified for - * this request. - */ + /** Returns the set of command-line options related to the view specified for this request. */ public AnalysisOptions getViewOptions() { return getOptions(AnalysisOptions.class); } @@ -361,24 +352,20 @@ public boolean getKeepGoing() { int getLoadingPhaseThreadCount() { return getOptions(LoadingPhaseThreadsOption.class).threads; } - /** - * Returns the set of execution options specified for this request. - */ + + /** Returns the set of execution options specified for this request. 
*/ public ExecutionOptions getExecutionOptions() { return getOptions(ExecutionOptions.class); } - /** - * Returns the human-readable description of the non-default options - * for this build request. - */ + /** Returns the human-readable description of the non-default options for this build request. */ public String getOptionsDescription() { return optionsDescription; } /** - * Return the time (according to System.currentTimeMillis()) at which the - * service of this request was started. + * Return the time (according to System.currentTimeMillis()) at which the service of this request + * was started. */ public long getStartTime() { return startTimeMillis; @@ -403,8 +390,10 @@ public List validateOptions() { int jobs = getBuildOptions().jobs; if (localTestJobs > jobs) { warnings.add( - String.format("High value for --local_test_jobs: %d. This exceeds the value for --jobs: " - + "%d. Only up to %d local tests will run concurrently.", localTestJobs, jobs, jobs)); + String.format( + "High value for --local_test_jobs: %d. This exceeds the value for --jobs: " + + "%d. Only up to %d local tests will run concurrently.", + localTestJobs, jobs, jobs)); } // Validate other BuildRequest options. @@ -423,7 +412,7 @@ public TopLevelArtifactContext getTopLevelArtifactContext() { getOptions(BuildEventProtocolOptions.class).expandFilesets, getOptions(BuildEventProtocolOptions.class).fullyResolveFilesetSymlinks, OutputGroupInfo.determineOutputGroups( - buildOptions.outputGroups, validationMode(), /*shouldRunTests=*/ shouldRunTests())); + buildOptions.outputGroups, validationMode(), /* shouldRunTests= */ shouldRunTests())); } public ImmutableList getAspects() { diff --git a/src/main/java/com/google/devtools/build/lib/buildtool/BuildTool.java b/src/main/java/com/google/devtools/build/lib/buildtool/BuildTool.java index 9e8850f6ff5c33..838bafb6609074 100644 --- a/src/main/java/com/google/devtools/build/lib/buildtool/BuildTool.java +++ b/src/main/java/com/google/devtools/build/lib/buildtool/BuildTool.java @@ -194,6 +194,7 @@ public void buildTargets(BuildRequest request, BuildResult result, TargetValidat initializeOutputFilter(request); if (env.withMergedAnalysisAndExecutionSourceOfTruth()) { + // Skymeld is useful only for commands that perform execution. 
buildTargetsWithMergedAnalysisExecution(request, result, validator, buildOptions); return; } diff --git a/src/main/java/com/google/devtools/build/lib/exec/RunfilesTreeUpdater.java b/src/main/java/com/google/devtools/build/lib/exec/RunfilesTreeUpdater.java index e48be281141e4f..738358e3d3920e 100644 --- a/src/main/java/com/google/devtools/build/lib/exec/RunfilesTreeUpdater.java +++ b/src/main/java/com/google/devtools/build/lib/exec/RunfilesTreeUpdater.java @@ -113,17 +113,17 @@ public void updateRunfiles( } private void updateRunfilesTree( - RunfilesTree tree, - ImmutableMap env, - OutErr outErr) + RunfilesTree tree, ImmutableMap env, OutErr outErr) throws IOException, ExecException, InterruptedException { - Path runfilesDirPath = execRoot.getRelative(tree.getExecPath()); - Path inputManifest = RunfilesSupport.inputManifestPath(runfilesDirPath); + Path runfilesDir = execRoot.getRelative(tree.getExecPath()); + Path inputManifest = + execRoot.getRelative(RunfilesSupport.inputManifestExecPath(tree.getExecPath())); if (!inputManifest.exists()) { return; } - Path outputManifest = RunfilesSupport.outputManifestPath(runfilesDirPath); + Path outputManifest = + execRoot.getRelative(RunfilesSupport.outputManifestExecPath(tree.getExecPath())); try { // Avoid rebuilding the runfiles directory if the manifest in it matches the input manifest, // implying the symlinks exist and are already up to date. If the output manifest is a @@ -139,37 +139,33 @@ private void updateRunfilesTree( && Arrays.equals( DigestUtils.getDigestWithManualFallback(outputManifest, xattrProvider), DigestUtils.getDigestWithManualFallback(inputManifest, xattrProvider)) - && (OS.getCurrent() != OS.WINDOWS || isRunfilesDirectoryPopulated(runfilesDirPath))) { + && (OS.getCurrent() != OS.WINDOWS + || isRunfilesDirectoryPopulated(runfilesDir, outputManifest))) { return; } } catch (IOException e) { // Ignore it - we will just try to create runfiles directory. } - if (!runfilesDirPath.exists()) { - runfilesDirPath.createDirectoryAndParents(); + if (!runfilesDir.exists()) { + runfilesDir.createDirectoryAndParents(); } SymlinkTreeHelper helper = new SymlinkTreeHelper( - inputManifest, runfilesDirPath, /* filesetTree= */ false, tree.getWorkspaceName()); + inputManifest, runfilesDir, /* filesetTree= */ false, tree.getWorkspaceName()); switch (tree.getSymlinksMode()) { - case SKIP: - helper.clearRunfilesDirectory(); - break; - case EXTERNAL: - helper.createSymlinksUsingCommand(execRoot, binTools, env, outErr); - break; - case INTERNAL: - helper.createSymlinksDirectly(runfilesDirPath, tree.getMapping()); + case SKIP -> helper.clearRunfilesDirectory(); + case EXTERNAL -> helper.createSymlinksUsingCommand(execRoot, binTools, env, outErr); + case INTERNAL -> { + helper.createSymlinksDirectly(runfilesDir, tree.getMapping()); outputManifest.createSymbolicLink(inputManifest); - break; + } } } - private boolean isRunfilesDirectoryPopulated(Path runfilesDirPath) { - Path outputManifest = RunfilesSupport.outputManifestPath(runfilesDirPath); + private static boolean isRunfilesDirectoryPopulated(Path runfilesDir, Path outputManifest) { String relativeRunfilePath; try (BufferedReader reader = new BufferedReader(new InputStreamReader(outputManifest.getInputStream(), ISO_8859_1))) { @@ -180,6 +176,6 @@ private boolean isRunfilesDirectoryPopulated(Path runfilesDirPath) { return false; } // The runfile could be a dangling symlink. 
- return runfilesDirPath.getRelative(relativeRunfilePath).exists(Symlinks.NOFOLLOW); + return runfilesDir.getRelative(relativeRunfilePath).exists(Symlinks.NOFOLLOW); } } diff --git a/src/main/java/com/google/devtools/build/lib/metrics/PsInfoCollector.java b/src/main/java/com/google/devtools/build/lib/metrics/PsInfoCollector.java index f39bc70bf01370..c8c6d5d19a1ae1 100644 --- a/src/main/java/com/google/devtools/build/lib/metrics/PsInfoCollector.java +++ b/src/main/java/com/google/devtools/build/lib/metrics/PsInfoCollector.java @@ -82,7 +82,6 @@ public synchronized ResourceSnapshot collectResourceUsage( /** Updates current snapshot of all processes state, using ps command. */ private void updatePsSnapshot(Clock clock) { - // TODO(b/279003887): add exception if we couldn't collect the metrics. ImmutableMap pidToPsInfo = collectDataFromPs(); ImmutableSetMultimap pidToChildrenPsInfo = diff --git a/src/main/java/com/google/devtools/build/lib/packages/AspectDefinition.java b/src/main/java/com/google/devtools/build/lib/packages/AspectDefinition.java index ac384fba44ff70..dd8a0f76d40fcc 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/AspectDefinition.java +++ b/src/main/java/com/google/devtools/build/lib/packages/AspectDefinition.java @@ -192,6 +192,11 @@ public boolean canPropagateToToolchainType(Label toolchainType) { return propagateToToolchainsTypes.contains(toolchainType); } + /** Returns whether the aspect propagates to toolchains. */ + public boolean propagatesToToolchains() { + return !propagateToToolchainsTypes.isEmpty(); + } + /** Returns the set of configuration fragments required by this Aspect. */ public ConfigurationFragmentPolicy getConfigurationFragmentPolicy() { return configurationFragmentPolicy; diff --git a/src/main/java/com/google/devtools/build/lib/packages/BuildGlobals.java b/src/main/java/com/google/devtools/build/lib/packages/BuildGlobals.java index 6debb8c21f8b0e..03561df8b23470 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/BuildGlobals.java +++ b/src/main/java/com/google/devtools/build/lib/packages/BuildGlobals.java @@ -75,8 +75,7 @@ public NoneType environmentGroup( StarlarkThread thread) throws EvalException { Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingSymbolicMacros(thread, "environment_group()"); - Package.Builder.fromOrFailDisallowingWorkspace(thread, "environment_group()"); + Package.Builder.fromOrFailAllowBuildOnly(thread, "environment_group()"); List

Use this method when implementing APIs that should not be accessible from symbolic macros, - * such as {@code glob()} or {@code existing_rule()}. + *

If {@code allowBuild} is false, this method also throws if we're currently executing a + * BUILD file (or a legacy macro called from a BUILD file). + + *

If {@code allowSymbolicMacros} is false, this method also throws if we're currently + * executing a symbolic macro implementation. (Legacy macros that are not called from within a + * symbolic macro are fine.) * - *

This method succeeds when called from a legacy macro (that is not itself called from any - * symbolic macro). + *

If {@code allowWorkspace} is false, this method also throws if we're currently executing a + * WORKSPACE file (or a legacy macro called from a WORKSPACE file). + * + *

It is not allowed for all three bool params to be false. */ @CanIgnoreReturnValue - public static Builder fromOrFailDisallowingSymbolicMacros(StarlarkThread thread, String what) + public static Builder fromOrFail( + StarlarkThread thread, + String what, + boolean allowBuild, + boolean allowSymbolicMacros, + boolean allowWorkspace) throws EvalException { + Preconditions.checkArgument(allowBuild || allowSymbolicMacros || allowWorkspace); + @Nullable StarlarkThreadContext ctx = thread.getThreadLocal(StarlarkThreadContext.class); + boolean bad = false; if (ctx instanceof Builder builder) { - if (builder.macroStack.isEmpty()) { + bad |= !allowBuild && !builder.isRepoRulePackage(); + bad |= !allowSymbolicMacros && !builder.macroStack.isEmpty(); + bad |= !allowWorkspace && builder.isRepoRulePackage(); + if (!bad) { return builder; } } - boolean macrosEnabled = + boolean symbolicMacrosEnabled = thread .getSemantics() .getBool(BuildLanguageOptions.EXPERIMENTAL_ENABLE_FIRST_CLASS_MACROS); + ArrayList allowedUses = new ArrayList<>(); + if (allowBuild) { + // Only disambiguate as "legacy" if the alternative, symbolic macros, are enabled. + allowedUses.add( + String.format("a BUILD file (or %smacro)", symbolicMacrosEnabled ? "legacy " : "")); + } + // Even if symbolic macros are allowed, don't mention them in the error message unless they + // are enabled. + if (allowSymbolicMacros && symbolicMacrosEnabled) { + allowedUses.add("a symbolic macro"); + } + if (allowWorkspace) { + allowedUses.add("a WORKSPACE file"); + } throw Starlark.errorf( - "%s can only be used while evaluating a BUILD file, a WORKSPACE file, or a %s loaded from" - + " there", - what, macrosEnabled ? "legacy macro" : "macro"); + "%s can only be used while evaluating %s", + what, StringUtil.joinEnglishList(allowedUses, "or")); + } + + /** Convenience method for {@link #fromOrFail} that permits any context with a Builder. */ + @CanIgnoreReturnValue + public static Builder fromOrFail(StarlarkThread thread, String what) throws EvalException { + return fromOrFail( + thread, + what, + /* allowBuild= */ true, + /* allowSymbolicMacros= */ true, + /* allowWorkspace= */ true); } /** - * Same as {@link #fromOrFail}, but also throws {@link EvalException} if we're currently - * evaluating a WORKSPACE file. - * - *

Use this method when implementing APIs that should not be accessible from symbolic macros, - * such as {@code glob()} or {@code package_name()}. + * Convenience method for {@link #fromOrFail} that permits only BUILD contexts (without symbolic + * macros). */ @CanIgnoreReturnValue - public static Builder fromOrFailDisallowingWorkspace(StarlarkThread thread, String what) + public static Builder fromOrFailAllowBuildOnly(StarlarkThread thread, String what) throws EvalException { - @Nullable StarlarkThreadContext ctx = thread.getThreadLocal(StarlarkThreadContext.class); - if (ctx instanceof Builder builder && !builder.isRepoRulePackage()) { - return builder; - } - throw Starlark.errorf( - "%s can only be used while evaluating a BUILD file, or a macro loaded from there", what); + return fromOrFail( + thread, + what, + /* allowBuild= */ true, + /* allowSymbolicMacros= */ false, + /* allowWorkspace= */ false); + } + + /** Convenience method for {@link #fromOrFail} that permits only WORKSPACE contexts. */ + @CanIgnoreReturnValue + public static Builder fromOrFailAllowWorkspaceOnly(StarlarkThread thread, String what) + throws EvalException { + return fromOrFail( + thread, + what, + /* allowBuild= */ false, + /* allowSymbolicMacros= */ false, + /* allowWorkspace= */ true); + } + + /** + * Convenience method for {@link #fromOrFail} that permits BUILD or WORKSPACE contexts (without + * symbolic macros). + */ + @CanIgnoreReturnValue + public static Builder fromOrFailDisallowSymbolicMacros(StarlarkThread thread, String what) + throws EvalException { + return fromOrFail( + thread, + what, + /* allowBuild= */ true, + /* allowSymbolicMacros= */ false, + /* allowWorkspace= */ true); + } + + /** Convenience method for {@link #fromOrFail} that permits BUILD or symbolic macro contexts. 
*/ + @CanIgnoreReturnValue + public static Builder fromOrFailDisallowWorkspace(StarlarkThread thread, String what) + throws EvalException { + return fromOrFail( + thread, + what, + /* allowBuild= */ true, + /* allowSymbolicMacros= */ true, + /* allowWorkspace= */ false); } PackageIdentifier getPackageIdentifier() { diff --git a/src/main/java/com/google/devtools/build/lib/packages/PackageCallable.java b/src/main/java/com/google/devtools/build/lib/packages/PackageCallable.java index d72b0c05b57247..0a9efa721c0b07 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/PackageCallable.java +++ b/src/main/java/com/google/devtools/build/lib/packages/PackageCallable.java @@ -38,8 +38,7 @@ protected PackageCallable() {} useStarlarkThread = true) public Object packageCallable(Map kwargs, StarlarkThread thread) throws EvalException { - Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingSymbolicMacros(thread, "package()"); + Package.Builder pkgBuilder = Package.Builder.fromOrFailAllowBuildOnly(thread, "package()"); if (pkgBuilder.isPackageFunctionUsed()) { throw new EvalException("'package' can only be used once per BUILD file"); } diff --git a/src/main/java/com/google/devtools/build/lib/packages/StarlarkNativeModule.java b/src/main/java/com/google/devtools/build/lib/packages/StarlarkNativeModule.java index 3fde1134bd5b17..4aeffeb9c00f5d 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/StarlarkNativeModule.java +++ b/src/main/java/com/google/devtools/build/lib/packages/StarlarkNativeModule.java @@ -95,9 +95,7 @@ public Sequence glob( Object allowEmptyArgument, StarlarkThread thread) throws EvalException, InterruptedException { - Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingSymbolicMacros(thread, "glob()"); - Package.Builder.fromOrFailDisallowingWorkspace(thread, "glob()"); + Package.Builder pkgBuilder = Package.Builder.fromOrFailAllowBuildOnly(thread, "glob()"); List includes = Types.STRING_LIST.convert(include, "'glob' argument"); List excludes = Types.STRING_LIST.convert(exclude, "'glob' argument"); @@ -435,7 +433,7 @@ public Object existingRule(String name, StarlarkThread thread) throws EvalExcept return Starlark.NONE; } Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingSymbolicMacros(thread, "existing_rule()"); + Package.Builder.fromOrFailDisallowSymbolicMacros(thread, "existing_rule()"); Target target = pkgBuilder.getTarget(name); if (target instanceof Rule /* `instanceof` also verifies that target != null */) { Rule rule = (Rule) target; @@ -508,7 +506,7 @@ public Object existingRules(StarlarkThread thread) throws EvalException { return Dict.empty(); } Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingSymbolicMacros(thread, "existing_rules()"); + Package.Builder.fromOrFailDisallowSymbolicMacros(thread, "existing_rules()"); if (thread .getSemantics() .getBool(BuildLanguageOptions.INCOMPATIBLE_EXISTING_RULES_IMMUTABLE_VIEW)) { @@ -531,7 +529,7 @@ public NoneType packageGroup( String name, Sequence packagesO, Sequence includesO, StarlarkThread thread) throws EvalException { Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingWorkspace(thread, "package_group()"); + Package.Builder.fromOrFailDisallowWorkspace(thread, "package_group()"); List packages = Types.STRING_LIST.convert(packagesO, "'package_group.packages argument'"); @@ -567,7 +565,7 @@ public NoneType exportsFiles( Sequence srcs, Object visibilityO, Object licensesO, StarlarkThread thread) throws EvalException { 
Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingWorkspace(thread, "exports_files()"); + Package.Builder.fromOrFailDisallowWorkspace(thread, "exports_files()"); List files = Types.STRING_LIST.convert(srcs, "'exports_files' operand"); RuleVisibility visibility = @@ -608,21 +606,20 @@ public NoneType exportsFiles( @Override public String packageName(StarlarkThread thread) throws EvalException { Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingWorkspace(thread, "package_name()"); + Package.Builder.fromOrFailDisallowWorkspace(thread, "package_name()"); return pkgBuilder.getPackageIdentifier().getPackageFragment().getPathString(); } @Override public String repositoryName(StarlarkThread thread) throws EvalException { // for legacy reasons, this is prefixed with a single '@'. - Package.Builder.fromOrFailDisallowingWorkspace(thread, "repository_name()"); + Package.Builder.fromOrFailDisallowWorkspace(thread, "repository_name()"); return '@' + repoName(thread); } @Override public String repoName(StarlarkThread thread) throws EvalException { - Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingWorkspace(thread, "repo_name()"); + Package.Builder pkgBuilder = Package.Builder.fromOrFailDisallowWorkspace(thread, "repo_name()"); return pkgBuilder.getPackageIdentifier().getRepository().getName(); } @@ -632,7 +629,7 @@ public Label packageRelativeLabel(Object input, StarlarkThread thread) throws Ev LabelConverter labelConverter = thread.getThreadLocal(LabelConverter.class); if (labelConverter == null) { Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingWorkspace(thread, "package_relative_label()"); + Package.Builder.fromOrFailDisallowWorkspace(thread, "package_relative_label()"); labelConverter = pkgBuilder.getLabelConverter(); } if (input instanceof Label inputLabel) { @@ -649,7 +646,7 @@ public Label packageRelativeLabel(Object input, StarlarkThread thread) throws Ev @Nullable public String moduleName(StarlarkThread thread) throws EvalException { Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingWorkspace(thread, "module_name()"); + Package.Builder.fromOrFailDisallowWorkspace(thread, "module_name()"); return pkgBuilder.getAssociatedModuleName().orElse(null); } @@ -657,7 +654,7 @@ public String moduleName(StarlarkThread thread) throws EvalException { @Nullable public String moduleVersion(StarlarkThread thread) throws EvalException { Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingWorkspace(thread, "module_version()"); + Package.Builder.fromOrFailDisallowWorkspace(thread, "module_version()"); return pkgBuilder.getAssociatedModuleVersion().orElse(null); } @@ -838,9 +835,7 @@ private static Object starlarkifyValue(Mutability mu, Object val, Package pkg) { public Sequence subpackages( Sequence include, Sequence exclude, boolean allowEmpty, StarlarkThread thread) throws EvalException, InterruptedException { - Package.Builder pkgBuilder = - Package.Builder.fromOrFailDisallowingSymbolicMacros(thread, "subpackages()"); - Package.Builder.fromOrFailDisallowingWorkspace(thread, "subpackages()"); + Package.Builder pkgBuilder = Package.Builder.fromOrFailAllowBuildOnly(thread, "subpackages()"); List includes = Types.STRING_LIST.convert(include, "'subpackages' argument"); List excludes = Types.STRING_LIST.convert(exclude, "'subpackages' argument"); diff --git a/src/main/java/com/google/devtools/build/lib/packages/WorkspaceFactory.java b/src/main/java/com/google/devtools/build/lib/packages/WorkspaceFactory.java 
index ebe2c32662de91..26b658ad146f33 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/WorkspaceFactory.java +++ b/src/main/java/com/google/devtools/build/lib/packages/WorkspaceFactory.java @@ -245,7 +245,7 @@ public Object call(StarlarkThread thread, Tuple args, Dict kwarg } try { Package.Builder builder = - Package.Builder.fromOrFailDisallowingSymbolicMacros(thread, "repository rules"); + Package.Builder.fromOrFailAllowWorkspaceOnly(thread, "repository rules"); // TODO(adonovan): this cast doesn't look safe! String externalRepoName = (String) kwargs.get("name"); if (!allowOverride diff --git a/src/main/java/com/google/devtools/build/lib/packages/WorkspaceGlobals.java b/src/main/java/com/google/devtools/build/lib/packages/WorkspaceGlobals.java index 988d99dbd9b789..78a45ae14f01fb 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/WorkspaceGlobals.java +++ b/src/main/java/com/google/devtools/build/lib/packages/WorkspaceGlobals.java @@ -69,7 +69,7 @@ public void workspace( } // Add entry in repository map from "@name" --> "@" to avoid issue where bazel // treats references to @name as a separate external repo - Package.Builder.fromOrFailDisallowingSymbolicMacros(thread, "workspace()") + Package.Builder.fromOrFailAllowWorkspaceOnly(thread, "workspace()") .setWorkspaceName(name) .addRepositoryMappingEntry(RepositoryName.MAIN, name, RepositoryName.MAIN); } @@ -106,8 +106,7 @@ public void registerExecutionPlatforms(Sequence platformLabels, StarlarkThrea throws EvalException { // Add to the package definition for later. Package.Builder builder = - Package.Builder.fromOrFailDisallowingSymbolicMacros( - thread, "register_execution_platforms()"); + Package.Builder.fromOrFailAllowWorkspaceOnly(thread, "register_execution_platforms()"); List patterns = Sequence.cast(platformLabels, String.class, "platform_labels"); builder.addRegisteredExecutionPlatforms(parsePatterns(patterns, builder, thread)); } @@ -117,7 +116,7 @@ public void registerToolchains(Sequence toolchainLabels, StarlarkThread threa throws EvalException { // Add to the package definition for later. 
Package.Builder builder = - Package.Builder.fromOrFailDisallowingSymbolicMacros(thread, "register_toolchains()"); + Package.Builder.fromOrFailAllowWorkspaceOnly(thread, "register_toolchains()"); List patterns = Sequence.cast(toolchainLabels, String.class, "toolchain_labels"); ImmutableList targetPatterns = parsePatterns(patterns, builder, thread); @@ -154,8 +153,7 @@ public void bind(String name, Object actual, StarlarkThread thread) throw Starlark.errorf("%s", e.getMessage()); } try { - Package.Builder builder = - Package.Builder.fromOrFailDisallowingSymbolicMacros(thread, "bind()"); + Package.Builder builder = Package.Builder.fromOrFailAllowWorkspaceOnly(thread, "bind()"); RuleClass ruleClass = ruleClassMap.get("bind"); RepositoryName currentRepo = getCurrentRepoName(thread); WorkspaceFactoryHelper.addBindRule( diff --git a/src/main/java/com/google/devtools/build/lib/profiler/JsonTraceFileWriter.java b/src/main/java/com/google/devtools/build/lib/profiler/JsonTraceFileWriter.java index cdb3c12b0ac9ce..ae3cd9e8c3d73f 100644 --- a/src/main/java/com/google/devtools/build/lib/profiler/JsonTraceFileWriter.java +++ b/src/main/java/com/google/devtools/build/lib/profiler/JsonTraceFileWriter.java @@ -25,14 +25,26 @@ import java.time.Duration; import java.time.Instant; import java.util.HashMap; +import java.util.Queue; import java.util.UUID; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.Semaphore; +import java.util.concurrent.locks.Condition; +import java.util.concurrent.locks.ReentrantLock; import javax.annotation.Nullable; +import javax.annotation.concurrent.GuardedBy; /** Writes the profile in Json Trace file format. */ class JsonTraceFileWriter implements Runnable { - protected final BlockingQueue queue; + protected final Queue queue; + private final ReentrantLock lock = new ReentrantLock(); + private final Condition condition = lock.newCondition(); + // 1_000_000 is an arbitrarily chosen value that is large enough to ensure that: + // 1. If the producers are slower than the consumer (the normal case), they incur no overhead + // when posting new events. + // 2. Otherwise (e.g. with --noslim_profile and --record_full_profiler_data), it eventually + // slows down the producers to avoid an OOM. + private final Semaphore availableEventSlots = new Semaphore(1_000_000); protected final Thread thread; protected IOException savedException; @@ -57,7 +69,7 @@ class JsonTraceFileWriter implements Runnable { boolean slimProfile, String outputBase, UUID buildID) { - this.queue = new LinkedBlockingQueue<>(); + this.queue = new ConcurrentLinkedQueue<>(); this.thread = new Thread(this, "profile-writer-thread"); this.outStream = outStream; this.profileStartTimeNanos = profileStartTimeNanos; @@ -69,6 +81,8 @@ class JsonTraceFileWriter implements Runnable { public void shutdown() throws IOException { // Add poison pill to queue and then wait for writer thread to shut down. queue.add(POISON_PILL); + notifyConsumer(/* force= */ true); + try { thread.join(); } catch (InterruptedException e) { @@ -89,9 +103,18 @@ public void enqueue(TraceData data) { // at creation time. if (!Thread.currentThread().isVirtual() && !metadataPosted.get()) { metadataPosted.set(Boolean.TRUE); + availableEventSlots.acquireUninterruptibly(2); queue.add(new ThreadMetadata()); + } else { + availableEventSlots.acquireUninterruptibly(); } queue.add(data); + // Not forcing notification to avoid blocking on the lock.
The signal may therefore not + // be sent if the consumer is holding the lock -- either because it is draining the event queue + // or because it is about to wait on the condition. The former case is fine. In the latter case + // we fail to notify the consumer, but the assumption is that events arrive continuously, so the + // next event will notify it. + notifyConsumer(/* force= */ false); } private static final class MergedEvent { @@ -161,12 +184,40 @@ private static boolean isCandidateForMerging(TaskData data) { && data.type != ProfilerTask.CRITICAL_PATH_COMPONENT; } + private void notifyConsumer(boolean force) { + boolean locked; + if (force) { + lock.lock(); + locked = true; + } else { + locked = lock.tryLock(); + } + if (locked) { + try { + condition.signal(); + } finally { + lock.unlock(); + } + } + } + + @GuardedBy("lock") + private TraceData takeData() throws InterruptedException { + TraceData data; + while ((data = queue.poll()) == null) { + condition.await(); + } + availableEventSlots.release(); + return data; + } + /** * Saves all gathered information from taskQueue queue to the file. Method is invoked internally * by the Timer-based thread and at the end of profiling session. */ @Override public void run() { + lock.lock(); try { boolean receivedPoisonPill = false; try (JsonWriter writer = @@ -195,7 +246,7 @@ public void run() { HashMap eventsPerThread = new HashMap<>(); int eventCount = 0; TraceData data; - while ((data = queue.take()) != POISON_PILL) { + while ((data = takeData()) != POISON_PILL) { Preconditions.checkNotNull(data); eventCount++; @@ -225,13 +276,15 @@ && isCandidateForMerging((TaskData) data)) { } catch (IOException e) { this.savedException = e; if (!receivedPoisonPill) { - while (queue.take() != POISON_PILL) { + while (takeData() != POISON_PILL) { // We keep emptying the queue, but we can't write anything. } } } } catch (InterruptedException e) { // Exit silently. + } finally { + lock.unlock(); } } } diff --git a/src/main/java/com/google/devtools/build/lib/profiler/Profiler.java b/src/main/java/com/google/devtools/build/lib/profiler/Profiler.java index 749233be2f82ee..4ac7135ecaa589 100644 --- a/src/main/java/com/google/devtools/build/lib/profiler/Profiler.java +++ b/src/main/java/com/google/devtools/build/lib/profiler/Profiler.java @@ -622,7 +622,7 @@ public void logCounters( * @param description task description. May be stored until end of build. */ private void logTask(long startTimeNanos, long duration, ProfilerTask type, String description) { - var threadId = borrowLaneAndGetLaneId(); + var lane = borrowLane(); try { checkNotNull(description); checkState(!description.isEmpty(), "No description -> not helpful"); @@ -641,7 +641,7 @@ private void logTask(long startTimeNanos, long duration, ProfilerTask type, Stri // #clear.
JsonTraceFileWriter currentWriter = writerRef.get(); if (wasTaskSlowEnoughToRecord(type, duration)) { - TaskData data = new TaskData(threadId, startTimeNanos, type, description); + TaskData data = new TaskData(getLaneId(lane), startTimeNanos, type, description); data.durationNanos = duration; if (currentWriter != null) { currentWriter.enqueue(data); @@ -655,7 +655,7 @@ private void logTask(long startTimeNanos, long duration, ProfilerTask type, Stri } } } finally { - releaseLane(); + releaseLane(lane); } } @@ -719,12 +719,12 @@ void logEvent(ProfilerTask type, String description) { private SilentCloseable reallyProfile(ProfilerTask type, String description) { final long startTimeNanos = clock.nanoTime(); - long laneId = borrowLaneAndGetLaneId(); + var lane = borrowLane(); return () -> { try { - completeTask(laneId, startTimeNanos, type, description); + completeTask(getLaneId(lane), startTimeNanos, type, description); } finally { - releaseLane(); + releaseLane(lane); } }; } @@ -797,11 +797,11 @@ public SilentCloseable profileAction( checkNotNull(description); if (isActive() && isProfiling(type)) { final long startTimeNanos = clock.nanoTime(); - var laneId = borrowLaneAndGetLaneId(); + var lane = borrowLane(); return () -> { try { completeAction( - laneId, + getLaneId(lane), startTimeNanos, type, description, @@ -809,7 +809,7 @@ public SilentCloseable profileAction( includePrimaryOutput ? primaryOutput : null, includeTargetLabel ? targetLabel : null); } finally { - releaseLane(); + releaseLane(lane); } }; } else { @@ -829,11 +829,11 @@ private boolean countAction(ProfilerTask type) { } public void completeTask(long startTimeNanos, ProfilerTask type, String description) { - var laneId = borrowLaneAndGetLaneId(); + var lane = borrowLane(); try { - completeTask(laneId, startTimeNanos, type, description); + completeTask(getLaneId(lane), startTimeNanos, type, description); } finally { - releaseLane(); + releaseLane(lane); } } @@ -925,9 +925,9 @@ private Lane acquire(String prefix) { return laneGenerator.acquire(); } - private void release(String prefix, Lane lane) { + private void release(Lane lane) { checkState(isActive()); - var laneGenerator = checkNotNull(laneGenerators.get(prefix)); + var laneGenerator = lane.laneGenerator; laneGenerator.release(lane); } @@ -937,10 +937,12 @@ private void reset() { } private static class Lane implements Comparable { + private final LaneGenerator laneGenerator; private final long id; private int refCount; - private Lane(long id) { + private Lane(LaneGenerator laneGenerator, long id) { + this.laneGenerator = laneGenerator; this.id = id; } @@ -965,15 +967,15 @@ public Lane acquire() { var lane = availableLanes.poll(); // It might create more virtual lanes, but it's fine for our purpose. 
if (lane == null) { - long newLaneId = nextLaneId.getAndIncrement(); + lane = new Lane(this, nextLaneId.getAndIncrement()); + int newLaneIndex = count.getAndIncrement(); String newLaneName = prefix + newLaneIndex + " (Virtual)"; - var threadMetadata = new ThreadMetadata(newLaneName, newLaneId); + var threadMetadata = new ThreadMetadata(newLaneName, lane.id); var writer = Profiler.this.writerRef.get(); if (writer != null) { writer.enqueue(threadMetadata); } - lane = new Lane(newLaneId); } return lane; } @@ -994,31 +996,32 @@ public void release(Lane lane) { return lane; }); - private long borrowLaneAndGetLaneId() { - var currentThread = Thread.currentThread(); - var threadId = currentThread.threadId(); - if (!currentThread.isVirtual() || !isActive()) { - return threadId; + @Nullable + private Lane borrowLane() { + if (!Thread.currentThread().isVirtual() || !isActive()) { + return null; } var lane = borrowedLane.get(); lane.refCount += 1; + return lane; + } + + private long getLaneId(@Nullable Lane lane) { + if (lane == null) { + return Thread.currentThread().threadId(); + } return lane.id; } - private void releaseLane() { - var currentThread = Thread.currentThread(); - if (!currentThread.isVirtual() || !isActive()) { + private void releaseLane(@Nullable Lane lane) { + if (lane == null) { return; } - - var lane = borrowedLane.get(); lane.refCount -= 1; - checkState(lane.refCount >= 0); if (lane.refCount == 0) { borrowedLane.remove(); - var prefix = virtualThreadPrefix.get(); - multiLaneGenerator.release(prefix, lane); + multiLaneGenerator.release(lane); } } diff --git a/src/main/java/com/google/devtools/build/lib/remote/RemoteModule.java b/src/main/java/com/google/devtools/build/lib/remote/RemoteModule.java index df24abe8b66777..9534f9389e6bce 100644 --- a/src/main/java/com/google/devtools/build/lib/remote/RemoteModule.java +++ b/src/main/java/com/google/devtools/build/lib/remote/RemoteModule.java @@ -64,6 +64,7 @@ import com.google.devtools.build.lib.remote.common.RemoteCacheClient; import com.google.devtools.build.lib.remote.common.RemoteExecutionClient; import com.google.devtools.build.lib.remote.downloader.GrpcRemoteDownloader; +import com.google.devtools.build.lib.remote.http.DownloadTimeoutException; import com.google.devtools.build.lib.remote.http.HttpException; import com.google.devtools.build.lib.remote.logging.LoggingInterceptor; import com.google.devtools.build.lib.remote.logging.RemoteExecutionLog.LogEntry; @@ -191,6 +192,8 @@ private static boolean shouldEnableRemoteDownloader(RemoteOptions options) { boolean retry = false; if (e instanceof ClosedChannelException) { retry = true; + } else if (e instanceof DownloadTimeoutException) { + retry = true; } else if (e instanceof HttpException httpException) { int status = httpException.response().status().code(); retry = diff --git a/src/main/java/com/google/devtools/build/lib/remote/http/DownloadTimeoutException.java b/src/main/java/com/google/devtools/build/lib/remote/http/DownloadTimeoutException.java index ec406759f67c03..c6484e7b8ef2d4 100644 --- a/src/main/java/com/google/devtools/build/lib/remote/http/DownloadTimeoutException.java +++ b/src/main/java/com/google/devtools/build/lib/remote/http/DownloadTimeoutException.java @@ -16,7 +16,8 @@ import java.io.IOException; -class DownloadTimeoutException extends IOException { +/** Exception thrown when a HTTP download times out. 
*/ +public class DownloadTimeoutException extends IOException { public DownloadTimeoutException(String url, long bytesReceived, long contentLength) { super(buildMessage(url, bytesReceived, contentLength)); diff --git a/src/main/java/com/google/devtools/build/lib/rules/android/AndroidSemantics.java b/src/main/java/com/google/devtools/build/lib/rules/android/AndroidSemantics.java index 4a4f548f32e2bd..8262e5f7be5a79 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/android/AndroidSemantics.java +++ b/src/main/java/com/google/devtools/build/lib/rules/android/AndroidSemantics.java @@ -238,7 +238,9 @@ default BootClassPathInfo getBootClassPathInfo(RuleContext ruleContext) NestedSetBuilder bootclasspath = NestedSetBuilder.stableOrder(); if (ruleContext.getConfiguration().getFragment(AndroidConfiguration.class).desugarJava8()) { bootclasspath.addTransitive( - PrerequisiteArtifacts.nestedSet(ruleContext, "$desugar_java8_extra_bootclasspath")); + PrerequisiteArtifacts.nestedSet( + ruleContext.getRulePrerequisitesCollection(), + "$desugar_java8_extra_bootclasspath")); } bootclasspath.add(androidSdkProvider.getAndroidJar()); bootClassPathInfo = BootClassPathInfo.create(ruleContext, bootclasspath.build()); diff --git a/src/main/java/com/google/devtools/build/lib/rules/android/AndroidStarlarkCommon.java b/src/main/java/com/google/devtools/build/lib/rules/android/AndroidStarlarkCommon.java index 28b3054546d8cc..31b8f43be19f53 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/android/AndroidStarlarkCommon.java +++ b/src/main/java/com/google/devtools/build/lib/rules/android/AndroidStarlarkCommon.java @@ -26,7 +26,6 @@ import com.google.devtools.build.lib.starlarkbuildapi.android.AndroidSdkProviderApi; import com.google.devtools.build.lib.starlarkbuildapi.android.AndroidSplitTransitionApi; import com.google.devtools.build.lib.starlarkbuildapi.android.AndroidStarlarkCommonApi; -import com.google.devtools.build.lib.starlarkbuildapi.config.ConfigurationTransitionApi; import net.starlark.java.annot.StarlarkMethod; import net.starlark.java.eval.EvalException; import net.starlark.java.eval.Sequence; @@ -47,11 +46,6 @@ public AndroidSplitTransitionApi getAndroidSplitTransition() { return AndroidSplitTransition.FACTORY; } - @Override - public ConfigurationTransitionApi getAndroidPlatformsTransition() { - return new AndroidPlatformsTransition.AndroidPlatformsTransitionFactory(); - } - /** * TODO(b/14473160): Provides a Starlark compatibility layer for the sourceless deps bug. When a * sourceless target is defined, the deps of the target are implicitly exported. Specifically only diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CcModule.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CcModule.java index c6648df717e197..07d88afb468c6d 100755 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CcModule.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CcModule.java @@ -2301,6 +2301,7 @@ public CcLinkingOutputs link( Object mainOutputObject, Object useShareableArtifactFactory, Object buildConfig, + Object emitInterfaceSharedLibrary, StarlarkThread thread) throws InterruptedException, EvalException { // TODO(bazel-team): Rename always_link to alwayslink before delisting. 
Also it looks like the @@ -2385,7 +2386,8 @@ public CcLinkingOutputs link( convertFromNoneable(onlyForDynamicLibsObject, false)) .emitInterfaceSharedLibraries( dynamicLinkTargetType == LinkTargetType.DYNAMIC_LIBRARY - && actualFeatureConfiguration.isEnabled(CppRuleClasses.TARGETS_WINDOWS) + && (convertFromNoneable(emitInterfaceSharedLibrary, false) + || actualFeatureConfiguration.isEnabled(CppRuleClasses.TARGETS_WINDOWS)) && CppHelper.useInterfaceSharedLibraries( ccToolchainProvider.getCppConfiguration(), actualFeatureConfiguration)) .setLinkerOutputArtifact(convertFromNoneable(mainOutput, null)) diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java index 49162ddc9a4b87..0c0d0f373ee5da 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java @@ -586,6 +586,10 @@ boolean canSplitCommandLine() throws EvalException { // On Windows, We can always split the command line when building DLL. case NODEPS_DYNAMIC_LIBRARY: case DYNAMIC_LIBRARY: + // TODO(bazel-team): interfaceOutput != null should not block the creation of parameter + // files. After change #652438084, this might become a problem for dynamic libraries with + // a very large number of linker inputs since the command line may exceed the maximum + // length. return (interfaceOutput == null || featureConfiguration.isEnabled(CppRuleClasses.TARGETS_WINDOWS)); case EXECUTABLE: diff --git a/src/main/java/com/google/devtools/build/lib/rules/filegroup/Filegroup.java b/src/main/java/com/google/devtools/build/lib/rules/filegroup/Filegroup.java index 12cee0165f9165..34c4f673b7bb70 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/filegroup/Filegroup.java +++ b/src/main/java/com/google/devtools/build/lib/rules/filegroup/Filegroup.java @@ -61,7 +61,7 @@ public ConfiguredTarget create(RuleContext ruleContext) NestedSet filesToBuild = outputGroupName.isEmpty() - ? PrerequisiteArtifacts.nestedSet(ruleContext, "srcs") + ? 
PrerequisiteArtifacts.nestedSet(ruleContext.getRulePrerequisitesCollection(), "srcs") : getArtifactsForOutputGroup(outputGroupName, ruleContext.getPrerequisites("srcs")); InstrumentedFilesInfo instrumentedFilesProvider = diff --git a/src/main/java/com/google/devtools/build/lib/rules/java/JavaCommon.java b/src/main/java/com/google/devtools/build/lib/rules/java/JavaCommon.java index 303b7f1b844300..d26c46c6b31b6f 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/java/JavaCommon.java +++ b/src/main/java/com/google/devtools/build/lib/rules/java/JavaCommon.java @@ -469,7 +469,8 @@ public JavaTargetAttributes.Builder initCommon( "resource_jars are not supported; use java_import and deps or runtime_deps instead."); } javaTargetAttributes.addResourceJars( - PrerequisiteArtifacts.nestedSet(ruleContext, "resource_jars")); + PrerequisiteArtifacts.nestedSet( + ruleContext.getRulePrerequisitesCollection(), "resource_jars")); } addPlugins(javaTargetAttributes); diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/CompilationAttributes.java b/src/main/java/com/google/devtools/build/lib/rules/objc/CompilationAttributes.java index f3ae69ec44e81a..e096d3c9c179de 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/CompilationAttributes.java +++ b/src/main/java/com/google/devtools/build/lib/rules/objc/CompilationAttributes.java @@ -197,7 +197,9 @@ static void addHeadersFromRuleContext(Builder builder, RuleContext ruleContext) } if (ruleContext.attributes().has("textual_hdrs", BuildType.LABEL_LIST)) { - builder.addTextualHdrs(PrerequisiteArtifacts.nestedSet(ruleContext, "textual_hdrs")); + builder.addTextualHdrs( + PrerequisiteArtifacts.nestedSet( + ruleContext.getRulePrerequisitesCollection(), "textual_hdrs")); } } diff --git a/src/main/java/com/google/devtools/build/lib/rules/repository/RepositoryDirectoryValue.java b/src/main/java/com/google/devtools/build/lib/rules/repository/RepositoryDirectoryValue.java index 38a830c77a9d1c..c801b1448571a6 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/repository/RepositoryDirectoryValue.java +++ b/src/main/java/com/google/devtools/build/lib/rules/repository/RepositoryDirectoryValue.java @@ -124,7 +124,8 @@ public boolean equals(Object other) { return Objects.equal(path, otherValue.path) && Objects.equal(sourceDir, otherValue.sourceDir) && Arrays.equals(digest, otherValue.digest) - && Objects.equal(fileValues, otherValue.fileValues); + && Objects.equal(fileValues, otherValue.fileValues) + && Objects.equal(excludeFromVendoring, otherValue.excludeFromVendoring); } return false; } diff --git a/src/main/java/com/google/devtools/build/lib/runtime/BlazeCommandDispatcher.java b/src/main/java/com/google/devtools/build/lib/runtime/BlazeCommandDispatcher.java index 3bc3371e3159da..114695995d49a1 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/BlazeCommandDispatcher.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/BlazeCommandDispatcher.java @@ -374,7 +374,8 @@ private BlazeCommandResult execExclusively( if (commonOptions.enableTracer == TriState.YES) { tracerEnabled = true; } else if (commonOptions.enableTracer == TriState.AUTO) { - boolean commandSupportsProfile = commandName.equals("query") || env.getCommand().builds(); + boolean commandSupportsProfile = + commandName.equals("query") || commandAnnotation.buildPhase().analyzes(); tracerEnabled = commandSupportsProfile || commonOptions.profilePath != null; } @@ -572,7 +573,7 @@ private BlazeCommandResult execExclusively( } } - if 
(env.getCommand().builds()) { + if (env.getCommand().buildPhase().analyzes()) { try { env.syncPackageLoading(options); } catch (InterruptedException e) { diff --git a/src/main/java/com/google/devtools/build/lib/runtime/BlazeOptionHandler.java b/src/main/java/com/google/devtools/build/lib/runtime/BlazeOptionHandler.java index 97e4dfec17e5e1..8632758cbde056 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/BlazeOptionHandler.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/BlazeOptionHandler.java @@ -222,6 +222,16 @@ void parseRcOptions( "%s:\n %s'%s' options: %s", source, inherited, commandToParse, Joiner.on(' ').join(rcArgs.getArgs()))); } + PriorityCategory priorityCategory; + // There's not a separate PriorityCategory for "client" options, so treat them as global + // rcfile options. Client options are passed via the wrapper script. + if ((workspace.getWorkspace() != null + && rcArgs.getRcFile().contains(workspace.getWorkspace().toString())) + || rcArgs.getRcFile().equals("client")) { + priorityCategory = PriorityCategory.GLOBAL_RC_FILE; + } else { + priorityCategory = PriorityCategory.RC_FILE; + } if (commandToParse.equals(COMMON_PSEUDO_COMMAND)) { // Pass in options data for all commands supported by the runtime so that options that // apply to some but not the current command can be ignored. @@ -235,7 +245,7 @@ void parseRcOptions( // pseudo command can be parsed unambiguously. ImmutableList ignoredArgs = optionsParser.parseWithSourceFunction( - PriorityCategory.RC_FILE, + priorityCategory, o -> rcArgs.getRcFile(), rcArgs.getArgs(), OptionsParser.getFallbackOptionsData(allOptionsClasses)); @@ -250,7 +260,7 @@ void parseRcOptions( rcfileNotes.set(index, note); } } else { - optionsParser.parse(PriorityCategory.RC_FILE, rcArgs.getRcFile(), rcArgs.getArgs()); + optionsParser.parse(priorityCategory, rcArgs.getRcFile(), rcArgs.getArgs()); } } } @@ -307,8 +317,8 @@ private void parseArgsAndConfigs(List args, ExtendedEventHandler eventHa remainingCmdLine.build(), /* fallbackData= */ null); - if (commandAnnotation.builds()) { - // splits project files from targets in the traditional sense + if (commandAnnotation.buildPhase().analyzes()) { + // split project files from targets in the traditional sense. ProjectFileSupport.handleProjectFiles( eventHandler, runtime.getProjectFileProvider(), @@ -410,7 +420,7 @@ public Target loadBuildSetting(String targetLabel) DetailedExitCode parseStarlarkOptions(CommandEnvironment env) { // For now, restrict starlark options to commands that already build to ensure that loading // will work. We may want to open this up to other commands in the future. - if (!commandAnnotation.builds()) { + if (!commandAnnotation.buildPhase().analyzes()) { return DetailedExitCode.success(); } try { diff --git a/src/main/java/com/google/devtools/build/lib/runtime/Command.java b/src/main/java/com/google/devtools/build/lib/runtime/Command.java index 71cfb68b279bce..40730566f5db0b 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/Command.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/Command.java @@ -54,9 +54,66 @@ boolean usesConfigurationOptions() default false; /** - * True if the command runs a build. + * The build phase associated with this command. + * + *

Use the enum helper methods to check the hierarchical effects of each command, like {@link + * BuildPhase#executes()}, {@link BuildPhase#loads()}, instead of checking the enum value + * directly. + */ + BuildPhase buildPhase() default BuildPhase.NONE; + + /** + * Build phases that can be associated with a command. + * + *

The effects are hierarchical: {@code EXECUTES} implies {@code ANALYZES}, but {@code LOADS} + * does not imply {@code ANALYZES}. Use the helper methods to check this hierarchy. */ - boolean builds() default false; + enum BuildPhase { + /** + * Use when this command does not have a build phase. Can also be used for commands that reset + * state. Such commands may still write to the terminal or to output files, e.g. writing logs + * or printing the help message. + */ + NONE, + + /** + * Use when this command loads BUILD and bzl files to produce the target graph, or MODULE.bazel + * and WORKSPACE files for external dependencies. + */ + LOADS, + + /** + * Use when this command produces the configured target/aspect/action graphs. + *

Implies LOADS. + */ + ANALYZES, + + /** + * Use when this command executes actions. + * + *

Implies LOADS, ANALYZES. */ + EXECUTES; + + /** True if this command executes actions. */ + public final boolean executes() { + return this == EXECUTES; + } + + /** True if this command analyzes and creates the configured target and action graphs. */ + public final boolean analyzes() { + return this == ANALYZES || this == EXECUTES; + } + + /** + * True if this command loads BUILD and bzl files to produce the target graph, or MODULE.bazel + * and WORKSPACE files for external dependencies. + */ + public final boolean loads() { + return this == LOADS || this == ANALYZES || this == EXECUTES; + } + } /** * True if the command should not be shown in the output of 'blaze help'. diff --git a/src/main/java/com/google/devtools/build/lib/runtime/CommandEnvironment.java b/src/main/java/com/google/devtools/build/lib/runtime/CommandEnvironment.java index 730e76cbfa2134..a6d9f796ca034b 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/CommandEnvironment.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/CommandEnvironment.java @@ -46,6 +46,7 @@ import com.google.devtools.build.lib.runtime.proto.InvocationPolicyOuterClass.InvocationPolicy; import com.google.devtools.build.lib.server.FailureDetails; import com.google.devtools.build.lib.server.FailureDetails.FailureDetail; +import com.google.devtools.build.lib.server.FailureDetails.Skyfocus; import com.google.devtools.build.lib.skyframe.BuildResultListener; import com.google.devtools.build.lib.skyframe.SkyfocusOptions; import com.google.devtools.build.lib.skyframe.SkyframeBuildView; @@ -269,7 +270,7 @@ public void exit(AbruptExitException exception) { : UUID.randomUUID().toString(); this.repoEnv.putAll(clientEnv); - if (command.builds() || command.name().equals("info")) { + if (command.buildPhase().analyzes() || command.name().equals("info")) { // Compute the set of environment variables that are allowlisted on the commandline // for inheritance. for (Map.Entry entry : @@ -614,8 +615,8 @@ public Path getWorkingDirectory() { } /** - * Returns the {@link OutputService} to use, or {@code null} if this is not a {@linkplain - * Command#builds build command}. + * Returns the {@link OutputService} to use, or {@code null} if this is not a command that + * performs analysis according to {@linkplain Command#buildPhase()}. */ @Nullable public OutputService getOutputService() { @@ -791,7 +792,11 @@ public void beforeCommand(InvocationPolicy invocationPolicy) throws AbruptExitEx outputService = null; BlazeModule outputModule = null; - if (command.builds() || command.name().equals("clean")) { + if (command.buildPhase().analyzes() || command.name().equals("clean")) { + // Output service should only affect commands that execute actions, but due to the legacy + // wiring of BuildTool.java, this covers analysis-only commands as well. + // + // TODO: fix this. for (BlazeModule module : runtime.getBlazeModules()) { OutputService moduleService = module.getOutputService(); if (moduleService != null) { @@ -822,13 +827,28 @@ public void beforeCommand(InvocationPolicy invocationPolicy) throws AbruptExitEx // Modules that are subscribed to CommandStartEvent may create pending exceptions. throwPendingException(); - if (getCommand().builds()) { - // Need to determine if Skyfocus will run for this command. If so, the evaluator - // will need to be configured to remember additional state (e.g. root keys) that it - // otherwise doesn't need to for a non-Skyfocus build.
Alternately, it might reset - // the evaluator, which is why this runs before injecting precomputed values below. + // Determine if Skyfocus will run for this command: Skyfocus runs only for commands that + // execute actions. Throw an error if this is a command that is not guaranteed to work + // correctly on a focused Skyframe graph. + if (getCommand().buildPhase().executes()) { skyframeExecutor.prepareForSkyfocus( options.getOptions(SkyfocusOptions.class), reporter, runtime.getProductName()); + } else if (getCommand().buildPhase().loads() + && !getSkyframeExecutor().getSkyfocusState().workingSet().isEmpty()) { + // A non-empty working set implies a focused Skyframe state. + throw new AbruptExitException( + DetailedExitCode.of( + FailureDetail.newBuilder() + .setMessage( + command.name() + + " is not supported after using Skyfocus because it can" + + " return partial/incorrect results. Run clean or shutdown and try" + + " again.") + .setSkyfocus( + Skyfocus.newBuilder() + .setCode(Skyfocus.Code.DISALLOWED_OPERATION_ON_FOCUSED_GRAPH) + .build()) + .build())); } } @@ -839,7 +859,9 @@ public String determineOutputFileSystem() { // precomputed by our BlazeWorkspace. try (SilentCloseable c = Profiler.instance().profile(ProfilerTask.INFO, "Finding output file system")) { - return outputService.getFileSystemName(workspace.getOutputBaseFilesystemTypeName()); + return outputService == null + ? "" + : outputService.getFileSystemName(workspace.getOutputBaseFilesystemTypeName()); } } diff --git a/src/main/java/com/google/devtools/build/lib/runtime/CommandLineEvent.java b/src/main/java/com/google/devtools/build/lib/runtime/CommandLineEvent.java index 9267fdc395c0ff..d37d8d7bb9c1a8 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/CommandLineEvent.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/CommandLineEvent.java @@ -190,9 +190,14 @@ CommandLineSection getResidual() { CommandLineSection.Builder builder = CommandLineSection.newBuilder().setSectionLabel("residual"); if (commandName.equals("run") - && !commandOptions.getOptions(BuildEventProtocolOptions.class) - .includeResidueInRunBepEvent) { - builder.setChunkList(ChunkList.newBuilder().addChunk("REDACTED")); + && !commandOptions.getOptions(BuildEventProtocolOptions.class).includeResidueInRunBepEvent + && !commandOptions.getResidue().isEmpty()) { + String target = commandOptions.getResidue().get(0); + ChunkList.Builder residual = ChunkList.newBuilder().addChunk(target); + if (commandOptions.getResidue().size() > 1) { + residual.addChunk("REDACTED"); + } + builder.setChunkList(residual); } else { builder.setChunkList(ChunkList.newBuilder().addAllChunk(commandOptions.getResidue())); } diff --git a/src/main/java/com/google/devtools/build/lib/runtime/ExecutionGraphModule.java b/src/main/java/com/google/devtools/build/lib/runtime/ExecutionGraphModule.java index 37b2179ab16985..f931f4bf0c81a4 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/ExecutionGraphModule.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/ExecutionGraphModule.java @@ -213,7 +213,7 @@ void setNanosToMillis(NanosToMillisSinceEpochConverter nanosToMillis) { public void beforeCommand(CommandEnvironment env) { this.env = env; - if (env.getCommand().builds()) { + if (env.getCommand().buildPhase().executes()) { ExecutionGraphOptions options = checkNotNull( env.getOptions().getOptions(ExecutionGraphOptions.class), diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/AqueryCommand.java 
b/src/main/java/com/google/devtools/build/lib/runtime/commands/AqueryCommand.java index c4fbefaa805607..d4e3f05bf0e2a4 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/AqueryCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/AqueryCommand.java @@ -13,6 +13,8 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.ANALYZES; + import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -52,7 +54,7 @@ /** Handles the 'aquery' command on the Blaze command line. */ @Command( name = "aquery", - builds = true, + buildPhase = ANALYZES, inheritsOptionsFrom = {BuildCommand.class}, options = {AqueryOptions.class}, usesConfigurationOptions = true, diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/BuildCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/BuildCommand.java index f9aac0d9c974b8..2ad82d7133474b 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/BuildCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/BuildCommand.java @@ -13,6 +13,8 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.EXECUTES; + import com.google.devtools.build.lib.analysis.AnalysisOptions; import com.google.devtools.build.lib.buildeventstream.BuildEventProtocolOptions; import com.google.devtools.build.lib.buildtool.BuildRequest; @@ -43,7 +45,7 @@ */ @Command( name = "build", - builds = true, + buildPhase = EXECUTES, options = { BuildRequestOptions.class, ExecutionOptions.class, diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/CanonicalizeCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/CanonicalizeCommand.java index 8e93db3152b1bc..8a5d0d083ea148 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/CanonicalizeCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/CanonicalizeCommand.java @@ -13,6 +13,7 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.NONE; import static com.google.devtools.common.options.Converters.BLAZE_ALIASING_FLAG; import com.google.common.base.Joiner; @@ -55,6 +56,7 @@ /** The 'blaze canonicalize-flags' command. */ @Command( name = "canonicalize-flags", + buildPhase = NONE, options = {CanonicalizeCommand.Options.class, PackageOptions.class}, // inherits from build to get proper package loading options and rc flag aliases. inheritsOptionsFrom = {BuildCommand.class}, diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/CleanCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/CleanCommand.java index b4266332fa8db0..eebaea4d23a60f 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/CleanCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/CleanCommand.java @@ -13,6 +13,8 @@ // limitations under the License. 
package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.NONE; + import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Strings; @@ -54,6 +56,7 @@ /** Implements 'blaze clean'. */ @Command( name = "clean", + buildPhase = NONE, allowResidue = true, // Does not, but need to allow so we can ignore Starlark options. writeCommandLog = false, // Do not create a command.log, otherwise we couldn't delete it. options = {CleanCommand.Options.class}, diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/ConfigCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/ConfigCommand.java index 7fbe6274e99e55..992c4e1018e93f 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/ConfigCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/ConfigCommand.java @@ -15,6 +15,7 @@ import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableSortedMap.toImmutableSortedMap; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.NONE; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Comparator.comparing; import static java.util.stream.Collectors.joining; @@ -71,7 +72,7 @@ /** Handles the 'config' command on the Blaze command line. */ @Command( name = "config", - builds = true, + buildPhase = NONE, inheritsOptionsFrom = {BuildCommand.class}, options = {ConfigOptions.class}, usesConfigurationOptions = true, diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/CoverageCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/CoverageCommand.java index 34a620aa09ffe7..ae7483da906eca 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/CoverageCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/CoverageCommand.java @@ -13,6 +13,8 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.EXECUTES; + import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.packages.TestTimeout; import com.google.devtools.build.lib.runtime.Command; @@ -104,7 +106,7 @@ */ @Command( name = "coverage", - builds = true, + buildPhase = EXECUTES, inheritsOptionsFrom = {TestCommand.class}, shortDescription = "Generates code coverage report for specified test targets.", completion = "label-test", diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/CqueryCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/CqueryCommand.java index d8f88537ac9c18..f17599ebeb7a60 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/CqueryCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/CqueryCommand.java @@ -13,6 +13,8 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.ANALYZES; + import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.analysis.config.CoreOptions.IncludeConfigFragmentsEnum; import com.google.devtools.build.lib.buildtool.BuildRequest; @@ -56,7 +58,7 @@ /** Handles the 'cquery' command on the Blaze command line. 
*/ @Command( name = "cquery", - builds = true, + buildPhase = ANALYZES, // We inherit from TestCommand so that we pick up changes like `test --test_arg=foo` in .bazelrc // files. // Without doing this, there is no easy way to use the output of cquery to determine whether a diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/DumpCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/DumpCommand.java index 0ae5d7f594cd7a..3af02d0c293403 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/DumpCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/DumpCommand.java @@ -14,6 +14,8 @@ package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.NONE; + import com.google.common.base.Splitter; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -79,7 +81,9 @@ /** Implementation of the dump command. */ @Command( + name = "dump", mustRunInWorkspace = false, + buildPhase = NONE, options = {DumpCommand.DumpOptions.class}, help = "Usage: %{product} dump \n" @@ -87,7 +91,6 @@ + " as an aid to debugging, not as a stable interface, so users should not try to parse" + " the output; instead, use 'query' or 'info' for this purpose.\n" + "%{options}", - name = "dump", shortDescription = "Dumps the internal state of the %{product} server process.", binaryStdOut = true) public class DumpCommand implements BlazeCommand { @@ -271,9 +274,7 @@ public static class DumpOptions extends OptionsBase { converter = SkyframeDumpEnumConverter.class, documentationCategory = OptionDocumentationCategory.OUTPUT_SELECTION, effectTags = {OptionEffectTag.BAZEL_MONITORING}, - help = - "Dump Skyframe graph: 'off', 'summary', 'count', 'value', 'deps', 'rdeps', or" - + " 'function_graph'.") + help = "Dump the Skyframe graph.") public SkyframeDumpOption dumpSkyframe; @Option( @@ -306,6 +307,8 @@ public enum SkyframeDumpOption { DEPS, RDEPS, FUNCTION_GRAPH, + WORKING_SET, + WORKING_SET_FRONTIER_DEPS, } /** Enum converter for SkyframeDumpOption. */ @@ -401,6 +404,9 @@ public BlazeCommandResult exec(CommandEnvironment env, OptionsParsingResult opti case DEPS -> evaluator.dumpDeps(out, dumpOptions.skyKeyFilter); case RDEPS -> evaluator.dumpRdeps(out, dumpOptions.skyKeyFilter); case FUNCTION_GRAPH -> evaluator.dumpFunctionGraph(out, dumpOptions.skyKeyFilter); + case WORKING_SET -> env.getSkyframeExecutor().getSkyfocusState().dumpWorkingSet(out); + case WORKING_SET_FRONTIER_DEPS -> + env.getSkyframeExecutor().getSkyfocusState().dumpFrontierSet(out); case OFF -> {} } diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/HelpCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/HelpCommand.java index 3266e26a07d326..b8920a44f76b67 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/HelpCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/HelpCommand.java @@ -13,6 +13,8 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.NONE; + import com.google.common.base.Ascii; import com.google.common.base.CaseFormat; import com.google.common.base.Joiner; @@ -72,6 +74,7 @@ /** The 'blaze help' command, which prints all available commands as well as specific help pages. 
*/ @Command( name = "help", + buildPhase = NONE, options = {HelpCommand.Options.class}, allowResidue = true, mustRunInWorkspace = false, diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/InfoCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/InfoCommand.java index d5973686db258e..54d804cb592ab8 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/InfoCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/InfoCommand.java @@ -13,6 +13,8 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.NONE; + import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; @@ -56,7 +58,6 @@ import com.google.devtools.build.lib.runtime.commands.info.ReleaseInfoItem; import com.google.devtools.build.lib.runtime.commands.info.ServerLogInfoItem; import com.google.devtools.build.lib.runtime.commands.info.ServerPidInfoItem; -import com.google.devtools.build.lib.runtime.commands.info.SkyfocusWorkingSetItem; import com.google.devtools.build.lib.runtime.commands.info.StarlarkSemanticsInfoItem; import com.google.devtools.build.lib.runtime.commands.info.UsedHeapSizeAfterGcInfoItem; import com.google.devtools.build.lib.runtime.commands.info.UsedHeapSizeInfoItem; @@ -85,6 +86,7 @@ /** Implementation of 'blaze info'. */ @Command( name = "info", + buildPhase = NONE, allowResidue = true, binaryStdOut = true, help = "resource:info.txt", @@ -296,8 +298,7 @@ private static Map getHardwiredInfoItemMap( new DefaultPackagePathInfoItem(commandOptions), new StarlarkSemanticsInfoItem(commandOptions), new WorkerMetricsInfoItem(), - new LocalResourcesInfoItem(), - new SkyfocusWorkingSetItem()); + new LocalResourcesInfoItem()); ImmutableMap.Builder result = new ImmutableMap.Builder<>(); for (InfoItem item : hardwiredInfoItems) { result.put(item.getName(), item); diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/LicenseCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/LicenseCommand.java index 58b910ba3c5c0c..008a8840f52e79 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/LicenseCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/LicenseCommand.java @@ -13,6 +13,8 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.NONE; + import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.analysis.NoBuildEvent; import com.google.devtools.build.lib.runtime.BlazeCommand; @@ -32,12 +34,12 @@ /** A command that prints an embedded license text. 
*/ @Command( - name = "license", - allowResidue = true, - mustRunInWorkspace = false, - shortDescription = "Prints the license of this software.", - help = "Prints the license of this software.\n\n%{options}" -) + name = "license", + buildPhase = NONE, + allowResidue = true, + mustRunInWorkspace = false, + shortDescription = "Prints the license of this software.", + help = "Prints the license of this software.\n\n%{options}") public class LicenseCommand implements BlazeCommand { private static final ImmutableSet JAVA_LICENSE_FILES = diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/PrintActionCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/PrintActionCommand.java index 0b575226c581e9..79b7c3a2245fbd 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/PrintActionCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/PrintActionCommand.java @@ -13,6 +13,8 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.ANALYZES; + import com.google.common.base.Predicate; import com.google.common.collect.Sets; import com.google.devtools.build.lib.actions.Action; @@ -68,7 +70,7 @@ /** Implements 'blaze print_action' by finding the Configured target[s] for the file[s] listed. */ @Command( name = "print_action", - builds = true, + buildPhase = ANALYZES, inheritsOptionsFrom = {BuildCommand.class}, options = {PrintActionCommand.PrintActionOptions.class}, help = "resource:print_action.txt", diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/ProfileCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/ProfileCommand.java index d5d68a17c1e674..49392dcce2cbb2 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/ProfileCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/ProfileCommand.java @@ -13,6 +13,8 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.NONE; + import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.Reporter; import com.google.devtools.build.lib.profiler.JsonProfile; @@ -41,14 +43,14 @@ /** Command line wrapper for analyzing Blaze build profiles. */ @Command( - name = "analyze-profile", - options = {ProfileCommand.ProfileOptions.class}, - shortDescription = "Analyzes build profile data.", - help = "resource:analyze-profile.txt", - allowResidue = true, - completion = "path", - mustRunInWorkspace = false -) + name = "analyze-profile", + buildPhase = NONE, + options = {ProfileCommand.ProfileOptions.class}, + shortDescription = "Analyzes build profile data.", + help = "resource:analyze-profile.txt", + allowResidue = true, + completion = "path", + mustRunInWorkspace = false) public final class ProfileCommand implements BlazeCommand { public static class DumpConverter extends Converters.StringSetConverter { diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/QueryCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/QueryCommand.java index b710d44ac58d55..458d16d3ce8d51 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/QueryCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/QueryCommand.java @@ -13,6 +13,8 @@ // limitations under the License. 
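A recurring change in the command hunks above is replacing the boolean builds = true attribute with a buildPhase value imported statically from Command.BuildPhase. The enum itself is declared in Command.java, which is not part of this excerpt; the sketch below only illustrates the shape implied by the static imports (NONE, LOADS, ANALYZES, EXECUTES) and by the commands they are attached to in these hunks. It is an assumption, not the actual source.

// Illustrative sketch only; javadoc text and the default value are assumptions.
public @interface Command {
  /** How far a command gets into the build; replaces the old boolean 'builds' attribute. */
  enum BuildPhase {
    NONE,     // clean, config, dump, help, info, license, analyze-profile, shutdown, version
    LOADS,    // query: loads packages but performs no analysis
    ANALYZES, // cquery, print_action: runs the analysis phase but executes no actions
    EXECUTES  // coverage, run, test, mobile-install: builds and runs actions
  }

  String name();

  BuildPhase buildPhase() default BuildPhase.NONE; // default is an assumption

  // ... the real annotation carries many more attributes (options, help, allowResidue, ...)
}

Compared with a single builds flag, a four-valued phase lets the runtime distinguish loading-only commands such as query from analysis-only commands such as cquery without per-command special casing.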
package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.LOADS; + import com.google.common.hash.HashFunction; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.packages.Target; @@ -54,6 +56,7 @@ /** Command line wrapper for executing a query with blaze. */ @Command( name = "query", + buildPhase = LOADS, options = { PackageOptions.class, QueryOptions.class, diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/RunCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/RunCommand.java index d8669d2ee56a91..977d7831228884 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/RunCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/RunCommand.java @@ -15,6 +15,7 @@ package com.google.devtools.build.lib.runtime.commands; import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.EXECUTES; import static java.nio.charset.StandardCharsets.ISO_8859_1; import com.google.common.base.Joiner; @@ -109,7 +110,7 @@ /** Builds and run a target with the given command line arguments. */ @Command( name = "run", - builds = true, + buildPhase = EXECUTES, options = {RunCommand.RunOptions.class}, inheritsOptionsFrom = {BuildCommand.class}, shortDescription = "Runs the specified target.", diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/ShutdownCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/ShutdownCommand.java index 8fd6c3267c47a4..7e3e36b8dc1443 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/ShutdownCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/ShutdownCommand.java @@ -13,6 +13,8 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.NONE; + import com.google.devtools.build.lib.runtime.BlazeCommand; import com.google.devtools.build.lib.runtime.BlazeCommandResult; import com.google.devtools.build.lib.runtime.Command; @@ -24,16 +26,16 @@ import com.google.devtools.common.options.OptionsParser; import com.google.devtools.common.options.OptionsParsingResult; -/** - * The 'blaze shutdown' command. - */ -@Command(name = "shutdown", - options = { ShutdownCommand.Options.class }, - allowResidue = false, - mustRunInWorkspace = false, - shortDescription = "Stops the %{product} server.", - help = "This command shuts down the memory resident %{product} server process.\n" - + "%{options}") +/** The 'blaze shutdown' command. 
*/ +@Command( + name = "shutdown", + buildPhase = NONE, + options = {ShutdownCommand.Options.class}, + allowResidue = false, + mustRunInWorkspace = false, + shortDescription = "Stops the %{product} server.", + help = + "This command shuts down the memory resident %{product} server process.\n" + "%{options}") public final class ShutdownCommand implements BlazeCommand { public static class Options extends OptionsBase { diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/TestCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/TestCommand.java index 7e1db2312a6286..17816ad875c7f8 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/TestCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/TestCommand.java @@ -14,6 +14,8 @@ package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.EXECUTES; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -62,7 +64,7 @@ /** Handles the 'test' command on the Blaze command line. */ @Command( name = "test", - builds = true, + buildPhase = EXECUTES, inheritsOptionsFrom = {BuildCommand.class}, options = {TestSummaryOptions.class}, shortDescription = "Builds and runs the specified test targets.", diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/VersionCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/VersionCommand.java index 031763c5822f15..10af9f1109bbc6 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/VersionCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/commands/VersionCommand.java @@ -13,6 +13,8 @@ // limitations under the License. package com.google.devtools.build.lib.runtime.commands; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.NONE; + import com.google.common.annotations.VisibleForTesting; import com.google.devtools.build.lib.analysis.BlazeVersionInfo; import com.google.devtools.build.lib.analysis.NoBuildEvent; @@ -32,16 +34,15 @@ import com.google.devtools.common.options.OptionsParsingResult; import java.util.Optional; -/** - * The 'blaze version' command, which informs users about the blaze version - * information. - */ -@Command(name = "version", - options = { VersionCommand.VersionOptions.class }, - allowResidue = false, - mustRunInWorkspace = false, - help = "resource:version.txt", - shortDescription = "Prints version information for %{product}.") +/** The 'blaze version' command, which informs users about the blaze version information. */ +@Command( + name = "version", + buildPhase = NONE, + options = {VersionCommand.VersionOptions.class}, + allowResidue = false, + mustRunInWorkspace = false, + help = "resource:version.txt", + shortDescription = "Prints version information for %{product}.") public final class VersionCommand implements BlazeCommand { /** Options for the "version" command. */ public static class VersionOptions extends OptionsBase { diff --git a/src/main/java/com/google/devtools/build/lib/runtime/commands/info/SkyfocusWorkingSetItem.java b/src/main/java/com/google/devtools/build/lib/runtime/commands/info/SkyfocusWorkingSetItem.java deleted file mode 100644 index 8710dd71b86e6f..00000000000000 --- a/src/main/java/com/google/devtools/build/lib/runtime/commands/info/SkyfocusWorkingSetItem.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2024 The Bazel Authors. 
All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package com.google.devtools.build.lib.runtime.commands.info; - -import com.google.common.base.Supplier; -import com.google.common.collect.ImmutableSet; -import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; -import com.google.devtools.build.lib.runtime.CommandEnvironment; -import com.google.devtools.build.lib.runtime.InfoItem; -import com.google.devtools.build.lib.util.AbruptExitException; -import java.util.TreeSet; - -/** - * The info entry to print out the working set of files used for Skyfocus. See also {@link - * com.google.devtools.build.lib.skyframe.SkyframeFocuser}. - */ -public class SkyfocusWorkingSetItem extends InfoItem { - - public SkyfocusWorkingSetItem() { - super("working_set", "Skyfocus working set", false); - } - - @Override - public byte[] get(Supplier configurationSupplier, CommandEnvironment env) - throws AbruptExitException, InterruptedException { - - ImmutableSet workingSet = - env.getSkyframeExecutor().getSkyfocusState().workingSetStrings(); - - if (workingSet.isEmpty()) { - return print("No working set found."); - } - - return print(String.join("\n", new TreeSet<>(workingSet))); - } -} diff --git a/src/main/java/com/google/devtools/build/lib/runtime/mobileinstall/MobileInstallCommand.java b/src/main/java/com/google/devtools/build/lib/runtime/mobileinstall/MobileInstallCommand.java index 86901d48c24901..091582c9d52658 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/mobileinstall/MobileInstallCommand.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/mobileinstall/MobileInstallCommand.java @@ -15,6 +15,7 @@ package com.google.devtools.build.lib.runtime.mobileinstall; import static com.google.devtools.build.lib.analysis.OutputGroupInfo.INTERNAL_SUFFIX; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.EXECUTES; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; @@ -67,7 +68,7 @@ /** Implementation of the 'mobile-install' command. 
*/ @Command( name = "mobile-install", - builds = true, + buildPhase = EXECUTES, options = {MobileInstallCommand.Options.class, WriteAdbArgsAction.Options.class}, inheritsOptionsFrom = {BuildCommand.class}, shortDescription = "Installs targets to mobile devices.", diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/AsynchronousTreeDeleter.java b/src/main/java/com/google/devtools/build/lib/sandbox/AsynchronousTreeDeleter.java index ce809f3ad6c9d4..39396f68db2af0 100644 --- a/src/main/java/com/google/devtools/build/lib/sandbox/AsynchronousTreeDeleter.java +++ b/src/main/java/com/google/devtools/build/lib/sandbox/AsynchronousTreeDeleter.java @@ -20,6 +20,8 @@ import com.google.common.flogger.GoogleLogger; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.devtools.build.lib.exec.TreeDeleter; +import com.google.devtools.build.lib.profiler.Profiler; +import com.google.devtools.build.lib.profiler.SilentCloseable; import com.google.devtools.build.lib.vfs.Path; import java.io.IOException; import java.util.concurrent.LinkedBlockingQueue; @@ -93,7 +95,7 @@ public void deleteTree(Path path) throws IOException { checkNotNull(service, "Cannot call deleteTree after shutdown") .execute( () -> { - try { + try (SilentCloseable c = Profiler.instance().profile("trashPath.deleteTree")) { trashPath.deleteTree(); } catch (IOException e) { logger.atWarning().withCause(e).log( diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/BUILD b/src/main/java/com/google/devtools/build/lib/sandbox/BUILD index 5e201e5ece9e5e..5f28b59fa3bd84 100644 --- a/src/main/java/com/google/devtools/build/lib/sandbox/BUILD +++ b/src/main/java/com/google/devtools/build/lib/sandbox/BUILD @@ -135,6 +135,7 @@ java_library( ], deps = [ "//src/main/java/com/google/devtools/build/lib/exec:tree_deleter", + "//src/main/java/com/google/devtools/build/lib/profiler", "//src/main/java/com/google/devtools/build/lib/vfs", "//third_party:flogger", "//third_party:guava", diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/SandboxHelpers.java b/src/main/java/com/google/devtools/build/lib/sandbox/SandboxHelpers.java index 7f962e049fcfae..2978e30359a120 100644 --- a/src/main/java/com/google/devtools/build/lib/sandbox/SandboxHelpers.java +++ b/src/main/java/com/google/devtools/build/lib/sandbox/SandboxHelpers.java @@ -447,6 +447,7 @@ public static void createDirectories( // createDirectoryAndParentsInSandboxRoot. knownDirectories.add(dir); knownDirectories.add(dir.getParentDirectory()); + knownDirectories.add(getTmpDirPath(dir)); for (PathFragment path : dirsToCreate) { if (Thread.interrupted()) { @@ -644,6 +645,19 @@ private static Optional getExpectedSymlinkDestinationForSymlinks( return Optional.ofNullable(inputs.getSymlinks().get(fragment)); } + /** + * Returns the path to the tmp directory of the given workDir of worker. + * + *

The structure of the worker directories should look like this: / + * |__bazel-workers/ |__worker--/ |__worker---tmp/ + */ + public static Path getTmpDirPath(Path workDir) { + return workDir + .getParentDirectory() + .getParentDirectory() + .getChild(workDir.getParentDirectory().getBaseName() + "-tmp"); + } + /** * Returns true if the build options are set in a way that requires network access for all * actions. This is separate from {@link diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/SandboxModule.java b/src/main/java/com/google/devtools/build/lib/sandbox/SandboxModule.java index 064a964b4b3615..e4f41f8583beb7 100644 --- a/src/main/java/com/google/devtools/build/lib/sandbox/SandboxModule.java +++ b/src/main/java/com/google/devtools/build/lib/sandbox/SandboxModule.java @@ -229,14 +229,16 @@ private void setup(CommandEnvironment cmdEnv, SpawnStrategyRegistry.Builder buil firstBuild = true; } } - SandboxStash.initialize(env.getWorkspaceName(), sandboxBase, options, treeDeleter); + try (SilentCloseable c = Profiler.instance().profile("SandboxStash.initialize")) { + SandboxStash.initialize(env.getWorkspaceName(), sandboxBase, options, treeDeleter); + } // SpawnExecutionPolicy#getId returns unique base directories for each sandboxed action during // the life of a Bazel server instance so we don't need to worry about stale directories from // previous builds. However, on the very first build of an instance of the server, we must // wipe old contents to avoid reusing stale directories. if (firstBuild && sandboxBase.exists()) { - try { + try (SilentCloseable c = Profiler.instance().profile("clean sandbox on first build")) { if (trashBase.exists()) { // Delete stale trash from a previous server instance. Path staleTrash = getStaleTrashDir(trashBase); diff --git a/src/main/java/com/google/devtools/build/lib/sandbox/SandboxStash.java b/src/main/java/com/google/devtools/build/lib/sandbox/SandboxStash.java index ff0ae5da1f0887..f7a1e4e4b2dbed 100644 --- a/src/main/java/com/google/devtools/build/lib/sandbox/SandboxStash.java +++ b/src/main/java/com/google/devtools/build/lib/sandbox/SandboxStash.java @@ -23,6 +23,8 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.exec.TreeDeleter; +import com.google.devtools.build.lib.profiler.Profiler; +import com.google.devtools.build.lib.profiler.SilentCloseable; import com.google.devtools.build.lib.sandbox.SandboxHelpers.SandboxOutputs; import com.google.devtools.build.lib.sandbox.SandboxHelpers.StashContents; import com.google.devtools.build.lib.vfs.Dirent; @@ -369,7 +371,7 @@ public static void initialize( } else { if (!Objects.equals(workspaceName, instance.workspaceName)) { Path stashBase = getStashBase(instance.sandboxBase); - try { + try (SilentCloseable c = Profiler.instance().profile("treeDeleter.deleteTree")) { for (Path directoryEntry : stashBase.getDirectoryEntries()) { treeDeleter.deleteTree(directoryEntry); } diff --git a/src/main/java/com/google/devtools/build/lib/shell/JavaSubprocessFactory.java b/src/main/java/com/google/devtools/build/lib/shell/JavaSubprocessFactory.java index 649647c5f2ee24..e2d8968b845a12 100644 --- a/src/main/java/com/google/devtools/build/lib/shell/JavaSubprocessFactory.java +++ b/src/main/java/com/google/devtools/build/lib/shell/JavaSubprocessFactory.java @@ -78,21 +78,13 @@ public boolean timedout() { @Override public void waitFor() throws InterruptedException { - if (deadlineMillis > 0) { - // Careful: I 
originally used Long.MAX_VALUE if there's no timeout. This is safe with - // Process, but not for the UNIXProcess subclass, which has an integer overflow for very - // large timeouts. As of this writing, it converts the passed in value to nanos (which - // saturates at Long.MAX_VALUE), then adds 999999 to round up (which overflows), converts - // back to millis, and then calls Object.wait with a negative timeout, which throws. - long waitTimeMillis = deadlineMillis - System.currentTimeMillis(); - boolean exitedInTime = process.waitFor(waitTimeMillis, TimeUnit.MILLISECONDS); - if (!exitedInTime && deadlineExceeded.compareAndSet(false, true)) { - process.destroy(); - // The destroy call returns immediately, so we still need to wait for the actual exit. The - // sole caller assumes that waitFor only exits when the process is gone (or throws). - process.waitFor(); - } - } else { + var waitTimeMillis = + (deadlineMillis > 0) ? deadlineMillis - System.currentTimeMillis() : Long.MAX_VALUE; + var exitedInTime = process.waitFor(waitTimeMillis, TimeUnit.MILLISECONDS); + if (!exitedInTime && deadlineExceeded.compareAndSet(false, true)) { + process.destroy(); + // The destroy call returns immediately, so we still need to wait for the actual exit. The + // sole caller assumes that waitFor only exits when the process is gone (or throws). process.waitFor(); } } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/AspectFunction.java b/src/main/java/com/google/devtools/build/lib/skyframe/AspectFunction.java index fb148eb1683382..ce3897238cb7b2 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/AspectFunction.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/AspectFunction.java @@ -51,6 +51,7 @@ import com.google.devtools.build.lib.analysis.producers.DependencyContext; import com.google.devtools.build.lib.analysis.producers.DependencyContextProducer; import com.google.devtools.build.lib.analysis.producers.UnloadedToolchainContextsInputs; +import com.google.devtools.build.lib.analysis.producers.UnloadedToolchainContextsProducer; import com.google.devtools.build.lib.analysis.starlark.StarlarkAttributeTransitionProvider; import com.google.devtools.build.lib.bugreport.BugReport; import com.google.devtools.build.lib.causes.Cause; @@ -72,6 +73,7 @@ import com.google.devtools.build.lib.packages.Package; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException; +import com.google.devtools.build.lib.packages.RuleClassProvider; import com.google.devtools.build.lib.packages.StarlarkAspectClass; import com.google.devtools.build.lib.packages.StarlarkDefinedAspect; import com.google.devtools.build.lib.packages.Target; @@ -90,6 +92,7 @@ import com.google.devtools.build.skyframe.SkyValue; import com.google.devtools.build.skyframe.SkyframeLookupResult; import com.google.devtools.build.skyframe.state.Driver; +import com.google.devtools.build.skyframe.state.StateMachine; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; @@ -117,6 +120,8 @@ */ final class AspectFunction implements SkyFunction { private final BuildViewProvider buildViewProvider; + private final RuleClassProvider ruleClassProvider; + /** * Indicates whether the set of packages transitively loaded for a given {@link AspectValue} will * be needed later (see {@link @@ -153,25 +158,59 @@ final class AspectFunction implements SkyFunction { AspectFunction( BuildViewProvider buildViewProvider, + 
RuleClassProvider ruleClassProvider, boolean storeTransitivePackages, PrerequisitePackageFunction prerequisitePackages, BaseTargetPrerequisitesSupplier baseTargetPrerequisitesSupplier) { this.buildViewProvider = buildViewProvider; + this.ruleClassProvider = ruleClassProvider; this.storeTransitivePackages = storeTransitivePackages; this.prerequisitePackages = prerequisitePackages; this.baseTargetPrerequisitesSupplier = baseTargetPrerequisitesSupplier; } - static class State implements SkyKeyComputeState { + static class State implements SkyKeyComputeState, UnloadedToolchainContextsProducer.ResultSink { @Nullable InitialValues initialValues; final DependencyResolver.State computeDependenciesState; + /** + * Computes the {@link UnloadedToolchainContext} collection for the underlying target of the + * aspect. + * + *

One of {@link #baseTargetUnloadedToolchainContexts}, {@link + * #baseTargetUnloadedToolchainContextsError} or {@link #baseTargetHasNoToolchains} will be set + * upon completion. + */ + @Nullable // Non-null when in-flight. + Driver baseTargetUnloadedToolchainContextsProducer; + + @Nullable ToolchainCollection baseTargetUnloadedToolchainContexts; + + @Nullable ToolchainException baseTargetUnloadedToolchainContextsError; + + // Will be true if the target doesn't require toolchain resolution. + boolean baseTargetHasNoToolchains; + private State( boolean storeTransitivePackages, PrerequisitePackageFunction prerequisitePackages) { this.computeDependenciesState = new DependencyResolver.State(storeTransitivePackages, prerequisitePackages); } + + @Override + public void acceptUnloadedToolchainContexts( + @Nullable ToolchainCollection value) { + this.baseTargetUnloadedToolchainContexts = value; + if (this.baseTargetUnloadedToolchainContexts == null) { + this.baseTargetHasNoToolchains = true; + } + } + + @Override + public void acceptUnloadedToolchainContextsError(ToolchainException error) { + this.baseTargetUnloadedToolchainContextsError = error; + } } private static class InitialValues { @@ -309,6 +348,17 @@ public SkyValue compute(SkyKey skyKey, Environment env) return null; } + ToolchainCollection baseTargetUnloadedToolchainContexts = null; + if (!state.baseTargetHasNoToolchains + && canAspectsPropagateToToolchains(topologicalAspectPath, target)) { + baseTargetUnloadedToolchainContexts = + getBaseTargetUnloadedToolchainContexts( + state, targetAndConfiguration, key.getBaseConfiguredTargetKey(), env); + if (baseTargetUnloadedToolchainContexts == null) { + return null; // Need Skyframe deps. + } + } + Optional starlarkExecTransition; try { starlarkExecTransition = @@ -334,7 +384,8 @@ public SkyValue compute(SkyKey skyKey, Environment env) starlarkExecTransition.orElse(null), env, env.getListener(), - baseTargetPrerequisitesSupplier); + baseTargetPrerequisitesSupplier, + baseTargetUnloadedToolchainContexts); if (!computeDependenciesState.transitiveRootCauses().isEmpty()) { NestedSet causes = computeDependenciesState.transitiveRootCauses().build(); throw new AspectFunctionException( @@ -367,6 +418,10 @@ public SkyValue compute(SkyKey skyKey, Environment env) toolchainContexts = contextsBuilder.build(); } + // TODO(b/288421584): In the following step, load the base target resolved toolchains targets + // and pass them to the AspectContext. + ToolchainCollection baseTargetToolchainContexts = null; + return createAspect( env, key, @@ -378,6 +433,7 @@ public SkyValue compute(SkyKey skyKey, Environment env) configuration, dependencyContext.configConditions(), toolchainContexts, + baseTargetToolchainContexts, computeDependenciesState.execGroupCollectionBuilder, depValueMap, computeDependenciesState.transitiveState, @@ -411,6 +467,68 @@ public SkyValue compute(SkyKey skyKey, Environment env) } } + /** + * Returns the {@link ToolchainCollection} of {@link UnloadedToolchainContext}s for the base + * target. 
+ */ + private ToolchainCollection getBaseTargetUnloadedToolchainContexts( + State state, + TargetAndConfiguration targetAndConfiguration, + ConfiguredTargetKey configuredTargetKey, + Environment env) + throws InterruptedException, ToolchainException { + + if (state.baseTargetUnloadedToolchainContexts != null) { + return state.baseTargetUnloadedToolchainContexts; + } + + if (state.baseTargetUnloadedToolchainContextsProducer == null) { + UnloadedToolchainContextsInputs unloadedToolchainContextsInputs = + DependencyResolver.getUnloadedToolchainContextsInputs( + targetAndConfiguration, + configuredTargetKey.getExecutionPlatformLabel(), + ruleClassProvider, + env.getListener()); + state.baseTargetUnloadedToolchainContextsProducer = + new Driver( + new UnloadedToolchainContextsProducer( + unloadedToolchainContextsInputs, + baseTargetPrerequisitesSupplier, + (UnloadedToolchainContextsProducer.ResultSink) state, + t -> { + return StateMachine.DONE; + })); + } + if (state.baseTargetUnloadedToolchainContextsProducer.drive(env)) { + state.baseTargetUnloadedToolchainContextsProducer = null; + } + var error = state.baseTargetUnloadedToolchainContextsError; + if (error != null) { + throw error; + } + + return state.baseTargetUnloadedToolchainContexts; + } + + /** + * Returns true if it is possible to propagate the aspects to the target's toolchains based on the + * conditions: + * + *

The base target is a rule. + * + *

At least one of the aspects in the aspects path propagates to toolchains. + */ + private static boolean canAspectsPropagateToToolchains( + ImmutableList topologicalAspectPath, Target baseTarget) { + Rule rule = baseTarget.getAssociatedRule(); + if (rule == null) { + return false; + } + + return topologicalAspectPath.stream() + .anyMatch(aspect -> aspect.getDefinition().propagatesToToolchains()); + } + /** Populates {@code state.execGroupCollection} as a side effect. */ @Nullable // Null if a Skyframe restart is needed. private DependencyContext getDependencyContext( @@ -750,6 +868,7 @@ private AspectValue createAspect( BuildConfigurationValue configuration, ConfigConditions configConditions, @Nullable ToolchainCollection toolchainContexts, + @Nullable ToolchainCollection baseTargetToolchainContexts, @Nullable ExecGroupCollection.Builder execGroupCollectionBuilder, OrderedSetMultimap directDeps, TransitiveDependencyState transitiveState, @@ -790,6 +909,7 @@ private AspectValue createAspect( directDeps, configConditions, toolchainContexts, + baseTargetToolchainContexts, execGroupCollectionBuilder, configuration, transitiveState.transitivePackages(), diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/BUILD b/src/main/java/com/google/devtools/build/lib/skyframe/BUILD index a980ecb9c43359..a41b278783d28f 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/BUILD +++ b/src/main/java/com/google/devtools/build/lib/skyframe/BUILD @@ -2060,6 +2060,8 @@ java_library( "//src/main/java/com/google/devtools/build/lib/analysis:config/build_configuration", "//src/main/java/com/google/devtools/build/lib/analysis:configured_target_value", "//src/main/java/com/google/devtools/build/lib/skyframe/config", + "//src/main/java/com/google/devtools/build/lib/skyframe/toolchains:toolchain_context_key", + "//src/main/java/com/google/devtools/build/lib/skyframe/toolchains:unloaded_toolchain_context", "//third_party:jsr305", ], ) diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/BaseTargetPrerequisitesSupplier.java b/src/main/java/com/google/devtools/build/lib/skyframe/BaseTargetPrerequisitesSupplier.java index 3c6aa296437c94..fa52ebb8895df1 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/BaseTargetPrerequisitesSupplier.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/BaseTargetPrerequisitesSupplier.java @@ -16,6 +16,8 @@ import com.google.devtools.build.lib.analysis.ConfiguredTargetValue; import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; import com.google.devtools.build.lib.skyframe.config.BuildConfigurationKey; +import com.google.devtools.build.lib.skyframe.toolchains.ToolchainContextKey; +import com.google.devtools.build.lib.skyframe.toolchains.UnloadedToolchainContext; import javax.annotation.Nullable; /** @@ -35,4 +37,11 @@ public interface BaseTargetPrerequisitesSupplier { @Nullable BuildConfigurationValue getPrerequisiteConfiguration(BuildConfigurationKey key) throws InterruptedException; + + /** + * Directly retrieves unloaded toolchain contexts from Skyframe without adding a dependency edge. 
+ */ + @Nullable + UnloadedToolchainContext getUnloadedToolchainContext(ToolchainContextKey key) + throws InterruptedException; } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/BzlCompileValue.java b/src/main/java/com/google/devtools/build/lib/skyframe/BzlCompileValue.java index b310edad5e0876..3ae4acabf485cc 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/BzlCompileValue.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/BzlCompileValue.java @@ -14,6 +14,7 @@ package com.google.devtools.build.lib.skyframe; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.skyframe.serialization.VisibleForSerialization; @@ -252,7 +253,8 @@ public static Key keyForBuiltins(Root root, Label label) { } /** Constructs a key for loading the prelude .bzl. */ - static Key keyForBuildPrelude(Root root, Label label) { + @VisibleForTesting + public static Key keyForBuildPrelude(Root root, Label label) { return Key.create(root, label, Kind.PRELUDE); } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/CompletionFunction.java b/src/main/java/com/google/devtools/build/lib/skyframe/CompletionFunction.java index 458f79598e4965..4561dff00262b7 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/CompletionFunction.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/CompletionFunction.java @@ -75,7 +75,6 @@ import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import com.google.devtools.build.skyframe.SkyframeLookupResult; -import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -141,8 +140,6 @@ EventReportingArtifacts createSucceeded( throws InterruptedException; } - private static final Duration IMPORTANT_OUTPUT_HANDLER_LOGGING_THRESHOLD = Duration.ofMillis(100); - private final PathResolverFactory pathResolverFactory; private final Completor completor; private final SkyframeActionExecutor skyframeActionExecutor; @@ -542,7 +539,7 @@ private Reset informImportantOutputHandler( try (var ignored = GoogleAutoProfilerUtils.logged( "Informing important output handler of top-level outputs for " + label, - IMPORTANT_OUTPUT_HANDLER_LOGGING_THRESHOLD)) { + ImportantOutputHandler.LOG_THRESHOLD)) { lostOutputs = importantOutputHandler.processOutputsAndGetLostArtifacts( key.topLevelArtifactContext().expandFilesets() diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/DependencyResolver.java b/src/main/java/com/google/devtools/build/lib/skyframe/DependencyResolver.java index 7b61a67e2fd0fb..6efcad67d0ccac 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/DependencyResolver.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/DependencyResolver.java @@ -396,7 +396,8 @@ public boolean evaluate( starlarkExecTransition.orElse(null), env, listener, - /* baseTargetPrerequisitesSupplier= */ null); + /* baseTargetPrerequisitesSupplier= */ null, + /* baseTargetUnloadedToolchainContexts= */ null); if (!transitiveRootCauses.isEmpty()) { NestedSet causes = transitiveRootCauses.build(); // TODO(bazel-team): consider reporting the error in this class vs. exporting it for @@ -602,6 +603,9 @@ public static ImmutableSet

Most of the complexity in the implementation results from wanting incremental correctness in * the presence of symlinks, esp. ancestor directory symlinks. + * + *

For an overview of the problem space and our approach, see the https://youtu.be/EoYdWmMcqDs + * talk from BazelCon 2019 (slides: + * https://docs.google.com/presentation/d/e/2PACX-1vQWq1DUhl92dDs_okNxM7Qy9zX72tp7hMsGosGxmjhBLZ5e02IJf9dySK_6lEU2j6u_NOEaUCQGxEFh/pub). */ public class FileFunction implements SkyFunction { private final AtomicReference pkgLocator; diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/FilesystemValueChecker.java b/src/main/java/com/google/devtools/build/lib/skyframe/FilesystemValueChecker.java index da4ff7ff6111c4..60a08ddf688d9d 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/FilesystemValueChecker.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/FilesystemValueChecker.java @@ -395,17 +395,10 @@ && shouldCheckFile(knownModifiedOutputFiles, artifact)) { FileArtifactValue newData = ActionOutputMetadataStore.fileArtifactValueFromArtifact( artifact, stat, xattrProviderOverrider.getXattrProvider(syscallCache), tsgm); - // TODO: b/345207297 - Remove fine logging after flakiness is diagnosed. if (newData.couldBeModifiedSince(lastKnownData)) { - logger.atFine().log( - "Modified output %s (%s -> %s), stat: %s", artifact, lastKnownData, newData, stat); modifiedOutputsReceiver.reportModifiedOutputFile( stat != null ? stat.getLastChangeTime() : -1, artifact); dirtyKeys.add(key); - } else { - logger.atFine().log( - "Unmodified output %s (%s -> %s), stat: %s", - artifact, lastKnownData, newData, stat); } } catch (IOException e) { logger.atWarning().withCause(e).log( diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/GlobsFunction.java b/src/main/java/com/google/devtools/build/lib/skyframe/GlobsFunction.java index 34724f5208bdb6..f60a16da1851ac 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/GlobsFunction.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/GlobsFunction.java @@ -167,12 +167,17 @@ public SkyValue compute(SkyKey skyKey, Environment env) Runnable drainStateMachineQueue = () -> { Runnable next; + boolean isInterrupted = false; while ((next = stateMachineRunnablesQueue.poll()) != null) { + if (isInterrupted) { + countDownLatch.countDown(); + continue; + } next.run(); if (Thread.interrupted()) { + isInterrupted = true; possibleInterruptedExceptionRef.compareAndSet( /* expectedValue= */ null, new InterruptedException()); - return; } } }; diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/LocalDiffAwareness.java b/src/main/java/com/google/devtools/build/lib/skyframe/LocalDiffAwareness.java index 9a9502a18f0edc..c5d696a96c2031 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/LocalDiffAwareness.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/LocalDiffAwareness.java @@ -27,6 +27,7 @@ import com.google.devtools.common.options.OptionDocumentationCategory; import com.google.devtools.common.options.OptionEffectTag; import com.google.devtools.common.options.OptionsBase; +import com.google.devtools.common.options.OptionsProvider; import java.io.IOException; import java.nio.file.FileSystems; import java.nio.file.Path; @@ -88,7 +89,9 @@ public Factory(ImmutableList excludedNetworkFileSystemsPrefixes) { @Override @Nullable public DiffAwareness maybeCreate( - Root pathEntry, ImmutableSet ignoredPaths) { + Root pathEntry, + ImmutableSet ignoredPaths, + OptionsProvider optionsProvider) { com.google.devtools.build.lib.vfs.Path resolvedPathEntry; try { resolvedPathEntry = pathEntry.asPath().resolveSymbolicLinks(); @@ -179,8 +182,12 @@ protected 
SequentialView newView(Set modifiedAbsolutePaths) { } @Override - public ModifiedFileSet getDiff(View oldView, View newView) + public ModifiedFileSet getDiff(@Nullable View oldView, View newView) throws IncompatibleViewException, BrokenDiffAwarenessException { + if (oldView == null) { + return ModifiedFileSet.EVERYTHING_MODIFIED; + } + SequentialView oldSequentialView; SequentialView newSequentialView; try { diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/SkyfocusState.java b/src/main/java/com/google/devtools/build/lib/skyframe/SkyfocusState.java index 540e125995cf52..0598bb6a14a14d 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/SkyfocusState.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/SkyfocusState.java @@ -21,6 +21,7 @@ import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.skyframe.SkyKey; +import java.io.PrintStream; import javax.annotation.Nullable; /** @@ -36,6 +37,8 @@ * line flag, or automatically derived. Although the working set is represented as {@link * FileStateKey}, the presence of a directory path's {@code FileStateKey} is sufficient to * represent the corresponding directory listing state node. + * @param frontierSet {@link SkyKey}s for nodes that are in the DIRECT deps of the UTC of the + * working set. The values of these nodes are sufficient to build the working set. * @param verificationSet The set of files/dirs that are not in the working set, but is in the * transitive closure of focusedTargetLabels. * @param options The latest instance of {@link SkyfocusOptions}. @@ -47,10 +50,19 @@ public record SkyfocusState( ImmutableSet

It's called at the end of every Blaze command and, if the command builds, also before + * the execution phase starts. In the latter case, the invocation at the end of the command is + * a no-op, so the event about changed files is posted only once. + * + *

The reason why the event about changed files is posted early if the command builds is that + * it's used in the execution phase. + */ public void drainChangedFiles() { - incrementalBuildMonitor.alertListeners(getEventBus()); - incrementalBuildMonitor = null; + if (incrementalBuildMonitor != null) { + incrementalBuildMonitor.alertListeners(getEventBus()); + incrementalBuildMonitor = null; + } } /** @@ -947,7 +962,6 @@ protected final void init() { progressReceiver = newSkyframeProgressReceiver(); memoizingEvaluator = createEvaluator(skyFunctions(), progressReceiver, emittedEventState); skyframeExecutorConsumerOnInit.accept(this); - } @ForOverride @@ -1009,6 +1023,7 @@ public void resetEvaluator() { */ public void notifyCommandComplete(ExtendedEventHandler eventHandler) throws InterruptedException { try { + drainChangedFiles(); memoizingEvaluator.noteEvaluationsAtSameVersionMayBeFinished(eventHandler); } finally { globFunction.complete(); @@ -1337,8 +1352,8 @@ public ImmutableList getWorkspaceStatusArtifacts(ExtendedEventHandler EvaluationResult result = evaluate( ImmutableList.of(WorkspaceStatusValue.BUILD_INFO_KEY), - /*keepGoing=*/ true, - /*numThreads=*/ 1, + /* keepGoing= */ true, + /* numThreads= */ 1, eventHandler); WorkspaceStatusValue value = checkNotNull(result.get(WorkspaceStatusValue.BUILD_INFO_KEY)); return ImmutableList.of(value.getStableArtifact(), value.getVolatileArtifact()); @@ -1658,7 +1673,7 @@ public EvaluationResult buildArtifacts( Iterable aspectKeys = AspectCompletionValue.keys(aspects, topLevelArtifactContext); Iterable testKeys = TestCompletionValue.keys( - parallelTests, topLevelArtifactContext, /*exclusiveTesting=*/ false); + parallelTests, topLevelArtifactContext, /* exclusiveTesting= */ false); EvaluationContext evaluationContext = newEvaluationContextBuilder() .setKeepGoing(options.getOptions(KeepGoingOption.class).keepGoing) @@ -1721,11 +1736,13 @@ public EvaluationResult runExclusiveTest( try { Iterable testKeys = TestCompletionValue.keys( - ImmutableSet.of(exclusiveTest), topLevelArtifactContext, /*exclusiveTesting=*/ true); + ImmutableSet.of(exclusiveTest), + topLevelArtifactContext, + /* exclusiveTesting= */ true); return evaluate( testKeys, - /*keepGoing=*/ options.getOptions(KeepGoingOption.class).keepGoing, - /*numThreads=*/ options.getOptions(BuildRequestOptions.class).jobs, + /* keepGoing= */ options.getOptions(KeepGoingOption.class).keepGoing, + /* numThreads= */ options.getOptions(BuildRequestOptions.class).jobs, reporter); } finally { // Also releases thread locks. @@ -2242,8 +2259,8 @@ TopLevelActionConflictReport filterActionConflictsForConfiguredTargetsAndAspects EvaluationResult result = evaluate( TopLevelActionLookupConflictFindingFunction.keys(keys, topLevelArtifactContext), - /*keepGoing=*/ true, - /*numThreads=*/ ResourceUsage.getAvailableProcessors(), + /* keepGoing= */ true, + /* numThreads= */ ResourceUsage.getAvailableProcessors(), eventHandler); // Remove top-level action-conflict detection values for memory efficiency. Non-top-level ones @@ -2359,8 +2376,8 @@ public Predicate filterActionConflictsForTopLevelArtifacts( EvaluationResult result = evaluate( Iterables.transform(artifacts, ActionLookupConflictFindingValue::key), - /*keepGoing=*/ true, - /*numThreads=*/ ResourceUsage.getAvailableProcessors(), + /* keepGoing= */ true, + /* numThreads= */ ResourceUsage.getAvailableProcessors(), eventHandler); // Remove remaining action-conflict detection values immediately for memory efficiency. 
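To make the double-call pattern described in the drainChangedFiles() javadoc above concrete, here is a minimal sketch of the two call sites. The pre-execution call site is assumed from that javadoc (it is not visible in this excerpt); the call from notifyCommandComplete() is part of this diff.

// Sketch, not actual SkyframeExecutor code: the null check added to drainChangedFiles() makes
// the method idempotent, so calling it twice per command posts the changed-files event once.
void beforeExecutionPhase() {   // hypothetical name; per the javadoc, only reached when building
  drainChangedFiles();          // posts the event and nulls out incrementalBuildMonitor
}

void onCommandComplete() {      // corresponds to notifyCommandComplete() in this diff
  drainChangedFiles();          // monitor is already null for building commands, so this is a no-op
}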
@@ -2381,7 +2398,7 @@ public Predicate filterActionConflictsForTopLevelArtifacts( public final EvaluationResult prepareAndGet( Set roots, EvaluationContext evaluationContext) throws InterruptedException { EvaluationContext evaluationContextToUse = - evaluationContext.builder().setKeepGoing(/*keepGoing=*/ true).build(); + evaluationContext.builder().setKeepGoing(/* keepGoing= */ true).build(); return memoizingEvaluator.evaluate(roots, evaluationContextToUse); } @@ -2525,7 +2542,8 @@ Package getPackage(ExtendedEventHandler eventHandler, PackageIdentifier pkgName) // overall is in nokeep_going mode: the worst that happens is we parse some unnecessary // .bzl files. result = - evaluate(keys, /*keepGoing=*/ true, /*numThreads=*/ DEFAULT_THREAD_COUNT, eventHandler); + evaluate( + keys, /* keepGoing= */ true, /* numThreads= */ DEFAULT_THREAD_COUNT, eventHandler); } ErrorInfo error = result.getError(pkgName); if (error != null) { @@ -3188,6 +3206,11 @@ public ImmutableMap getExplicitStarlarkOptions( java.util.function.Predicate filter) { return ImmutableMap.of(); } + + @Override + public ImmutableList getUserOptions() { + return ImmutableList.of(); + } }); } @@ -3981,6 +4004,13 @@ public BuildConfigurationValue getPrerequisiteConfiguration(BuildConfigurationKe throws InterruptedException { return (BuildConfigurationValue) memoizingEvaluator.getExistingValue(key); } + + @Override + @Nullable + public UnloadedToolchainContext getUnloadedToolchainContext(ToolchainContextKey key) + throws InterruptedException { + return (UnloadedToolchainContext) memoizingEvaluator.getExistingValue(key); + } } /** @@ -4011,7 +4041,7 @@ public final void prepareForSkyfocus( + StringUtilities.capitalize(productName) + " will reclaim memory not needed to build the working set. Run '" + productName - + " info working_set' to show the working set.")); + + " dump --skyframe=working_set' to show the working set, after this command.")); if (skyfocusOptions.handlingStrategy.equals(SkyfocusHandlingStrategy.STRICT)) { reporter.handle(Event.warn("Changes outside of the working set will cause a build error.")); @@ -4102,7 +4132,10 @@ public final void runSkyfocus( actionCache); skyfocusState = - newSkyfocusState.toBuilder().verificationSet(focusResult.verificationSet()).build(); + newSkyfocusState.toBuilder() + .frontierSet(focusResult.deps()) + .verificationSet(focusResult.verificationSet()) + .build(); // Shouldn't result in an empty graph. checkState(!focusResult.deps().isEmpty(), "FocusResult deps should not be empty"); diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/config/FlagSetFunction.java b/src/main/java/com/google/devtools/build/lib/skyframe/config/FlagSetFunction.java index 63eb6028d338d5..9010210dd483ef 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/config/FlagSetFunction.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/config/FlagSetFunction.java @@ -13,11 +13,16 @@ // limitations under the License. 
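The prepareForSkyfocus() message above now points users at 'dump --skyframe=working_set' instead of 'info working_set', matching the removal of SkyfocusWorkingSetItem and the new WORKING_SET case in DumpCommand earlier in this diff. The bodies of dumpWorkingSet/dumpFrontierSet added to SkyfocusState are not visible in this excerpt; the sketch below simply mirrors the printing behaviour of the deleted info item (sorted, newline-separated) and should be read as an assumption, using the java.io.PrintStream import added in this diff plus guava's ImmutableSet and java.util.TreeSet.

// Assumed shape of SkyfocusState.dumpWorkingSet, modelled on the deleted SkyfocusWorkingSetItem;
// workingSetStrings() is the accessor the old info item used.
public void dumpWorkingSet(PrintStream out) {
  ImmutableSet<String> files = workingSetStrings();
  if (files.isEmpty()) {
    out.println("No working set found.");
    return;
  }
  new TreeSet<>(files).forEach(out::println); // deterministic ordering for easy diffing
}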
package com.google.devtools.build.lib.skyframe.config; +import static com.google.common.collect.ImmutableSet.toImmutableSet; + import com.google.common.base.Preconditions; +import com.google.common.base.Splitter; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.analysis.config.BuildOptions; import com.google.devtools.build.lib.analysis.config.BuildOptionsView; +import com.google.devtools.build.lib.analysis.config.FragmentOptions; import com.google.devtools.build.lib.analysis.config.transitions.PatchTransition; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.Label.RepoContext; @@ -81,6 +86,9 @@ public SkyValue compute(SkyKey skyKey, Environment env) ImmutableList sclConfigAsStarlarkList = getSclConfig( key.getProjectFile(), projectValue, key.getSclConfig(), key.enforceCanonical()); + if (key.enforceCanonical()) { + validateNoExtraFlagsSet(key.getTargetOptions()); + } ParsedFlagsValue parsedFlags = parseFlags(sclConfigAsStarlarkList, env); if (parsedFlags == null) { return null; @@ -125,11 +133,40 @@ private ImmutableList getSclConfig( "--scl_config=%s is not a valid configuration for this project.%s", sclConfigName, supportedConfigsDesc(projectFile, supportedConfigs))), Transience.PERSISTENT); - } + } return ImmutableList.copyOf(sclConfigValue); } + private void validateNoExtraFlagsSet(BuildOptions targetOptions) throws FlagSetFunctionException { + ImmutableList.Builder allOptionsAsStringsBuilder = new ImmutableList.Builder<>(); + targetOptions.getStarlarkOptions().keySet().stream() + .map(Object::toString) + .forEach(allOptionsAsStringsBuilder::add); + for (FragmentOptions fragmentOptions : targetOptions.getNativeOptions()) { + fragmentOptions.asMap().keySet().forEach(allOptionsAsStringsBuilder::add); + } + ImmutableList allOptionsAsStrings = allOptionsAsStringsBuilder.build(); + ImmutableSet overlap = + targetOptions.getUserOptions().stream() + .filter( + option -> + allOptionsAsStrings.contains( + Iterables.get(Splitter.on("=").split(option), 0).replaceFirst("--", ""))) + .filter(option -> !option.startsWith("--scl_config")) + .collect(toImmutableSet()); + if (!overlap.isEmpty()) { + throw new FlagSetFunctionException( + new UnsupportedConfigException( + String.format( + "When --enforce_project_configs is set, --scl_config must be the only" + + " configuration-affecting flag in the build. Found %s in the command line" + + " or user blazerc", + overlap)), + Transience.PERSISTENT); + } + } + /** Returns a user-friendly description of project-supported configurations. 
*/ private static String supportedConfigsDesc( Label projectFile, Dict supportedConfigs) { diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/ArrayProcessor.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/ArrayProcessor.java index 4973233053b461..31eb1b346d28ff 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/ArrayProcessor.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/ArrayProcessor.java @@ -18,14 +18,12 @@ import static com.google.devtools.build.lib.skyframe.serialization.CodecHelpers.readShort; import static com.google.devtools.build.lib.skyframe.serialization.CodecHelpers.writeChar; import static com.google.devtools.build.lib.skyframe.serialization.CodecHelpers.writeShort; -import static com.google.devtools.build.lib.unsafe.UnsafeProvider.unsafe; -import static sun.misc.Unsafe.ARRAY_OBJECT_BASE_OFFSET; -import static sun.misc.Unsafe.ARRAY_OBJECT_INDEX_SCALE; import com.google.protobuf.CodedInputStream; import com.google.protobuf.CodedOutputStream; import java.io.IOException; import java.lang.reflect.Array; +import javax.annotation.Nullable; /** * Stateless class that encodes and decodes arrays that may be multi-dimensional. @@ -44,21 +42,13 @@ void serialize( throws IOException, SerializationException; /** - * Deserializes an array into {@code obj} at {@code offset}. + * Deserializes an array of type {@code arrayType} from {@code codedIn}. * - *

A {@code (obj, offset)} tuple specifies where to write the array. Note that this - * representation works whether {@code obj} is an array or non-array object. - * - * @param type the type of the array. - * @param obj the object to contain the array, that could be an array itself. - * @param offset offset within obj to write the deserialized value. + * @return the array object. {@link Object} is the most specific common type ancestor of {@code + * Object[]} and {@code int[]}. */ - void deserialize( - AsyncDeserializationContext context, - CodedInputStream codedIn, - Class type, - Object obj, - long offset) + Object deserialize( + AsyncDeserializationContext context, CodedInputStream codedIn, Class arrayType) throws IOException, SerializationException; static ArrayProcessor forType(Class type) { @@ -144,39 +134,31 @@ public abstract void serializeArrayData(CodedOutputStream codedOut, Object untyp throws IOException; @Override - public final void deserialize( - AsyncDeserializationContext context, - CodedInputStream codedIn, - Class type, - Object obj, - long offset) - throws IOException { - deserialize(codedIn, type, obj, offset); + public Object deserialize( + AsyncDeserializationContext context, CodedInputStream codedIn, Class arrayType) + throws IOException, SerializationException { + return deserialize(codedIn, arrayType); } - public final void deserialize(CodedInputStream codedIn, Class type, Object obj, long offset) - throws IOException { + /** Primitive arrays can be deserialized without an {@link AsyncDeserializationContext}. */ + @Nullable + private Object deserialize(CodedInputStream codedIn, Class arrayType) throws IOException { int length = codedIn.readInt32(); if (length == 0) { - return; // It was null. + return null; // It was null. } length--; // Shifts the length back. It was shifted to allow 0 to be used for null. - Class componentType = type.getComponentType(); - if (componentType.isArray()) { - Object arr = Array.newInstance(componentType, length); - unsafe().putObject(obj, offset, arr); - for (int i = 0; i < length; ++i) { - deserialize( - codedIn, - componentType, - arr, - ARRAY_OBJECT_BASE_OFFSET + ARRAY_OBJECT_INDEX_SCALE * i); - } - return; + Class componentType = arrayType.getComponentType(); + if (!componentType.isArray()) { + return deserializeArrayData(codedIn, length); } - unsafe().putObject(obj, offset, deserializeArrayData(codedIn, length)); + var arr = (Object[]) Array.newInstance(componentType, length); + for (int i = 0; i < length; ++i) { + arr[i] = deserialize(codedIn, componentType); + } + return arr; } public abstract Object deserializeArrayData(CodedInputStream codedIn, int length) @@ -404,41 +386,29 @@ public void serialize( } @Override - public void deserialize( - AsyncDeserializationContext context, - CodedInputStream codedIn, - Class type, - Object obj, - long offset) + @Nullable + public Object deserialize( + AsyncDeserializationContext context, CodedInputStream codedIn, Class arrayType) throws IOException, SerializationException { int length = codedIn.readInt32(); if (length == 0) { - return; // It was null. + return null; // It was null. } length--; // Shifts the length back. It was shifted to allow 0 to be used for null. - Class componentType = type.getComponentType(); - Object arr = Array.newInstance(componentType, length); - unsafe().putObject(obj, offset, arr); - - if (length == 0) { - return; // Empty array. 
- } + Class componentType = arrayType.getComponentType(); + var arr = (Object[]) Array.newInstance(componentType, length); - // It's a non-empty array if this is reached. - if (componentType.isArray()) { - for (int i = 0; i < length; ++i) { - deserialize( - context, - codedIn, - componentType, - arr, - ARRAY_OBJECT_BASE_OFFSET + ARRAY_OBJECT_INDEX_SCALE * i); + if (length > 0) { + if (componentType.isArray()) { + for (int i = 0; i < length; ++i) { + arr[i] = deserialize(context, codedIn, componentType); + } + } else { + deserializeObjectArray(context, codedIn, arr, length); } - return; } - - deserializeObjectArray(context, codedIn, arr, length); + return arr; } }; @@ -454,15 +424,16 @@ static void serializeObjectArray( } /** - * Deserializes {@code length} objects into the untyped {@code Object[]} in {@code arr}. + * Deserializes {@code length} objects into {@code arr}. * *

Partially deserialized values may be visible to the caller. */ + @SuppressWarnings("AvoidObjectArrays") // explicit, low-level array handling static void deserializeObjectArray( - AsyncDeserializationContext context, CodedInputStream codedIn, Object arr, int length) + AsyncDeserializationContext context, CodedInputStream codedIn, Object[] arr, int length) throws IOException, SerializationException { for (int i = 0; i < length; ++i) { - context.deserialize(codedIn, arr, ARRAY_OBJECT_BASE_OFFSET + ARRAY_OBJECT_INDEX_SCALE * i); + context.deserializeArrayElement(codedIn, arr, i); } } } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/AsyncDeserializationContext.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/AsyncDeserializationContext.java index 0de9be767fb08c..6f08fd9cc5366e 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/AsyncDeserializationContext.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/AsyncDeserializationContext.java @@ -77,6 +77,7 @@ void deserialize(CodedInputStream codedIn, T obj, FieldSetter set * deserializing an array. It has similar behavior. The result can be written asynchronously or * not at all if its value was null. */ + @Deprecated // TODO: b/331765692 - delete this method void deserialize(CodedInputStream codedIn, Object obj, long offset) throws IOException, SerializationException; @@ -88,9 +89,22 @@ void deserialize(CodedInputStream codedIn, Object obj, long offset) * container codecs that perform reference counting. The {@code done} callback is always called, * even if the deserialized value is null. */ + @Deprecated // TODO: b/331765692 - delete this method void deserialize(CodedInputStream codedIn, Object obj, long offset, Runnable done) throws IOException, SerializationException; + /** + * Parses the next object from {@code codedIn} and writes it into {@code arr} at {@code index}. + * + *

<p>Deserialization may complete asynchronously, for example, when the input requires a Skyframe + * lookup to compute. + * +
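For illustration, a minimal sketch of the loop a container codec could use with the new index-based method. The helper class and method names are hypothetical, and it is assumed to live in the same serialization package (so AsyncDeserializationContext and SerializationException need no imports); only deserializeArrayElement itself comes from this change, and the loop mirrors what deserializeObjectArray above now does.

```java
import com.google.protobuf.CodedInputStream;
import java.io.IOException;

// Hypothetical helper; only context.deserializeArrayElement(...) is the API added here.
final class ArrayElementReadSketch {
  static void readAll(AsyncDeserializationContext context, CodedInputStream codedIn, Object[] arr)
      throws IOException, SerializationException {
    for (int i = 0; i < arr.length; i++) {
      // Replaces context.deserialize(codedIn, arr, ARRAY_OBJECT_BASE_OFFSET + ...).
      // The element may be filled in later if the read completes asynchronously,
      // and stays null if the serialized value was null.
      context.deserializeArrayElement(codedIn, arr, i);
    }
  }

  private ArrayElementReadSketch() {}
}
```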

No write is performed when the resulting value is null. + */ + @SuppressWarnings("AvoidObjectArrays") // explicit, low-level array handling + void deserializeArrayElement(CodedInputStream codedIn, Object[] arr, int index) + throws IOException, SerializationException; + /** * Reads a value from key value storage into {@code obj}. * diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/DeserializationContext.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/DeserializationContext.java index f429d5225a80a1..ec3962376842b5 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/DeserializationContext.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/DeserializationContext.java @@ -64,6 +64,7 @@ public void deserialize(CodedInputStream codedIn, T parent, FieldSetter void getSharedValue( CodedInputStream codedIn, diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/DynamicCodec.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/DynamicCodec.java index d9aa1151442a80..978f42c505fcf0 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/DynamicCodec.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/DynamicCodec.java @@ -367,7 +367,7 @@ public void serialize(SerializationContext context, CodedOutputStream codedOut, public void deserialize( AsyncDeserializationContext context, CodedInputStream codedIn, Object obj) throws IOException, SerializationException { - arrayProcessor.deserialize(context, codedIn, type, obj, offset); + unsafe().putObject(obj, offset, arrayProcessor.deserialize(context, codedIn, type)); } } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/ImmutableSerializationContext.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/ImmutableSerializationContext.java index 3f9f08d01a56e4..bd79204ad18f85 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/ImmutableSerializationContext.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/ImmutableSerializationContext.java @@ -70,7 +70,7 @@ public void serializeLeaf( } @Override - boolean writeBackReferenceIfMemoized(Object obj, CodedOutputStream codedOut) { + boolean writeBackReferenceIfMemoized(Object obj, CodedOutputStream codedOut, boolean isLeafType) { return false; } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/LeafObjectCodec.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/LeafObjectCodec.java index b6ccbfa38aa487..a56094317c7df2 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/LeafObjectCodec.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/LeafObjectCodec.java @@ -23,6 +23,9 @@ *

<p>{@link LeafObjectCodec}s may only delegate to other {@link LeafObjectCodec}s and are * restricted from using any asynchronous features. By construction, they can only be used to * serialize acyclic values and are always synchronous. + * +
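For context, a rough sketch of what a codec in this family can look like. The value record is hypothetical, and the leaf-context overload shapes are assumed from the serializeLeaf call site in the MemoizingSerializationContext hunk below rather than copied from LeafObjectCodec itself.

```java
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.CodedOutputStream;
import java.io.IOException;

// Hypothetical value type: immutable, acyclic, cheap equals/hashCode.
record Version(int major, int minor) {}

// Sketch only; assumes LeafObjectCodec exposes leaf-context serialize/deserialize overloads.
final class VersionCodec extends LeafObjectCodec<Version> {
  @Override
  public Class<Version> getEncodedClass() {
    return Version.class;
  }

  @Override
  public void serialize(LeafSerializationContext context, Version obj, CodedOutputStream codedOut)
      throws SerializationException, IOException {
    codedOut.writeInt32NoTag(obj.major());
    codedOut.writeInt32NoTag(obj.minor());
  }

  @Override
  public Version deserialize(LeafDeserializationContext context, CodedInputStream codedIn)
      throws SerializationException, IOException {
    return new Version(codedIn.readInt32(), codedIn.readInt32());
  }
}
```

With the value-based memoization noted below, two equal Version instances would be written once plus a backreference, even if they are distinct objects.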

Values using this codec will be memoized using {@link Object#hashCode} and {@link + * Object#equals}. */ public abstract class LeafObjectCodec implements ObjectCodec { @Override diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/MemoizingSerializationContext.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/MemoizingSerializationContext.java index 99ae2565e6f6b2..492702452dc039 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/MemoizingSerializationContext.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/MemoizingSerializationContext.java @@ -139,8 +139,8 @@ abstract class MemoizingSerializationContext extends SerializationContext { private final Reference2IntOpenHashMap table = new Reference2IntOpenHashMap<>(); - /** Table for types memoized using values equality, currently only {@link String}. */ - private final Object2IntOpenHashMap valuesTable = new Object2IntOpenHashMap<>(); + /** Table for types serialized with {@link LeafObjectCodec}, using value-based equality. */ + private final Object2IntOpenHashMap leafTable = new Object2IntOpenHashMap<>(); private final Set> explicitlyAllowedClasses = new HashSet<>(); @@ -154,7 +154,7 @@ static MemoizingSerializationContext createForTesting( ObjectCodecRegistry codecRegistry, ImmutableClassToInstanceMap dependencies) { super(codecRegistry, dependencies); table.defaultReturnValue(NO_VALUE); - valuesTable.defaultReturnValue(NO_VALUE); + leafTable.defaultReturnValue(NO_VALUE); } static byte[] serializeToBytes( @@ -188,7 +188,7 @@ public final void serializeLeaf( if (writeIfNullOrConstant(obj, codedOut)) { return; } - int maybePrevious = getMemoizedIndex(obj); + int maybePrevious = getMemoizedIndex(obj, /* isLeafType= */ true); if (maybePrevious != NO_VALUE) { // There was a previous entry. Writes a backreference, subtracting 2 to avoid 0 (which // indicates null), and -1 (which indicates an immediate value). @@ -200,7 +200,7 @@ public final void serializeLeaf( codec.serialize((LeafSerializationContext) this, obj, codedOut); // By necessity, a LeafCodec is treated like MEMOIZE_AFTER because when deserializing, the // value will only be available as a backreference after its deserialization is complete. - int unusedId = memoize(obj); + int unusedId = memoize(obj, /* isLeafType= */ true); } @Override @@ -227,19 +227,20 @@ final void serializeWithCodec(ObjectCodec codec, Object obj, CodedOutput switch (codec.getStrategy()) { case MEMOIZE_BEFORE: { - // Deserialization determines the value of this tag based on the size of its memo table. - memoize(obj); + // Deserialization can determine the value of the tag from the size of its memo table so + // the tag does not need to be written to the stream. + memoize(obj, /* isLeafType= */ false); // LeafObjectCodec is always MEMOIZE_AFTER. codec.serialize(this, obj, codedOut); break; } case MEMOIZE_AFTER: { codec.serialize(this, obj, codedOut); + boolean isLeafType = codec instanceof LeafObjectCodec; // If serializing the children caused the parent object itself to be serialized due to a - // cycle, then there's now a memo entry for the parent. Don't overwrite it with a new - // id. - int cylicallyCreatedId = getMemoizedIndex(obj); - int id = (cylicallyCreatedId != NO_VALUE) ? cylicallyCreatedId : memoize(obj); + // cycle, then there's now a memo entry for the parent. Don't overwrite it with a new id. 
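The practical difference between the identity table and the new leaf table can be shown in isolation with the same fastutil types. This is a standalone sketch, not part of the change; -1 stands in for NO_VALUE.

```java
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import it.unimi.dsi.fastutil.objects.Reference2IntOpenHashMap;

final class MemoTableSketch {
  public static void main(String[] args) {
    Reference2IntOpenHashMap<Object> table = new Reference2IntOpenHashMap<>();
    Object2IntOpenHashMap<Object> leafTable = new Object2IntOpenHashMap<>();
    table.defaultReturnValue(-1);
    leafTable.defaultReturnValue(-1);

    String first = new String("bazel");
    String second = new String("bazel"); // equal, but a distinct instance

    table.put(first, 0);
    leafTable.put(first, 0);

    System.out.println(table.getInt(second)); // -1: identity lookup misses the copy
    System.out.println(leafTable.getInt(second)); // 0: value lookup hits, so a backreference is written
  }
}
```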
+ int cylicallyCreatedId = getMemoizedIndex(obj, isLeafType); + int id = (cylicallyCreatedId != NO_VALUE) ? cylicallyCreatedId : memoize(obj, isLeafType); codedOut.writeInt32NoTag(id); break; } @@ -247,9 +248,9 @@ final void serializeWithCodec(ObjectCodec codec, Object obj, CodedOutput } @Override - final boolean writeBackReferenceIfMemoized(Object obj, CodedOutputStream codedOut) - throws IOException { - int memoizedIndex = getMemoizedIndex(obj); + final boolean writeBackReferenceIfMemoized( + Object obj, CodedOutputStream codedOut, boolean isLeafType) throws IOException { + int memoizedIndex = getMemoizedIndex(obj, isLeafType); if (memoizedIndex == NO_VALUE) { return false; } @@ -267,8 +268,8 @@ public final boolean isMemoizing() { * If the value is already memoized, return its on-the-wire id; otherwise returns {@link * #NO_VALUE}. */ - private int getMemoizedIndex(Object value) { - return isValueType(value) ? valuesTable.getInt(value) : table.getInt(value); + private int getMemoizedIndex(Object value, boolean isLeafType) { + return isLeafType ? leafTable.getInt(value) : table.getInt(value); } /** @@ -277,19 +278,14 @@ private int getMemoizedIndex(Object value) { *

{@code value} must not already be present. */ @CanIgnoreReturnValue // may be called for side effect - private int memoize(Object value) { + private int memoize(Object value, boolean isLeafType) { // Ids count sequentially from 0. - int newId = table.size() + valuesTable.size(); - int maybePrevious = - isValueType(value) ? valuesTable.put(value, newId) : table.put(value, newId); + int newId = table.size() + leafTable.size(); + int maybePrevious = isLeafType ? leafTable.put(value, newId) : table.put(value, newId); checkState(maybePrevious == NO_VALUE, "Memoized object '%s' multiple times", value); return newId; } - private boolean isValueType(Object value) { - return value instanceof String; - } - private static void serializeToStream( ObjectCodecRegistry codecRegistry, ImmutableClassToInstanceMap dependencies, diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/MultimapCodecs.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/MultimapCodecs.java index 202ceeb18c0210..4355f6ad3ad770 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/MultimapCodecs.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/MultimapCodecs.java @@ -14,8 +14,6 @@ package com.google.devtools.build.lib.skyframe.serialization; import static com.google.devtools.build.lib.skyframe.serialization.ArrayProcessor.deserializeObjectArray; -import static sun.misc.Unsafe.ARRAY_OBJECT_BASE_OFFSET; -import static sun.misc.Unsafe.ARRAY_OBJECT_INDEX_SCALE; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableSetMultimap; @@ -62,8 +60,7 @@ public DeferredValue deserializeDeferred( ImmutableListMultimapBuffer buffer = new ImmutableListMultimapBuffer(size); for (int i = 0; i < size; i++) { - context.deserialize( - codedIn, buffer.keys, ARRAY_OBJECT_BASE_OFFSET + i * ARRAY_OBJECT_INDEX_SCALE); + context.deserializeArrayElement(codedIn, buffer.keys, i); int valuesCount = codedIn.readInt32(); Object[] values = new Object[valuesCount]; buffer.values[i] = values; @@ -156,8 +153,7 @@ private static void deserializeSetMultimap( AsyncDeserializationContext context, CodedInputStream codedIn, MultimapBuffer buffer) throws SerializationException, IOException { for (int i = 0; i < buffer.size(); i++) { - context.deserialize( - codedIn, buffer.keys, ARRAY_OBJECT_BASE_OFFSET + i * ARRAY_OBJECT_INDEX_SCALE); + context.deserializeArrayElement(codedIn, buffer.keys, i); int valuesCount = codedIn.readInt32(); Object[] values = new Object[valuesCount]; diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/SerializationContext.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/SerializationContext.java index 86eb6ecadfa5fa..65c36dfe648f5d 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/SerializationContext.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/SerializationContext.java @@ -51,14 +51,13 @@ public final void serialize(@Nullable Object object, CodedOutputStream codedOut) if (writeIfNullOrConstant(object, codedOut)) { return; } - if (writeBackReferenceIfMemoized(object, codedOut)) { - return; - } CodecDescriptor descriptor = codecRegistry.getCodecDescriptorForObject(object); - codedOut.writeSInt32NoTag(descriptor.getTag()); - @SuppressWarnings("unchecked") ObjectCodec castCodec = (ObjectCodec) descriptor.getCodec(); + if (writeBackReferenceIfMemoized(object, codedOut, castCodec instanceof 
LeafObjectCodec)) { + return; + } + codedOut.writeSInt32NoTag(descriptor.getTag()); serializeWithCodec(castCodec, object, codedOut); } @@ -171,11 +170,13 @@ abstract void serializeWithCodec( * *

Never succeeds if memoization is disabled. * + * @param isLeafType true if the codec used for {@code obj} would be an instance of {@link + * LeafObjectCodec} * @return true if {@code obj} was serialized to {@code codedOut} as a backreference */ @ForOverride - abstract boolean writeBackReferenceIfMemoized(Object obj, CodedOutputStream codedOut) - throws IOException; + abstract boolean writeBackReferenceIfMemoized( + Object obj, CodedOutputStream codedOut, boolean isLeafType) throws IOException; public abstract boolean isMemoizing(); diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/SharedValueDeserializationContext.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/SharedValueDeserializationContext.java index 6ab8ac488bbf3c..5ea4179d945c62 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/SharedValueDeserializationContext.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/SharedValueDeserializationContext.java @@ -172,6 +172,7 @@ static Object deserializeWithSkyframe( } @Override + @SuppressWarnings("SunApi") // TODO: b/331765692 - delete this public void deserialize(CodedInputStream codedIn, Object parent, long offset) throws IOException, SerializationException { Object result = processTagAndDeserialize(codedIn); @@ -222,6 +223,7 @@ public void deserialize(CodedInputStream codedIn, T parent, FieldSetter futureResult) { + addReadStatusFuture( + Futures.transform(futureResult, value -> arr[index] = value, directExecutor())); + return; + } + + arr[index] = result; + } + @Override public void getSharedValue( CodedInputStream codedIn, @@ -410,20 +429,41 @@ Object deserializeAndMaybeHandleDeferredValues(ObjectCodec codec, CodedInputS throws SerializationException, IOException { int startingReadCount = readStatusFutures == null ? 0 : readStatusFutures.size(); - Object value; - if (codec instanceof DeferredObjectCodec deferredCodec) { - // On other analogous codepaths, `ObjectCodec.safeCast' is applied to the resulting value. - // Not all codecs have this property, notably DynamicCodec, but DeferredObjectCodec's type - // parameters guarantee type of the deserialized value. - value = deferredCodec.deserializeDeferred(this, codedIn); - } else { - value = codec.safeCast(codec.deserialize(this, codedIn)); - } + Object value = + switch (codec) { + // On other analogous codepaths, `ObjectCodec.safeCast' is applied to the resulting value. + // Not all codecs have this property, notably DynamicCodec, but DeferredObjectCodec's type + // parameters guarantee type of the deserialized value. 
+ case DeferredObjectCodec deferredCodec -> + deferredCodec.deserializeDeferred(this, codedIn); + case InterningObjectCodec interningCodec -> { + Object initialValue = interningCodec.deserializeInterned(this, codedIn); + @SuppressWarnings("unchecked") + InterningObjectCodec castCodec = (InterningObjectCodec) interningCodec; + yield new InterningDeferredValue(castCodec, codec.safeCast(initialValue)); + } + default -> codec.safeCast(codec.deserialize(this, codedIn)); + }; this.lastStartingReadCount = startingReadCount; return value; } + private static class InterningDeferredValue implements DeferredValue { + private final InterningObjectCodec codec; + private final Object value; + + private InterningDeferredValue(InterningObjectCodec codec, Object value) { + this.codec = codec; + this.value = value; + } + + @Override + public Object call() { + return codec.intern(value); + } + } + @Override @SuppressWarnings("FutureReturnValueIgnored") Object combineValueWithReadFutures(Object value) { diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/AutoCodecProcessor.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/AutoCodecProcessor.java index 2654bcf7f82192..7e323bbe065bee 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/AutoCodecProcessor.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/AutoCodecProcessor.java @@ -17,6 +17,7 @@ import static com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodecProcessor.InstantiatorKind.CONSTRUCTOR; import static com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodecProcessor.InstantiatorKind.FACTORY_METHOD; import static com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodecProcessor.InstantiatorKind.INTERNER; +import static com.google.devtools.build.lib.skyframe.serialization.autocodec.TypeOperations.findRelationWithGenerics; import static com.google.devtools.build.lib.skyframe.serialization.autocodec.TypeOperations.getErasure; import static com.google.devtools.build.lib.skyframe.serialization.autocodec.TypeOperations.getErasureAsMirror; import static com.google.devtools.build.lib.skyframe.serialization.autocodec.TypeOperations.sanitizeTypeParameter; @@ -30,6 +31,7 @@ import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec.Instantiator; import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec.Interner; import com.google.devtools.build.lib.skyframe.serialization.autocodec.SerializationCodeGenerator.Marshaller; +import com.google.devtools.build.lib.skyframe.serialization.autocodec.TypeOperations.Relation; import com.google.devtools.build.lib.unsafe.UnsafeProvider; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.JavaFile; @@ -41,7 +43,6 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import javax.annotation.Nullable; import javax.annotation.processing.AbstractProcessor; import javax.annotation.processing.ProcessingEnvironment; import javax.annotation.processing.Processor; @@ -53,7 +54,6 @@ import javax.lang.model.element.Modifier; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; -import javax.lang.model.type.DeclaredType; import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.ElementFilter; @@ -266,56 +266,12 @@ private static boolean 
hasInternerAnnotation(ExecutableElement elt) { return elt.getAnnotation(Interner.class) != null; } - private enum Relation { - INSTANCE_OF, - EQUAL_TO, - SUPERTYPE_OF, - UNRELATED_TO - } - - @Nullable - private Relation findRelationWithGenerics(TypeMirror type1, TypeMirror type2) { - if (type1.getKind() == TypeKind.TYPEVAR - || type1.getKind() == TypeKind.WILDCARD - || type2.getKind() == TypeKind.TYPEVAR - || type2.getKind() == TypeKind.WILDCARD) { - return Relation.EQUAL_TO; - } - if (env.getTypeUtils().isAssignable(type1, type2)) { - if (env.getTypeUtils().isAssignable(type2, type1)) { - return Relation.EQUAL_TO; - } - return Relation.INSTANCE_OF; - } - if (env.getTypeUtils().isAssignable(type2, type1)) { - return Relation.SUPERTYPE_OF; - } - // From here on out, we can't detect subtype/supertype, we're only checking for equality. - TypeMirror erasedType1 = env.getTypeUtils().erasure(type1); - TypeMirror erasedType2 = env.getTypeUtils().erasure(type2); - if (!env.getTypeUtils().isSameType(erasedType1, erasedType2)) { - // Technically, there could be a relationship, but it's too hard to figure out for now. - return Relation.UNRELATED_TO; - } - List genericTypes1 = ((DeclaredType) type1).getTypeArguments(); - List genericTypes2 = ((DeclaredType) type2).getTypeArguments(); - if (genericTypes1.size() != genericTypes2.size()) { - return null; - } - for (int i = 0; i < genericTypes1.size(); i++) { - Relation result = findRelationWithGenerics(genericTypes1.get(i), genericTypes2.get(i)); - if (result != Relation.EQUAL_TO) { - return Relation.UNRELATED_TO; - } - } - return Relation.EQUAL_TO; - } - private void verifyFactoryMethod(TypeElement encodedType, ExecutableElement elt) throws SerializationProcessingException { boolean success = elt.getModifiers().contains(Modifier.STATIC); if (success) { - Relation equalityTest = findRelationWithGenerics(elt.getReturnType(), encodedType.asType()); + Relation equalityTest = + findRelationWithGenerics(elt.getReturnType(), encodedType.asType(), env); success = equalityTest == Relation.EQUAL_TO || equalityTest == Relation.INSTANCE_OF; } if (!success) { @@ -378,7 +334,7 @@ private MethodSpec buildSerializeMethodWithInstantiator( Optional hasField = getFieldByNameRecursive(encodedType, parameter.getSimpleName().toString()); if (hasField.isPresent()) { - if (findRelationWithGenerics(hasField.get().value.asType(), parameter.asType()) + if (findRelationWithGenerics(hasField.get().value.asType(), parameter.asType(), env) == Relation.UNRELATED_TO) { throw new SerializationProcessingException( parameter, @@ -428,7 +384,7 @@ private String findGetterForClass(VariableElement parameter, TypeElement type) if (!element.getModifiers().contains(Modifier.STATIC) && !element.getModifiers().contains(Modifier.PRIVATE) && possibleGetterNames.contains(element.getSimpleName().toString()) - && findRelationWithGenerics(parameter.asType(), element.getReturnType()) + && findRelationWithGenerics(parameter.asType(), element.getReturnType(), env) != Relation.UNRELATED_TO) { return element.getSimpleName().toString(); } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/BUILD b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/BUILD index fdaf8c979c96e0..e08db8a7e62385 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/BUILD +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/BUILD @@ -131,7 +131,6 @@ java_library( "//third_party:auto_service", 
"//third_party:auto_value", "//third_party:guava", - "//third_party:jsr305", "//third_party/java/javapoet", "//third_party/protobuf:protobuf_java", ], diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/CodecGenerator.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/CodecGenerator.java index a9a7ead26f6371..fcf899d064c036 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/CodecGenerator.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/CodecGenerator.java @@ -47,7 +47,7 @@ final TypeSpec defineCodec( MethodSpec.Builder deserialize = initializeDeserializeMethod(encodedType); for (FieldGenerator generator : fieldGenerators) { - generator.generateOffsetMember(classBuilder, constructor); + generator.generateHandleMember(classBuilder, constructor); generator.generateAdditionalMemberVariables(classBuilder); generator.generateConstructorCode(constructor); generator.generateSerializeCode(serialize); diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/FieldGenerator.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/FieldGenerator.java index 0f65d9c024ce34..eec1a0832ab835 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/FieldGenerator.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/FieldGenerator.java @@ -14,11 +14,9 @@ package com.google.devtools.build.lib.skyframe.serialization.autocodec; import com.google.devtools.build.lib.skyframe.serialization.ObjectCodec; -import com.google.devtools.build.lib.unsafe.UnsafeProvider; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeSpec; -import javax.lang.model.element.Modifier; import javax.lang.model.element.Name; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; @@ -26,7 +24,7 @@ /** * Generates code for a specific field. * - *

Always stores the offset of the field in a variable named {@link #getOffsetName}. + *

Always stores a handle to the field in a variable named {@link #getHandleName}. */ abstract class FieldGenerator { /** @@ -36,7 +34,7 @@ abstract class FieldGenerator { */ private static final String GENERATED_TAG = "$AutoCodec$"; - private static final String OFFSET_SUFFIX = "_offset"; + private static final String HANDLE_SUFFIX = "_handle"; private final VariableElement variable; private final ClassName parentName; @@ -68,12 +66,15 @@ final ClassName getParentName() { } /** - * Generated codecs store the offset of every field of the serialized class in a member variable. + * Name for a handle to the associated field. * - * @return name of the offset member variable + *

A handle can either be a {@link java.lang.invoke.VarHandle} or a field offset. + * + * @return name of the handle member variable */ - final String getOffsetName() { - return namePrefix + OFFSET_SUFFIX; + // TODO: b/331765692 - cleanup use of field offsets + final String getHandleName() { + return namePrefix + HANDLE_SUFFIX; } /** @@ -85,22 +86,19 @@ final Name getParameterName() { return variable.getSimpleName(); } - final void generateOffsetMember(TypeSpec.Builder classBuilder, MethodSpec.Builder constructor) { - classBuilder.addField(long.class, getOffsetName(), Modifier.PRIVATE, Modifier.FINAL); - constructor.addStatement( - "this.$L = $T.unsafe().objectFieldOffset($T.class.getDeclaredField(\"$L\"))", - getOffsetName(), - UnsafeProvider.class, - getParentName(), - variable.getSimpleName()); - } + /** + * Defines the handle field. + * + *

<p>Adds the field to the {@code classBuilder} and assigns its value in {@code constructor}. */ + abstract void generateHandleMember(TypeSpec.Builder classBuilder, MethodSpec.Builder constructor); /** * Generates any additional member variables needed for this field. * *
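Since the renamed member may eventually hold a {@code java.lang.invoke.VarHandle} rather than an Unsafe offset, here is a standalone sketch of the two flavors of handwritten field access that the generated code corresponds to. The Target class is hypothetical, and the offset flavor is shown only as a comment.

```java
import java.lang.invoke.MethodHandles;
import java.lang.invoke.VarHandle;

final class FieldHandleSketch {
  static final class Target {
    int count;
  }

  public static void main(String[] args) throws ReflectiveOperationException {
    Target target = new Target();

    // Offset flavor (what generateHandleMember currently emits via UnsafeProvider):
    //   long count_handle = unsafe().objectFieldOffset(Target.class.getDeclaredField("count"));
    //   unsafe().putInt(target, count_handle, 42);

    // VarHandle flavor, the supported replacement:
    VarHandle countHandle =
        MethodHandles.privateLookupIn(Target.class, MethodHandles.lookup())
            .findVarHandle(Target.class, "count", int.class);
    countHandle.set(target, 42);

    System.out.println(target.count); // 42
  }
}
```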

<p>To avoid collisions, field specific field names should be prefixed with {@link #namePrefix}. * -

<p>The *offset* field is already generated by {@link #generateOffsetMember}. +

<p>The *handle* field is already generated by {@link #generateHandleMember}. */ void generateAdditionalMemberVariables(TypeSpec.Builder classBuilder) {} /** * Adds field specific code to the constructor. * *

Many implementations don't need to do anything here given that the offset is already - * initialized by {@link #generateOffsetMember}. + * initialized by {@link #generateHandleMember}. */ void generateConstructorCode(MethodSpec.Builder constructor) {} diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/InterningObjectCodecFieldGenerators.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/InterningObjectCodecFieldGenerators.java index f0e3c76e1191b1..b7216f599133a2 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/InterningObjectCodecFieldGenerators.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/InterningObjectCodecFieldGenerators.java @@ -93,9 +93,26 @@ static FieldGenerator create( } } - private static final class NestedArrayFieldGenerator extends FieldGenerator { + /** Implementation that uses field offsets as handles. */ + private abstract static class OffsetFieldGenerator extends FieldGenerator { + private OffsetFieldGenerator(VariableElement variable, int hierarchyLevel) { + super(variable, hierarchyLevel); + } + + @Override + final void generateHandleMember(TypeSpec.Builder classBuilder, MethodSpec.Builder constructor) { + classBuilder.addField(long.class, getHandleName(), Modifier.PRIVATE, Modifier.FINAL); + constructor.addStatement( + "this.$L = $T.unsafe().objectFieldOffset($T.class.getDeclaredField(\"$L\"))", + getHandleName(), + UnsafeProvider.class, + getParentName(), + getParameterName()); + } + } + + private static final class NestedArrayFieldGenerator extends OffsetFieldGenerator { private final String processorName; - private final boolean isPrimitiveArray; private NestedArrayFieldGenerator( VariableElement variable, int hierarchyLevel, TypeKind baseComponentKind) { @@ -110,14 +127,12 @@ private NestedArrayFieldGenerator( case LONG: case SHORT: this.processorName = baseComponentKind.name() + "_ARRAY_PROCESSOR"; - this.isPrimitiveArray = true; break; case DECLARED: case TYPEVAR: // See comments of `ArrayProcessor.OBJECT_ARRAY_PROCESSOR` to understand how it works for // any type of object array. 
this.processorName = "OBJECT_ARRAY_PROCESSOR"; - this.isPrimitiveArray = false; break; default: throw new IllegalStateException( @@ -160,30 +175,22 @@ void generateSerializeCode(MethodSpec.Builder serialize) { processorName, getTypeName(), UnsafeProvider.class, - getOffsetName()); + getHandleName()); } @Override void generateDeserializeCode(MethodSpec.Builder deserialize) { - if (isPrimitiveArray) { - deserialize.addStatement( - "$T.$L.deserialize(codedIn, $L, instance, $L)", - ArrayProcessor.class, - processorName, - getTypeName(), - getOffsetName()); - } else { - deserialize.addStatement( - "$T.$L.deserialize(context, codedIn, $L, instance, $L)", - ArrayProcessor.class, - processorName, - getTypeName(), - getOffsetName()); - } + deserialize.addStatement( + "$T.unsafe().putObject(instance, $L, $T.$L.deserialize(context, codedIn, $L))", + UnsafeProvider.class, + getHandleName(), + ArrayProcessor.class, + processorName, + getTypeName()); } } - private static class PrimitiveArrayFieldGenerator extends FieldGenerator { + private static class PrimitiveArrayFieldGenerator extends OffsetFieldGenerator { private final TypeKind componentType; private PrimitiveArrayFieldGenerator( @@ -205,7 +212,7 @@ void generateSerializeCode(MethodSpec.Builder serialize) { Object.class, objName, UnsafeProvider.class, - getOffsetName()) + getHandleName()) .beginControlFlow("if ($L == null)", objName) .addStatement("codedOut.writeInt32NoTag(0)") .nextControlFlow("else") @@ -227,7 +234,7 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { .addStatement( "$T.unsafe().putObject(instance, $L, $T.$L.deserializeArrayData(codedIn, $L))", UnsafeProvider.class, - getOffsetName(), + getHandleName(), ArrayProcessor.class, getProcessorName(), lengthName) @@ -235,7 +242,7 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { } } - private static class ObjectArrayFieldGenerator extends FieldGenerator { + private static class ObjectArrayFieldGenerator extends OffsetFieldGenerator { private final TypeName componentTypeName; private ObjectArrayFieldGenerator( @@ -253,7 +260,7 @@ void generateSerializeCode(MethodSpec.Builder serialize) { Object.class, arrName, UnsafeProvider.class, - getOffsetName()) + getHandleName()) .beginControlFlow("if ($L == null)", arrName) .addStatement("codedOut.writeInt32NoTag(0)") .nextControlFlow("else") @@ -275,7 +282,7 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { .addStatement( "$T.unsafe().putObject(instance, $L, $L)", UnsafeProvider.class, - getOffsetName(), + getHandleName(), arrName) .addStatement( "$T.deserializeObjectArray(context, codedIn, $L, $L)", @@ -286,7 +293,7 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { } } - private static class BooleanFieldGenerator extends FieldGenerator { + private static class BooleanFieldGenerator extends OffsetFieldGenerator { private BooleanFieldGenerator(VariableElement variable, int hierarchyLevel) { super(variable, hierarchyLevel); } @@ -296,7 +303,7 @@ void generateSerializeCode(MethodSpec.Builder serialize) { serialize.addStatement( "codedOut.writeBoolNoTag($T.unsafe().getBoolean(obj, $L))", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } @Override @@ -304,11 +311,11 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { deserialize.addStatement( "$T.unsafe().putBoolean(instance, $L, codedIn.readBool())", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } } - private static class ByteFieldGenerator extends FieldGenerator { + private static class 
ByteFieldGenerator extends OffsetFieldGenerator { private ByteFieldGenerator(VariableElement variable, int hierarchyLevel) { super(variable, hierarchyLevel); } @@ -318,7 +325,7 @@ void generateSerializeCode(MethodSpec.Builder serialize) { serialize.addStatement( "codedOut.writeRawByte($T.unsafe().getByte(obj, $L))", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } @Override @@ -326,11 +333,11 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { deserialize.addStatement( "$T.unsafe().putByte(instance, $L, codedIn.readRawByte())", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } } - private static class CharFieldGenerator extends FieldGenerator { + private static class CharFieldGenerator extends OffsetFieldGenerator { private CharFieldGenerator(VariableElement variable, int hierarchyLevel) { super(variable, hierarchyLevel); } @@ -341,7 +348,7 @@ void generateSerializeCode(MethodSpec.Builder serialize) { "$T.writeChar(codedOut, $T.unsafe().getChar(obj, $L))", CodecHelpers.class, UnsafeProvider.class, - getOffsetName()); + getHandleName()); } @Override @@ -349,12 +356,12 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { deserialize.addStatement( "$T.unsafe().putChar(instance, $L, $T.readChar(codedIn))", UnsafeProvider.class, - getOffsetName(), + getHandleName(), CodecHelpers.class); } } - private static class DoubleFieldGenerator extends FieldGenerator { + private static class DoubleFieldGenerator extends OffsetFieldGenerator { private DoubleFieldGenerator(VariableElement variable, int hierarchyLevel) { super(variable, hierarchyLevel); } @@ -364,7 +371,7 @@ void generateSerializeCode(MethodSpec.Builder serialize) { serialize.addStatement( "codedOut.writeDoubleNoTag($T.unsafe().getDouble(obj, $L))", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } @Override @@ -372,11 +379,11 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { deserialize.addStatement( "$T.unsafe().putDouble(instance, $L, codedIn.readDouble())", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } } - private static class FloatFieldGenerator extends FieldGenerator { + private static class FloatFieldGenerator extends OffsetFieldGenerator { private FloatFieldGenerator(VariableElement variable, int hierarchyLevel) { super(variable, hierarchyLevel); } @@ -386,7 +393,7 @@ void generateSerializeCode(MethodSpec.Builder serialize) { serialize.addStatement( "codedOut.writeFloatNoTag($T.unsafe().getFloat(obj, $L))", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } @Override @@ -394,11 +401,11 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { deserialize.addStatement( "$T.unsafe().putFloat(instance, $L, codedIn.readFloat())", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } } - private static class IntFieldGenerator extends FieldGenerator { + private static class IntFieldGenerator extends OffsetFieldGenerator { private IntFieldGenerator(VariableElement variable, int hierarchyLevel) { super(variable, hierarchyLevel); } @@ -408,7 +415,7 @@ void generateSerializeCode(MethodSpec.Builder serialize) { serialize.addStatement( "codedOut.writeInt32NoTag($T.unsafe().getInt(obj, $L))", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } @Override @@ -416,11 +423,11 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { deserialize.addStatement( "$T.unsafe().putInt(instance, $L, codedIn.readInt32())", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } } - private static 
class LongFieldGenerator extends FieldGenerator { + private static class LongFieldGenerator extends OffsetFieldGenerator { private LongFieldGenerator(VariableElement variable, int hierarchyLevel) { super(variable, hierarchyLevel); } @@ -430,7 +437,7 @@ void generateSerializeCode(MethodSpec.Builder serialize) { serialize.addStatement( "codedOut.writeInt64NoTag($T.unsafe().getLong(obj, $L))", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } @Override @@ -438,11 +445,11 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { deserialize.addStatement( "$T.unsafe().putLong(instance, $L, codedIn.readInt64())", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } } - private static class ShortFieldGenerator extends FieldGenerator { + private static class ShortFieldGenerator extends OffsetFieldGenerator { private ShortFieldGenerator(VariableElement variable, int hierarchyLevel) { super(variable, hierarchyLevel); } @@ -453,7 +460,7 @@ void generateSerializeCode(MethodSpec.Builder serialize) { "$T.writeShort(codedOut, $T.unsafe().getShort(obj, $L))", CodecHelpers.class, UnsafeProvider.class, - getOffsetName()); + getHandleName()); } @Override @@ -461,12 +468,12 @@ void generateDeserializeCode(MethodSpec.Builder deserialize) { deserialize.addStatement( "$T.unsafe().putShort(instance, $L, $T.readShort(codedIn))", UnsafeProvider.class, - getOffsetName(), + getHandleName(), CodecHelpers.class); } } - private static class ObjectFieldGenerator extends FieldGenerator { + private static class ObjectFieldGenerator extends OffsetFieldGenerator { private ObjectFieldGenerator(VariableElement variable, int hierarchyLevel) { super(variable, hierarchyLevel); } @@ -476,12 +483,12 @@ void generateSerializeCode(MethodSpec.Builder serialize) { serialize.addStatement( "context.serialize($T.unsafe().getObject(obj, $L), codedOut)", UnsafeProvider.class, - getOffsetName()); + getHandleName()); } @Override void generateDeserializeCode(MethodSpec.Builder deserialize) { - deserialize.addStatement("context.deserialize(codedIn, instance, $L)", getOffsetName()); + deserialize.addStatement("context.deserialize(codedIn, instance, $L)", getHandleName()); } } } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/InterningObjectCodecGenerator.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/InterningObjectCodecGenerator.java index 7647f9525d79f7..35ba899051e718 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/InterningObjectCodecGenerator.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/InterningObjectCodecGenerator.java @@ -14,10 +14,9 @@ package com.google.devtools.build.lib.skyframe.serialization.autocodec; import static com.google.common.collect.Iterables.getOnlyElement; +import static com.google.devtools.build.lib.skyframe.serialization.autocodec.TypeOperations.getClassLineage; import static com.google.devtools.build.lib.skyframe.serialization.autocodec.TypeOperations.getErasure; -import static com.google.devtools.build.lib.skyframe.serialization.autocodec.TypeOperations.getSuperclass; import static com.google.devtools.build.lib.skyframe.serialization.autocodec.TypeOperations.isSerializableField; -import static com.google.devtools.build.lib.skyframe.serialization.autocodec.TypeOperations.matchesType; import static javax.lang.model.util.ElementFilter.fieldsIn; import com.google.common.collect.ImmutableList; @@ -32,7 +31,6 @@ import 
com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; import java.io.IOException; -import java.util.ArrayList; import javax.annotation.processing.ProcessingEnvironment; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; @@ -49,13 +47,7 @@ final class InterningObjectCodecGenerator extends CodecGenerator { @Override ImmutableList getFieldGenerators(TypeElement type) throws SerializationProcessingException { - // Collects the type and its supertypes. - ArrayList types = new ArrayList<>(); - for (TypeElement next = type; - next != null && !matchesType(next.asType(), Object.class, env); - next = getSuperclass(next)) { - types.add(next); - } + ImmutableList types = getClassLineage(type, env); ImmutableList.Builder result = ImmutableList.builder(); // Iterates in reverse order so variables are ordered highest superclass first, as they would diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/TypeOperations.java b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/TypeOperations.java index 8453544d2d7e5d..829ecffd25c9a5 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/TypeOperations.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/serialization/autocodec/TypeOperations.java @@ -13,11 +13,15 @@ // limitations under the License. package com.google.devtools.build.lib.skyframe.serialization.autocodec; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkState; + import com.google.common.collect.ImmutableList; import com.squareup.javapoet.JavaFile; import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; import java.io.IOException; +import java.util.List; import java.util.Set; import javax.annotation.Nullable; import javax.annotation.processing.ProcessingEnvironment; @@ -137,6 +141,73 @@ static TypeElement getSuperclass(TypeElement type) { return (TypeElement) ((DeclaredType) mirror).asElement(); } + enum Relation { + INSTANCE_OF, + EQUAL_TO, + SUPERTYPE_OF, + UNRELATED_TO + } + + @Nullable + static Relation findRelationWithGenerics( + TypeMirror type1, TypeMirror type2, ProcessingEnvironment env) { + if (type1.getKind() == TypeKind.TYPEVAR + || type1.getKind() == TypeKind.WILDCARD + || type2.getKind() == TypeKind.TYPEVAR + || type2.getKind() == TypeKind.WILDCARD) { + return Relation.EQUAL_TO; + } + if (env.getTypeUtils().isAssignable(type1, type2)) { + if (env.getTypeUtils().isAssignable(type2, type1)) { + return Relation.EQUAL_TO; + } + return Relation.INSTANCE_OF; + } + if (env.getTypeUtils().isAssignable(type2, type1)) { + return Relation.SUPERTYPE_OF; + } + // From here on out, we can't detect subtype/supertype, we're only checking for equality. + TypeMirror erasedType1 = env.getTypeUtils().erasure(type1); + TypeMirror erasedType2 = env.getTypeUtils().erasure(type2); + if (!env.getTypeUtils().isSameType(erasedType1, erasedType2)) { + // Technically, there could be a relationship, but it's too hard to figure out for now. 
+ return Relation.UNRELATED_TO; + } + List genericTypes1 = ((DeclaredType) type1).getTypeArguments(); + List genericTypes2 = ((DeclaredType) type2).getTypeArguments(); + checkState( + genericTypes1.size() == genericTypes2.size(), + "%s and %s had same erased type but a different number of generic parameters %s vs %s", + type1, + type2, + genericTypes1, + genericTypes2); + for (int i = 0; i < genericTypes1.size(); i++) { + Relation result = findRelationWithGenerics(genericTypes1.get(i), genericTypes2.get(i), env); + if (result != Relation.EQUAL_TO) { + return Relation.UNRELATED_TO; + } + } + return Relation.EQUAL_TO; + } + + /** + * Collects {@code type} and its supertypes. + * + *
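A short usage sketch of the relocated lineage helper, with the reverse iteration that the InterningObjectCodecGenerator comment above alludes to. The class is hypothetical and assumed to sit in the same package as TypeOperations (getClassLineage is package-private); the inner loop body is elided.

```java
import com.google.common.collect.ImmutableList;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.util.ElementFilter;

// Hypothetical generator helper in the autocodec package.
final class LineageSketch {
  static void visitFieldsSuperclassFirst(TypeElement type, ProcessingEnvironment env) {
    ImmutableList<TypeElement> lineage = TypeOperations.getClassLineage(type, env);
    // Reverse so the highest superclass comes first, as the generator above does.
    for (TypeElement clazz : lineage.reverse()) {
      for (VariableElement field : ElementFilter.fieldsIn(clazz.getEnclosedElements())) {
        // ... generate per-field serialization code here ...
      }
    }
  }

  private LineageSketch() {}
}
```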

The first element is the type itself with each additional element representing the next + * superclass. {@code Object} is ignored. + */ + static ImmutableList getClassLineage(TypeElement type, ProcessingEnvironment env) { + checkArgument(type.asType() instanceof DeclaredType, "%s must be a class", type); + var types = ImmutableList.builder(); + for (TypeElement next = type; + next != null && !matchesType(next.asType(), Object.class, env); + next = getSuperclass(next)) { + types.add(next); + } + return types.build(); + } + static TypeMirror resolveBaseArrayComponentType(TypeMirror type) { if (!type.getKind().equals(TypeKind.ARRAY)) { return type; diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/toolchains/ToolchainResolutionFunction.java b/src/main/java/com/google/devtools/build/lib/skyframe/toolchains/ToolchainResolutionFunction.java index efe7d74d31e005..67f0f5f7fdea55 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/toolchains/ToolchainResolutionFunction.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/toolchains/ToolchainResolutionFunction.java @@ -636,7 +636,7 @@ private static String getMessage(List

If " + + CONFIGURABLE_ARG + + " is explicitly set to False, the symbolic macro attribute is" + + " non-configurable - in other words, it cannot take a select() value. If" + + " the " + + CONFIGURABLE_ARG + + " is either unbound or explicitly set to True, the attribute is" + + " configurable and can take a select() value." // + + "

For an attribute of a rule or aspect, " + + CONFIGURABLE_ARG + + " must be left unbound. Most Starlark rule attributes are always configurable," + + " with the exception of attr.output(), attr.output_list()," + + " and attr.license() rule attributes, which are always non-configurable."; String CONFIGURATION_ARG = "cfg"; // TODO(b/151742236): Update when new Starlark-based configuration framework is implemented. diff --git a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/StarlarkRuleFunctionsApi.java b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/StarlarkRuleFunctionsApi.java index fb9ce6dd4c0ff8..b3916c6cb11492 100644 --- a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/StarlarkRuleFunctionsApi.java +++ b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/StarlarkRuleFunctionsApi.java @@ -751,9 +751,9 @@ StarlarkCallable rule( named = true, defaultValue = "[]", doc = - "If set, the set of toolchains this rule requires. The list can contain String," + "If set, the set of toolchains this aspect requires. The list can contain String," + " Label, or StarlarkToolchainTypeApi objects, in any combination. Toolchains" - + " will be found by checking the current platform, and provided to the rule" + + " will be found by checking the current platform, and provided to the aspect" + " implementation via ctx.toolchain."), @Param( name = "incompatible_use_toolchain_transition", diff --git a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/android/AndroidStarlarkCommonApi.java b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/android/AndroidStarlarkCommonApi.java index e5b04e50d26ad7..828c418ca902bf 100644 --- a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/android/AndroidStarlarkCommonApi.java +++ b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/android/AndroidStarlarkCommonApi.java @@ -19,7 +19,6 @@ import com.google.devtools.build.lib.starlarkbuildapi.FileApi; import com.google.devtools.build.lib.starlarkbuildapi.FilesToRunProviderApi; import com.google.devtools.build.lib.starlarkbuildapi.StarlarkRuleContextApi; -import com.google.devtools.build.lib.starlarkbuildapi.config.ConfigurationTransitionApi; import com.google.devtools.build.lib.starlarkbuildapi.java.JavaInfoApi; import com.google.devtools.build.lib.starlarkbuildapi.platform.ConstraintValueInfoApi; import javax.annotation.Nullable; @@ -76,16 +75,6 @@ public interface AndroidStarlarkCommonApi< structField = true) AndroidSplitTransitionApi getAndroidSplitTransition(); - @StarlarkMethod( - name = "android_platforms_transition", - doc = - "A configuration for rules that uses the --android_platforms flag instead of --platforms." 
- + " This should only be used by Android rules during migration and is not for" - + " general use.", - documented = false, - structField = true) - ConfigurationTransitionApi getAndroidPlatformsTransition(); - @StarlarkMethod( name = "enable_implicit_sourceless_deps_exports_compatibility", doc = "Takes a JavaInfo and converts it to an implicit exportable JavaInfo.", diff --git a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/cpp/CcModuleApi.java b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/cpp/CcModuleApi.java index bc53c218d17871..71fde5241c9d7c 100755 --- a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/cpp/CcModuleApi.java +++ b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/cpp/CcModuleApi.java @@ -615,7 +615,19 @@ Tuple compile( allowedTypes = { @ParamType(type = BuildConfigurationApi.class), @ParamType(type = NoneType.class) - }) + }), + @Param( + name = "emit_interface_shared_library", + doc = + "When 'output_type' is 'dynamic_library' and this parameter is set to True, an" + + " interface shared library will be generated during linking. On Windows the" + + " interface shared library will always be generated regardless of this" + + " parameter since it is a requirement for linking", + positional = false, + named = true, + documented = false, + allowedTypes = {@ParamType(type = Boolean.class)}, + defaultValue = "unbound"), }) LinkingOutputsT link( StarlarkActionFactoryT starlarkActionFactoryApi, @@ -644,6 +656,7 @@ LinkingOutputsT link( Object mainOutput, Object useShareableArtifactFactory, Object buildConfig, + Object emitInterfaceSharedLibrary, StarlarkThread thread) throws InterruptedException, EvalException; diff --git a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/ConstraintSettingInfoApi.java b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/ConstraintSettingInfoApi.java index e6b89834f0e7ec..5cebd43527c678 100644 --- a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/ConstraintSettingInfoApi.java +++ b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/ConstraintSettingInfoApi.java @@ -27,7 +27,7 @@ name = "ConstraintSettingInfo", doc = "A specific constraint setting that may be used to define a platform. See " - + "Defining " + + "Defining " + "Constraints and Platforms for more information." + PlatformInfoApi.EXPERIMENTAL_WARNING, category = DocCategory.PROVIDER) diff --git a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/ConstraintValueInfoApi.java b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/ConstraintValueInfoApi.java index 985c16cc81d2f5..fe0446f1fbebb6 100644 --- a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/ConstraintValueInfoApi.java +++ b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/ConstraintValueInfoApi.java @@ -28,7 +28,7 @@ name = "ConstraintValueInfo", doc = "A value for a constraint setting that can be used to define a platform. See " - + "Defining " + + "Defining " + "Constraints and Platforms for more information." 
+ PlatformInfoApi.EXPERIMENTAL_WARNING, category = DocCategory.PROVIDER) diff --git a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/PlatformInfoApi.java b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/PlatformInfoApi.java index b97fcb92fe75ca..2c5c42dc8ce8d0 100644 --- a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/PlatformInfoApi.java +++ b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/platform/PlatformInfoApi.java @@ -26,7 +26,7 @@ name = "PlatformInfo", doc = "Provides access to data about a specific platform. See " - + "Defining " + + "Defining " + "Constraints and Platforms for more information." + PlatformInfoApi.EXPERIMENTAL_WARNING, category = DocCategory.PROVIDER) diff --git a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/python/PyBootstrap.java b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/python/PyBootstrap.java index baf7a6b4ec4927..63dd9a838d0e4e 100644 --- a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/python/PyBootstrap.java +++ b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/python/PyBootstrap.java @@ -49,14 +49,6 @@ public void addBindingsToBuilder(ImmutableMap.Builder builder) { // Workaround for https://github.com/bazelbuild/bazel/issues/17713 new ProviderStub(), allowedRepositories)); - - builder.put( - "PyWrapCcInfo", - ContextAndFlagGuardedValue.onlyInAllowedReposOrWhenIncompatibleFlagIsFalse( - BuildLanguageOptions.INCOMPATIBLE_STOP_EXPORTING_LANGUAGE_MODULES, - // Workaround for https://github.com/bazelbuild/bazel/issues/17713 - new ProviderStub(), - allowedRepositories)); builder.put( "PyCcLinkParamsProvider", ContextAndFlagGuardedValue.onlyInAllowedReposOrWhenIncompatibleFlagIsFalse( diff --git a/src/main/java/com/google/devtools/build/lib/testing/common/FakeOptions.java b/src/main/java/com/google/devtools/build/lib/testing/common/FakeOptions.java index 71f4e4171dec17..d74d50eef9283b 100644 --- a/src/main/java/com/google/devtools/build/lib/testing/common/FakeOptions.java +++ b/src/main/java/com/google/devtools/build/lib/testing/common/FakeOptions.java @@ -14,13 +14,13 @@ package com.google.devtools.build.lib.testing.common; import com.google.common.collect.ImmutableClassToInstanceMap; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.devtools.common.options.Options; import com.google.devtools.common.options.OptionsBase; import com.google.devtools.common.options.OptionsProvider; import com.google.devtools.common.options.ParsedOptionDescription; import com.google.errorprone.annotations.CanIgnoreReturnValue; -import java.util.Map; import java.util.function.Predicate; import javax.annotation.Nullable; @@ -110,8 +110,13 @@ public ImmutableMap getStarlarkOptions() { } @Override - public Map getExplicitStarlarkOptions( + public ImmutableMap getExplicitStarlarkOptions( Predicate filter) { return ImmutableMap.of(); } + + @Override + public ImmutableList getUserOptions() { + return ImmutableList.of(); + } } diff --git a/src/main/java/com/google/devtools/build/lib/worker/SandboxedWorker.java b/src/main/java/com/google/devtools/build/lib/worker/SandboxedWorker.java index da6e23d5fa9da3..5595a8ed23b995 100644 --- a/src/main/java/com/google/devtools/build/lib/worker/SandboxedWorker.java +++ b/src/main/java/com/google/devtools/build/lib/worker/SandboxedWorker.java @@ -46,9 +46,6 @@ /** A {@link SingleplexWorker} that runs inside a sandboxed execution root. 
*/ final class SandboxedWorker extends SingleplexWorker { - - public static final String TMP_DIR_MOUNT_NAME = "_tmp"; - @AutoValue public abstract static class WorkerSandboxOptions { // Need to have this data class because we can't depend on SandboxOptions in here. @@ -113,11 +110,12 @@ public static WorkerSandboxOptions create( TreeDeleter treeDeleter, @Nullable VirtualCgroupFactory cgroupFactory) { super(workerKey, workerId, workDir, logFile, workerOptions, cgroupFactory); + Path tmpDirPath = SandboxHelpers.getTmpDirPath(workDir); this.workerExecRoot = new WorkerExecRoot( workDir, hardenedSandboxOptions != null - ? ImmutableList.of(PathFragment.create("../" + TMP_DIR_MOUNT_NAME)) + ? ImmutableList.of(PathFragment.create(tmpDirPath.getPathString())) : ImmutableList.of()); this.hardenedSandboxOptions = hardenedSandboxOptions; this.treeDeleter = treeDeleter; @@ -153,8 +151,6 @@ private SortedMap getBindMounts(Path sandboxExecRoot, @Nullable Path FileSystem fs = sandboxExecRoot.getFileSystem(); Path tmpPath = fs.getPath("/tmp"); final SortedMap bindMounts = Maps.newTreeMap(); - // Mount a fresh, empty temporary directory as /tmp for each sandbox rather than reusing the - // host filesystem's /tmp. Since we're in a worker, we clean this dir between requests. bindMounts.put(tmpPath, sandboxTmp); SandboxHelpers.mountAdditionalPaths( hardenedSandboxOptions.additionalMountPaths(), sandboxExecRoot, bindMounts); @@ -192,9 +188,7 @@ protected Subprocess createProcess() throws IOException, UserExecException { // TODO(larsrc): Check that execRoot and outputBase are not under /tmp if (hardenedSandboxOptions != null) { - // In hardened mode, we bindmount a temp dir. We put the mount dir in the parent directory to - // avoid clashes with workspace files. - Path sandboxTmp = workDir.getParentDirectory().getRelative(TMP_DIR_MOUNT_NAME); + Path sandboxTmp = SandboxHelpers.getTmpDirPath(workDir); sandboxTmp.createDirectoryAndParents(); // Mostly tests require network, and some blaze run commands, but no workers. @@ -267,6 +261,9 @@ void destroy() { cgroup.destroy(); } workDir.deleteTree(); + if (hardenedSandboxOptions != null) { + SandboxHelpers.getTmpDirPath(workDir).deleteTree(); + } } catch (IOException e) { logger.atWarning().withCause(e).log("Caught IOException while deleting workdir."); } diff --git a/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java b/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java index af00a871c7cdff..a2453b87ea1298 100644 --- a/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java +++ b/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java @@ -226,7 +226,9 @@ public String getTypeDescription() { defaultValue = "false", documentationCategory = OptionDocumentationCategory.EXECUTION_STRATEGY, effectTags = {OptionEffectTag.EXECUTION}, - help = "If enabled, workers are run in a hardened sandbox, if the implementation allows it.") + help = + "If enabled, workers are run in a hardened sandbox, if the implementation allows it. 
If" + + " hardening is enabled then tmp directories are distinct for different workers.") public boolean sandboxHardening; @Option( diff --git a/src/main/java/com/google/devtools/build/lib/worker/WorkerProcessMetricsCollector.java b/src/main/java/com/google/devtools/build/lib/worker/WorkerProcessMetricsCollector.java index 06c453ed33acf7..74b050f98ee6aa 100644 --- a/src/main/java/com/google/devtools/build/lib/worker/WorkerProcessMetricsCollector.java +++ b/src/main/java/com/google/devtools/build/lib/worker/WorkerProcessMetricsCollector.java @@ -284,6 +284,4 @@ public void clearKilledWorkerProcessMetrics() { public void beforeCommand() { pidToWorkerProcessMetrics.values().forEach(m -> m.onBeforeCommand()); } - - // TODO(b/238416583) Add deregister function } diff --git a/src/main/java/com/google/devtools/build/skyframe/AbstractParallelEvaluator.java b/src/main/java/com/google/devtools/build/skyframe/AbstractParallelEvaluator.java index 9a6b6c9b815b83..e546b311cafcdb 100644 --- a/src/main/java/com/google/devtools/build/skyframe/AbstractParallelEvaluator.java +++ b/src/main/java/com/google/devtools/build/skyframe/AbstractParallelEvaluator.java @@ -31,7 +31,6 @@ import com.google.common.graph.ImmutableGraph; import com.google.common.graph.Traverser; import com.google.common.util.concurrent.ListenableFuture; -import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.concurrent.QuiescingExecutor; import com.google.devtools.build.lib.events.Event; @@ -54,7 +53,6 @@ import com.google.devtools.build.skyframe.proto.GraphInconsistency.Inconsistency; import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.IOException; -import java.time.Duration; import java.util.Collection; import java.util.List; import java.util.Set; @@ -461,22 +459,14 @@ public void run() { nodeEntry); SkyValue value = null; - long startTimeNanos = BlazeClock.instance().nanoTime(); - try { + try (var s = + Profiler.instance() + .profile(ProfilerTask.SKYFUNCTION, skyKey.functionName().getName())) { try { evaluatorContext.getProgressReceiver().stateStarting(skyKey, NodeState.COMPUTE); value = skyFunction.compute(skyKey, env); } finally { evaluatorContext.getProgressReceiver().stateEnding(skyKey, NodeState.COMPUTE); - long elapsedTimeNanos = BlazeClock.instance().nanoTime() - startTimeNanos; - if (elapsedTimeNanos > 0) { - Profiler.instance() - .logSimpleTaskDuration( - startTimeNanos, - Duration.ofNanos(elapsedTimeNanos), - ProfilerTask.SKYFUNCTION, - skyKey.functionName().getName()); - } } } catch (SkyFunctionException builderException) { // TODO(b/261604460): invalidating the state cache here appears to be load-bearing for diff --git a/src/main/java/com/google/devtools/common/options/OptionPriority.java b/src/main/java/com/google/devtools/common/options/OptionPriority.java index 53f0d75b8d4732..9146fa0b8c7d8d 100644 --- a/src/main/java/com/google/devtools/common/options/OptionPriority.java +++ b/src/main/java/com/google/devtools/common/options/OptionPriority.java @@ -26,6 +26,7 @@ */ public class OptionPriority implements Comparable { private final PriorityCategory priorityCategory; + /** * Each option that is passed explicitly has 0 ancestors, so it only has its command line index * (or rc index, etc., depending on the category), but expanded options have the command line @@ -150,6 +151,9 @@ public enum PriorityCategory { */ COMPUTED_DEFAULT, + /** For options coming from a global blazerc file. 
*/ + GLOBAL_RC_FILE, + /** For options coming from a configuration file or rc file. */ RC_FILE, diff --git a/src/main/java/com/google/devtools/common/options/OptionsParser.java b/src/main/java/com/google/devtools/common/options/OptionsParser.java index 5059ed93879325..a6547d7a649fdb 100644 --- a/src/main/java/com/google/devtools/common/options/OptionsParser.java +++ b/src/main/java/com/google/devtools/common/options/OptionsParser.java @@ -31,6 +31,7 @@ import com.google.common.collect.MoreCollectors; import com.google.common.escape.Escaper; import com.google.devtools.build.lib.util.Pair; +import com.google.devtools.common.options.OptionPriority.PriorityCategory; import com.google.devtools.common.options.OptionsParserImpl.OptionsParserImplResult; import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.util.ArrayList; @@ -870,6 +871,33 @@ public List canonicalize() { return impl.asCanonicalizedList(); } + @Override + public ImmutableList getUserOptions() { + Predicate isUserOption = + (option) -> + (option.getOrigin().getPriority().getPriorityCategory().equals(PriorityCategory.RC_FILE) + || option + .getOrigin() + .getPriority() + .getPriorityCategory() + .equals(PriorityCategory.COMMAND_LINE)); + ImmutableList.Builder userOptions = ImmutableList.builder(); + return userOptions + .addAll( + asListOfExplicitOptions().stream() + .filter(isUserOption) + .filter(option -> !option.getCanonicalForm().contains("default_override")) + .map(option -> option.getCanonicalForm()) + .collect(toImmutableList())) + .addAll( + impl.getSkippedOptions().stream() + .filter(isUserOption) + .map(option -> option.getCanonicalForm()) + .filter(o -> getStarlarkOptions().containsKey(o)) + .collect(toImmutableList())) + .build(); + } + /** Returns all options fields of the given options class, in alphabetic order. */ public static ImmutableList getOptionDefinitions( Class optionsClass) { diff --git a/src/main/java/com/google/devtools/common/options/OptionsProvider.java b/src/main/java/com/google/devtools/common/options/OptionsProvider.java index b45f88798b3185..6c0a211864937f 100644 --- a/src/main/java/com/google/devtools/common/options/OptionsProvider.java +++ b/src/main/java/com/google/devtools/common/options/OptionsProvider.java @@ -13,14 +13,15 @@ // limitations under the License. package com.google.devtools.common.options; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.Map; import java.util.function.Predicate; import javax.annotation.Nullable; /** - * A read-only interface for options parser results, which only allows to query the options of - * a specific class, but not e.g. the residue any other information pertaining to the command line. + * A read-only interface for options parser results, which only allows to query the options of a + * specific class, but not e.g. the residue any other information pertaining to the command line. */ public interface OptionsProvider { public static final OptionsProvider EMPTY = @@ -41,16 +42,22 @@ public ImmutableMap getExplicitStarlarkOptions( Predicate filter) { return ImmutableMap.of(); } + + @Override + public ImmutableList getUserOptions() { + return ImmutableList.of(); + } }; /** - * Returns the options instance for the given {@code optionsClass}, that is, - * the parsed options, or null if it is not among those available. + * Returns the options instance for the given {@code optionsClass}, that is, the parsed options, + * or null if it is not among those available. * - *
The returned options should be treated by library code as immutable and - * a provider is permitted to return the same options instance multiple times. + *
The returned options should be treated by library code as immutable and a provider is + * permitted to return the same options instance multiple times. */ - @Nullable O getOptions(Class optionsClass); + @Nullable + O getOptions(Class optionsClass); /** * Returns the starlark options in a name:value map. @@ -68,4 +75,10 @@ public ImmutableMap getExplicitStarlarkOptions( * the given filter criteria. */ Map getExplicitStarlarkOptions(Predicate filter); + + /** + * Returns the options that were parsed from either a user blazerc file or the command line, as a + * list of canonicalized option strings. + */ + ImmutableList getUserOptions(); } diff --git a/src/main/protobuf/failure_details.proto b/src/main/protobuf/failure_details.proto index f449491b6487c8..3eee7295bcc1f2 100644 --- a/src/main/protobuf/failure_details.proto +++ b/src/main/protobuf/failure_details.proto @@ -376,6 +376,7 @@ message Skyfocus { // The user needs to augment their working set to include the new file(s). NON_WORKING_SET_CHANGE = 1 [(metadata) = { exit_code: 2 }]; CONFIGURATION_CHANGE = 2 [(metadata) = { exit_code: 2 }]; + DISALLOWED_OPERATION_ON_FOCUSED_GRAPH = 3 [(metadata) = { exit_code: 2 }]; } Code code = 1; diff --git a/src/main/protobuf/worker_protocol.proto b/src/main/protobuf/worker_protocol.proto index 4bca44874b5ed5..ae17121ba42454 100644 --- a/src/main/protobuf/worker_protocol.proto +++ b/src/main/protobuf/worker_protocol.proto @@ -77,9 +77,11 @@ message WorkRequest { message WorkResponse { int32 exit_code = 1; - // This is printed to the user after the WorkResponse has been received and is - // supposed to contain compiler warnings / errors etc. - thus we'll use a - // string type here, which gives us UTF-8 encoding. + // Output message for this work unit. + // This is akin to the combined stdout/stderr if the work unit were executed + // as a standalone process. Output pertaining to a work unit should be + // reported here instead of through the stdout/stderr of the worker process. + // Assumed to be UTF-8 encoded.
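// Illustrative sketch only (the concrete values below are made up, not part of this change):
// in proto text format, a response for a work unit that failed could look like
//   exit_code: 1
//   output: "foo.cc:3:12: error: expected ';' after expression"
//   request_id: 42
// i.e. the diagnostics for the unit travel inside the response rather than through the
// worker process's own stdout/stderr.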
string output = 2; // This field must be set to the same request_id as the WorkRequest it is a diff --git a/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl b/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl index eaf01fb7dbc8f3..56721f148a9b2a 100644 --- a/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl +++ b/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl @@ -210,9 +210,9 @@ def _get_providers(ctx): return [dep[CcInfo] for dep in all_deps if CcInfo in dep] def _filter_libraries_that_are_linked_dynamically(ctx, feature_configuration, cc_linking_context): - merged_cc_shared_library_infos = merge_cc_shared_library_infos(ctx) - link_once_static_libs_map = build_link_once_static_libs_map(merged_cc_shared_library_infos) - transitive_exports = build_exports_map_from_only_dynamic_deps(merged_cc_shared_library_infos) + merged_cc_shared_library_infos_list = merge_cc_shared_library_infos(ctx).to_list() + link_once_static_libs_map = build_link_once_static_libs_map(merged_cc_shared_library_infos_list) + transitive_exports = build_exports_map_from_only_dynamic_deps(merged_cc_shared_library_infos_list) linker_inputs = cc_linking_context.linker_inputs.to_list() all_deps = ctx.attr._deps_analyzed_by_graph_structure_aspect @@ -232,7 +232,7 @@ def _filter_libraries_that_are_linked_dynamically(ctx, feature_configuration, cc topologically_sorted_labels, unused_dynamic_linker_inputs, ) = separate_static_and_dynamic_link_libraries( - ctx, + ctx.attr.dynamic_deps, graph_structure_aspect_nodes, can_be_linked_dynamically, ) diff --git a/src/main/starlark/builtins_bzl/common/cc/cc_common.bzl b/src/main/starlark/builtins_bzl/common/cc/cc_common.bzl index b37eb48d7ba0bf..9f12b882a307ac 100644 --- a/src/main/starlark/builtins_bzl/common/cc/cc_common.bzl +++ b/src/main/starlark/builtins_bzl/common/cc/cc_common.bzl @@ -76,7 +76,8 @@ def _link( link_artifact_name_suffix = _UNBOUND, main_output = _UNBOUND, use_shareable_artifact_factory = _UNBOUND, - build_config = _UNBOUND): + build_config = _UNBOUND, + emit_interface_shared_library = _UNBOUND): if output_type == "archive": cc_common_internal.check_private_api(allowlist = _PRIVATE_STARLARKIFICATION_ALLOWLIST) @@ -94,7 +95,8 @@ def _link( link_artifact_name_suffix != _UNBOUND or \ main_output != _UNBOUND or \ use_shareable_artifact_factory != _UNBOUND or \ - build_config != _UNBOUND: + build_config != _UNBOUND or \ + emit_interface_shared_library != _UNBOUND: cc_common_internal.check_private_api(allowlist = _PRIVATE_STARLARKIFICATION_ALLOWLIST) if use_test_only_flags == _UNBOUND: @@ -121,6 +123,8 @@ def _link( use_shareable_artifact_factory = False if build_config == _UNBOUND: build_config = None + if emit_interface_shared_library == _UNBOUND: + emit_interface_shared_library = False return cc_common_internal.link( actions = actions, @@ -149,6 +153,7 @@ def _link( main_output = main_output, use_shareable_artifact_factory = use_shareable_artifact_factory, build_config = build_config, + emit_interface_shared_library = emit_interface_shared_library, ) def _create_lto_compilation_context(*, objects = {}): diff --git a/src/main/starlark/builtins_bzl/common/cc/cc_shared_library.bzl b/src/main/starlark/builtins_bzl/common/cc/cc_shared_library.bzl index 1870fe6b426486..85f3fa0ea0783a 100644 --- a/src/main/starlark/builtins_bzl/common/cc/cc_shared_library.bzl +++ b/src/main/starlark/builtins_bzl/common/cc/cc_shared_library.bzl @@ -84,16 +84,16 @@ def _sort_linker_inputs(topologically_sorted_labels, label_to_linker_inputs, lin # dynamically. 
The transitive_dynamic_dep_labels parameter is only needed for # binaries because they link all dynamic_deps (cc_binary|cc_test). def _separate_static_and_dynamic_link_libraries( - ctx, - direct_children, + dynamic_deps, + deps_graph_nodes, can_be_linked_dynamically): ( transitive_dynamic_dep_labels, all_dynamic_dep_linker_inputs, - ) = _build_map_direct_dynamic_dep_to_transitive_dynamic_deps(ctx) + ) = _build_map_direct_dynamic_dep_to_transitive_dynamic_deps(dynamic_deps) node = None - all_children = reversed(direct_children) + deps_graph_nodes_to_process = reversed(deps_graph_nodes) targets_to_be_linked_statically_map = {} targets_to_be_linked_dynamically_set = {} seen_labels = {} @@ -123,10 +123,10 @@ def _separate_static_and_dynamic_link_libraries( # Horrible I know. Perhaps Starlark team gives me a way to prune a tree. for i in range(2147483647): - if not len(all_children): + if not len(deps_graph_nodes_to_process): break - node = all_children[-1] + node = deps_graph_nodes_to_process[-1] must_add_children = False @@ -189,7 +189,7 @@ def _separate_static_and_dynamic_link_libraries( # in which dependencies were listed in the deps attribute in the # BUILD file we must reverse the list so that the first one listed # in the BUILD file is processed first. - all_children.extend(reversed(node.children)) + deps_graph_nodes_to_process.extend(reversed(node.children)) else: if node.owners[0] not in first_owner_to_depset: # We have 3 cases in this branch: @@ -206,12 +206,12 @@ def _separate_static_and_dynamic_link_libraries( transitive.append(transitive_dynamic_dep_labels[str(node.owners[0])]) first_owner_to_depset[node.owners[0]] = depset(direct = node.owners, transitive = transitive, order = "topological") - all_children.pop() + deps_graph_nodes_to_process.pop() topologically_sorted_labels = [] - if direct_children: + if deps_graph_nodes: transitive = [] - for child in direct_children: + for child in deps_graph_nodes: transitive.append(first_owner_to_depset[child.owners[0]]) topologically_sorted_labels = depset(transitive = transitive, order = "topological").to_list() @@ -236,9 +236,9 @@ def _merge_cc_shared_library_infos(ctx): return depset(direct = dynamic_deps, transitive = transitive_dynamic_deps, order = "topological") -def _build_exports_map_from_only_dynamic_deps(merged_shared_library_infos): +def _build_exports_map_from_only_dynamic_deps(merged_cc_shared_library_infos_list): exports_map = {} - for entry in merged_shared_library_infos.to_list(): + for entry in merged_cc_shared_library_infos_list: exports = entry.exports linker_input = entry.linker_input for export in exports: @@ -252,9 +252,9 @@ def _build_exports_map_from_only_dynamic_deps(merged_shared_library_infos): # The map points from the target that can only be linked once to the # cc_shared_library target that already links it. 
-def _build_link_once_static_libs_map(merged_shared_library_infos): +def _build_link_once_static_libs_map(merged_cc_shared_library_infos_list): link_once_static_libs_map = {} - for entry in merged_shared_library_infos.to_list(): + for entry in merged_cc_shared_library_infos_list: link_once_static_libs = entry.link_once_static_libs linker_input = entry.linker_input for static_lib in link_once_static_libs: @@ -329,11 +329,11 @@ def _contains_code_to_link(linker_input): return False def _find_top_level_linker_input_labels( - nodes, + deps_graph_nodes, linker_inputs_to_be_linked_statically_map, targets_to_be_linked_dynamically_set): top_level_linker_input_labels_set = {} - nodes_to_check = list(nodes) + nodes_to_check = list(deps_graph_nodes) seen_nodes_set = {} for i in range(2147483647): @@ -374,13 +374,13 @@ def _filter_inputs( link_once_static_libs_map): curr_link_once_static_libs_set = {} - graph_structure_aspect_nodes = [] + deps_root_tree_nodes = [] dependency_linker_inputs_sets = [] direct_deps_set = {} for dep in deps: direct_deps_set[str(dep.label)] = True dependency_linker_inputs_sets.append(dep[CcInfo].linking_context.linker_inputs) - graph_structure_aspect_nodes.append(dep[GraphNodeInfo]) + deps_root_tree_nodes.append(dep[GraphNodeInfo]) if ctx.attr.experimental_disable_topo_sort_do_not_use_remove_before_7_0: dependency_linker_inputs = depset(transitive = dependency_linker_inputs_sets).to_list() @@ -403,8 +403,8 @@ def _filter_inputs( topologically_sorted_labels, unused_dynamic_linker_inputs, ) = _separate_static_and_dynamic_link_libraries( - ctx, - graph_structure_aspect_nodes, + ctx.attr.dynamic_deps, + deps_root_tree_nodes, can_be_linked_dynamically, ) @@ -415,7 +415,7 @@ def _filter_inputs( linker_inputs_to_be_linked_statically_map.setdefault(owner, []).append(linker_input) top_level_linker_input_labels_set = _find_top_level_linker_input_labels( - graph_structure_aspect_nodes, + deps_root_tree_nodes, linker_inputs_to_be_linked_statically_map, targets_to_be_linked_dynamically_set, ) @@ -586,10 +586,10 @@ def _get_deps(ctx): return deps -def _build_map_direct_dynamic_dep_to_transitive_dynamic_deps(ctx): +def _build_map_direct_dynamic_dep_to_transitive_dynamic_deps(direct_dynamic_deps): all_dynamic_dep_linker_inputs = {} direct_dynamic_dep_to_transitive_dynamic_deps = {} - for dep in ctx.attr.dynamic_deps: + for dep in direct_dynamic_deps: owner = dep[CcSharedLibraryInfo].linker_input.owner all_dynamic_dep_linker_inputs[owner] = dep[CcSharedLibraryInfo].linker_input transitive_dynamic_dep_labels = [] @@ -645,8 +645,11 @@ def _cc_shared_library_impl(ctx): unsupported_features = ctx.disabled_features, ) - merged_cc_shared_library_info = _merge_cc_shared_library_infos(ctx) - exports_map = _build_exports_map_from_only_dynamic_deps(merged_cc_shared_library_info) + merged_cc_shared_library_infos = _merge_cc_shared_library_infos(ctx) + + # Small performance tweak to avoid flattening merged_cc_shared_library_infos twice: + merged_cc_shared_library_infos_list = merged_cc_shared_library_infos.to_list() + exports_map = _build_exports_map_from_only_dynamic_deps(merged_cc_shared_library_infos_list) for export in deps: # Do not check for overlap between targets matched by the current # rule's exports_filter and what is in exports_map. 
A library in roots @@ -662,7 +665,7 @@ def _cc_shared_library_impl(ctx): fail("Trying to export a library already exported by a different shared library: " + str(export.label)) - link_once_static_libs_map = _build_link_once_static_libs_map(merged_cc_shared_library_info) + link_once_static_libs_map = _build_link_once_static_libs_map(merged_cc_shared_library_infos_list) (exports, linker_inputs, curr_link_once_static_libs_set, precompiled_only_dynamic_libraries) = _filter_inputs( ctx, @@ -789,7 +792,7 @@ def _cc_shared_library_impl(ctx): **additional_output_groups ), CcSharedLibraryInfo( - dynamic_deps = merged_cc_shared_library_info, + dynamic_deps = merged_cc_shared_library_infos, exports = exports.keys(), link_once_static_libs = curr_link_once_static_libs_set, linker_input = cc_common.create_linker_input( diff --git a/src/main/starlark/builtins_bzl/common/cc/link/link.bzl b/src/main/starlark/builtins_bzl/common/cc/link/link.bzl index c266bc9f4133d1..29ec6b5c1d3ced 100644 --- a/src/main/starlark/builtins_bzl/common/cc/link/link.bzl +++ b/src/main/starlark/builtins_bzl/common/cc/link/link.bzl @@ -63,7 +63,8 @@ def link( link_artifact_name_suffix = "", main_output = None, use_shareable_artifact_factory = False, - build_config = None): + build_config = None, + emit_interface_shared_library = False): """Used for C++ transitive linking. In the most basic and most used case, the function creates an action for a single executable or @@ -130,6 +131,10 @@ def link( If not specified, then one will be computed based on `name` and `output_type`. use_shareable_artifact_factory: (bool) undocumented. build_config: (None|BuildConfiguration) undocumented. + emit_interface_shared_library: (bool) When 'output_type' is 'dynamic_library' and this + parameter is set to True, an interface shared library will be generated during + linking. On Windows the interface shared library will always be generated + regardless of this parameter since it is a requirement for linking. Returns: (CcLinkingOutputs = { library_to_link: LibraryToLink, @@ -191,5 +196,6 @@ def link( link_artifact_name_suffix, main_output, # linker_output_artifact emit_interface_shared_libraries = dynamic_link_type == LINK_TARGET_TYPE.DYNAMIC_LIBRARY and - feature_configuration.is_enabled("targets_windows"), + (emit_interface_shared_library or + feature_configuration.is_enabled("targets_windows")), ) diff --git a/src/main/starlark/tests/builtins_bzl/objc/dotted_version_test.bzl b/src/main/starlark/tests/builtins_bzl/objc/dotted_version_test.bzl new file mode 100644 index 00000000000000..abba1988feb998 --- /dev/null +++ b/src/main/starlark/tests/builtins_bzl/objc/dotted_version_test.bzl @@ -0,0 +1,119 @@ +# Copyright 2024 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Unit tests for apple_common.dotted_version()""" + +load("@bazel_skylib//lib:unittest.bzl", "asserts", "unittest") + +tested_dotted_version = apple_common.dotted_version + +def _assert_v1_less_v2(env, v1, v2): + dv1 = tested_dotted_version(v1) + dv2 = tested_dotted_version(v2) + asserts.true(env, dv1.compare_to(dv2) < 0) + asserts.true(env, dv2.compare_to(dv1) > 0) + asserts.equals(env, dv1.compare_to(dv1), 0) + +def _assert_v1_equal_v2(env, v1, v2): + dv1 = tested_dotted_version(v1) + dv2 = tested_dotted_version(v2) + asserts.equals(env, dv1.compare_to(dv2), 0) + asserts.equals(env, dv2.compare_to(dv1), 0) + +def _compare_equal_length_versions_impl(ctx): + env = unittest.begin(ctx) + v1 = "5" + v2 = "6" + _assert_v1_less_v2(env, v1, v2) + v3 = "3.4" + v4 = "3.5" + _assert_v1_less_v2(env, v3, v4) + v5 = "1.2.3" + v6 = "1.2.4" + _assert_v1_less_v2(env, v5, v6) + v7 = "1.2.5" + v8 = "1.3.4" + _assert_v1_less_v2(env, v7, v8) + v9 = "1.8" + v10 = "1.12" # make sure component's first_number is compared as int, not as string + _assert_v1_less_v2(env, v9, v10) + v11 = "3.1.2" + v12 = "3.1.20" + _assert_v1_less_v2(env, v11, v12) + return unittest.end(env) + +compare_equal_length_versions_test = unittest.make(_compare_equal_length_versions_impl) + +def _compare_different_length_versions_impl(ctx): + env = unittest.begin(ctx) + v1 = "9" + v2 = "9.7.4" + _assert_v1_less_v2(env, v1, v2) + v3 = "2.1" + v4 = "2.1.8" + _assert_v1_less_v2(env, v3, v4) + v5 = "3.1" + v6 = "3.1.0.0" + _assert_v1_equal_v2(env, v5, v6) + return unittest.end(env) + +compare_different_length_versions_test = unittest.make(_compare_different_length_versions_impl) + +def _compare_versions_with_alphanum_components_impl(ctx): + env = unittest.begin(ctx) + v1 = "1.5alpha" + _assert_v1_equal_v2(env, v1, v1) + v3 = "1.5alpha" + v4 = "1.5beta" + _assert_v1_less_v2(env, v3, v4) + v5 = "1.5beta2" + v6 = "1.5beta3" + _assert_v1_less_v2(env, v5, v6) + v7 = "1.5gamma5" + v8 = "1.5gamma29" # make sure component's second_number is compared as int, not as string + _assert_v1_less_v2(env, v7, v8) + v9 = "1.5alpha9" + v10 = "1.5beta7" + _assert_v1_less_v2(env, v9, v10) + v11 = "3.1.0" + v12 = "3.1.1beta1" + _assert_v1_less_v2(env, v11, v12) + return unittest.end(env) + +compare_versions_with_alphanum_components_test = unittest.make(_compare_versions_with_alphanum_components_impl) + +def _check_description_is_ignored_impl(ctx): + env = unittest.begin(ctx) + v1 = "1.5.decription" + v2 = "1.5" + _assert_v1_equal_v2(env, v1, v2) + v3 = "1.5.decription.6.7" # everything after the description is ignored + v4 = "1.5" + _assert_v1_equal_v2(env, v3, v4) + env = unittest.begin(ctx) + v5 = "9.description" + v6 = "9.7.4" + _assert_v1_less_v2(env, v5, v6) + return unittest.end(env) + +check_description_is_ignored_test = unittest.make(_check_description_is_ignored_impl) + +def dotted_version_test_suite(name): + unittest.suite( + name, + compare_equal_length_versions_test, + compare_different_length_versions_test, + compare_versions_with_alphanum_components_test, + check_description_is_ignored_test, + ) diff --git a/src/test/java/com/google/devtools/build/lib/actions/ActionsTest.java b/src/test/java/com/google/devtools/build/lib/actions/ActionsTest.java new file mode 100644 index 00000000000000..dd95dc23c52c08 --- /dev/null +++ b/src/test/java/com/google/devtools/build/lib/actions/ActionsTest.java @@ -0,0 +1,41 @@ +// Copyright 2017 The Bazel Authors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package com.google.devtools.build.lib.actions; + +import static com.google.common.truth.Truth.assertThat; + +import com.google.devtools.build.lib.cmdline.Label; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +/** Test for {@link Actions}. */ +@RunWith(JUnit4.class) +public class ActionsTest { + + @Test + public void testEscapeLabelGolden() { + // Fix a particular encoding in case users hardcode paths generated by it. + assertThat(Actions.escapeLabel(Label.parseCanonicalUnchecked("//pa-t_h/to/pkg:dir/na-m_e"))) + .isEqualTo("pa-t_Uh_Sto_Spkg_Cdir_Sna-m_Ue"); + assertThat(Actions.escapeLabel(Label.parseCanonicalUnchecked("//:name"))).isEqualTo("_Cname"); + } + + @Test + public void testEscapeLabelDifferentRepos() { + // Fix a particular encoding in case users hardcode paths generated by it. + assertThat(Actions.escapeLabel(Label.parseCanonicalUnchecked("@@repo_1//:target"))) + .isNotEqualTo(Actions.escapeLabel(Label.parseCanonicalUnchecked("@@repo_2//:target"))); + } +} diff --git a/src/test/java/com/google/devtools/build/lib/analysis/TransitiveValidationPropagationTest.java b/src/test/java/com/google/devtools/build/lib/analysis/TransitiveValidationPropagationTest.java index cdb83e43395015..8d6ee4f1b2c5c8 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/TransitiveValidationPropagationTest.java +++ b/src/test/java/com/google/devtools/build/lib/analysis/TransitiveValidationPropagationTest.java @@ -162,7 +162,7 @@ public void testValidationOutputPropagation() throws Exception { @Test public void testTransitiveValidationOutputGroupNotAllowedForStarlarkRules() throws Exception { scratch.file( - "test/foo_rule.bzl", + "foobar/foo_rule.bzl", """ def _impl(ctx): return [OutputGroupInfo(_validation_transitive = depset())] @@ -170,18 +170,18 @@ def _impl(ctx): foo_rule = rule(implementation = _impl) """); scratch.file( - "test/BUILD", + "foobar/BUILD", """ - load("//test:foo_rule.bzl", "foo_rule") + load("//foobar:foo_rule.bzl", "foo_rule") foo_rule(name = "foo") """); AssertionError expected = - assertThrows(AssertionError.class, () -> getConfiguredTarget("//test:foo")); + assertThrows(AssertionError.class, () -> getConfiguredTarget("//foobar:foo")); assertThat(expected) .hasMessageThat() - .contains("//test:foo_rule.bzl cannot access the _transitive_validation private API"); + .contains("//foobar:foo_rule.bzl cannot access the _transitive_validation private API"); } } diff --git a/src/test/java/com/google/devtools/build/lib/analysis/mock/BazelAnalysisMock.java b/src/test/java/com/google/devtools/build/lib/analysis/mock/BazelAnalysisMock.java index a8a3dc5d82efa1..9d0744be74a18b 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/mock/BazelAnalysisMock.java +++ b/src/test/java/com/google/devtools/build/lib/analysis/mock/BazelAnalysisMock.java @@ -931,6 +931,9 @@ public ImmutableMap getBuiltinModules(BlazeDirector .getPathString()))); } + @Override + public void 
setupPrelude(MockToolsConfig mockToolsConfig) {} + @Override public ConfiguredRuleClassProvider createRuleClassProvider() { return TestRuleClassProvider.getRuleClassProviderWithClearedSuffix(); diff --git a/src/test/java/com/google/devtools/build/lib/analysis/util/AnalysisMock.java b/src/test/java/com/google/devtools/build/lib/analysis/util/AnalysisMock.java index c7a16c496bf52e..02f94d1b5d2b9c 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/util/AnalysisMock.java +++ b/src/test/java/com/google/devtools/build/lib/analysis/util/AnalysisMock.java @@ -230,6 +230,8 @@ public abstract void addExtraRepositoryFunctions( public abstract ImmutableMap getBuiltinModules( BlazeDirectories directories); + public abstract void setupPrelude(MockToolsConfig mockToolsConfig) throws IOException; + /** * Stub class for tests to extend in order to update a small amount of {@link AnalysisMock} * functionality. @@ -316,6 +318,11 @@ public ImmutableMap getBuiltinModules( return delegate.getBuiltinModules(directories); } + @Override + public void setupPrelude(MockToolsConfig mockToolsConfig) throws IOException { + delegate.setupPrelude(mockToolsConfig); + } + @Override public void addExtraRepositoryFunctions( ImmutableMap.Builder repositoryHandlers) { diff --git a/src/test/java/com/google/devtools/build/lib/analysis/util/BuildViewTestCase.java b/src/test/java/com/google/devtools/build/lib/analysis/util/BuildViewTestCase.java index 7d80e235c669b0..9df49557a03f71 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/util/BuildViewTestCase.java +++ b/src/test/java/com/google/devtools/build/lib/analysis/util/BuildViewTestCase.java @@ -419,6 +419,7 @@ protected ImmutableList extraPrecomputedValues() thro protected void initializeMockClient() throws IOException { analysisMock.setupMockClient(mockToolsConfig); analysisMock.setupMockWorkspaceFiles(directories.getEmbeddedBinariesRoot()); + analysisMock.setupPrelude(mockToolsConfig); } protected AnalysisMock getAnalysisMock() { diff --git a/src/test/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryIntegrationTest.java b/src/test/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryIntegrationTest.java index 04940e5d67c52d..bab1a9466a9a0a 100644 --- a/src/test/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryIntegrationTest.java +++ b/src/test/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryIntegrationTest.java @@ -515,8 +515,7 @@ def _impl(ctx): invalidatePackages(); getConfiguredTarget("//:x"); - assertContainsEvent( - "repo rules may only be called from a WORKSPACE file or a macro loaded from there"); + assertContainsEvent("repository rules can only be used while evaluating a WORKSPACE file"); } @Test diff --git a/src/test/java/com/google/devtools/build/lib/buildtool/BUILD b/src/test/java/com/google/devtools/build/lib/buildtool/BUILD index b133eed5ebf935..234f63797c6e5c 100644 --- a/src/test/java/com/google/devtools/build/lib/buildtool/BUILD +++ b/src/test/java/com/google/devtools/build/lib/buildtool/BUILD @@ -751,6 +751,7 @@ java_test( srcs = ["SkyfocusIntegrationTest.java"], deps = [ "//src/main/java/com/google/devtools/build/lib/cmdline", + "//src/main/java/com/google/devtools/build/lib/runtime/commands", "//src/main/java/com/google/devtools/build/lib/skyframe:skyfocus_state", "//src/main/java/com/google/devtools/build/lib/util:abrupt_exit_exception", "//src/test/java/com/google/devtools/build/lib/buildtool/util", diff 
--git a/src/test/java/com/google/devtools/build/lib/buildtool/CorruptedActionCacheTest.java b/src/test/java/com/google/devtools/build/lib/buildtool/CorruptedActionCacheTest.java index 62dd9c56fb020d..3d4814feb2131d 100644 --- a/src/test/java/com/google/devtools/build/lib/buildtool/CorruptedActionCacheTest.java +++ b/src/test/java/com/google/devtools/build/lib/buildtool/CorruptedActionCacheTest.java @@ -65,7 +65,6 @@ public void testCorruptionActionCacheErrorMessage() throws Exception { assertThat(buildTarget("//foo:foo").getSuccess()).isTrue(); assertThat(events.errors()).hasSize(1); events.assertContainsError("Error during action cache initialization"); - events.assertContainsError( - "Bazel will now reset action cache data, potentially causing rebuilds"); + events.assertContainsError("Data will be reset, potentially causing target rebuilds"); } } diff --git a/src/test/java/com/google/devtools/build/lib/buildtool/QueryIntegrationTest.java b/src/test/java/com/google/devtools/build/lib/buildtool/QueryIntegrationTest.java index 9cd3dacecdd57f..5a31d7486ebca9 100644 --- a/src/test/java/com/google/devtools/build/lib/buildtool/QueryIntegrationTest.java +++ b/src/test/java/com/google/devtools/build/lib/buildtool/QueryIntegrationTest.java @@ -830,7 +830,7 @@ public void depthBoundedQuery(@TestParameter boolean orderResults) throws Except getQueryResult("deps(//depth:one, 3)", "--experimental_ui_debug_all_events"); if (orderResults) { - events.assertContainsEvent(EventKind.PROGRESS, "Loading package: depth2"); + assertContainsEvent(EventKind.PROGRESS, "Loading package: depth2"); } assertQueryOutputContains( diff --git a/src/test/java/com/google/devtools/build/lib/buildtool/SkyfocusIntegrationTest.java b/src/test/java/com/google/devtools/build/lib/buildtool/SkyfocusIntegrationTest.java index 84cafa4599969c..0884d6b1037769 100644 --- a/src/test/java/com/google/devtools/build/lib/buildtool/SkyfocusIntegrationTest.java +++ b/src/test/java/com/google/devtools/build/lib/buildtool/SkyfocusIntegrationTest.java @@ -19,6 +19,7 @@ import com.google.devtools.build.lib.buildtool.util.BuildIntegrationTestCase; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.TargetParsingException; +import com.google.devtools.build.lib.runtime.commands.CqueryCommand; import com.google.devtools.build.lib.skyframe.SkyfocusState.WorkingSetType; import com.google.devtools.build.lib.skyframe.util.SkyframeExecutorTestUtils; import com.google.devtools.build.lib.util.AbruptExitException; @@ -36,6 +37,25 @@ protected void setupOptions() throws Exception { addOptions("--experimental_enable_skyfocus"); } + @Test + public void cquery_doesNotTriggerSkyfocus() throws Exception { + write("hello/x.txt", "x"); + write( + "hello/BUILD", + """ + genrule( + name = "target", + srcs = ["x.txt"], + outs = ["out"], + cmd = "cat $< > $@", + ) + """); + + runtimeWrapper.newCommand(CqueryCommand.class); + buildTarget("//hello/..."); + assertThat(getSkyframeExecutor().getSkyfocusState().workingSetStrings()).isEmpty(); + } + @Test public void workingSet_canBeUsedWithBuildCommandAndNoTargets() throws Exception { write("hello/x.txt", "x"); diff --git a/src/test/java/com/google/devtools/build/lib/buildtool/util/BlazeRuntimeWrapper.java b/src/test/java/com/google/devtools/build/lib/buildtool/util/BlazeRuntimeWrapper.java index 6ff96bc3c4cf52..4379ae652247a5 100644 --- a/src/test/java/com/google/devtools/build/lib/buildtool/util/BlazeRuntimeWrapper.java +++ 
b/src/test/java/com/google/devtools/build/lib/buildtool/util/BlazeRuntimeWrapper.java @@ -17,6 +17,7 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.devtools.build.lib.runtime.Command.BuildPhase.NONE; import static com.google.devtools.build.lib.util.io.CommandExtensionReporter.NO_OP_COMMAND_EXTENSION_REPORTER; import com.google.common.collect.ImmutableList; @@ -267,6 +268,9 @@ private void initializeOptionsParser(Command commandAnnotation) throws OptionsPa optionsParser = createOptionsParser(commandAnnotation); optionsParser.parse(optionsToParse); + // Allow the command to edit the options. + command.editOptions(optionsParser); + // Enforce the test invocation policy once the options have been added InvocationPolicyEnforcer optionsPolicyEnforcer = new InvocationPolicyEnforcer( @@ -312,7 +316,7 @@ private OptionsParser createOptionsParser(Command commandAnnotation) { void executeNonBuildCommand() throws Exception { checkNotNull(command, "No command created, try calling newCommand()"); checkState( - !env.getCommand().builds(), + env.getCommand().buildPhase() == NONE, "%s is a build command, did you mean to call executeBuild()?", env.getCommandName()); @@ -353,7 +357,7 @@ void executeBuild(List targets) throws Exception { newCommand(BuildCommand.class); // If you didn't create a command we do it for you. } checkState( - env.getCommand().builds(), + env.getCommand().buildPhase().loads(), "%s is not a build command, did you mean to call executeNonBuildCommand()?", env.getCommandName()); @@ -450,7 +454,6 @@ private void beforeCommand() throws Exception { private void commandComplete(@Nullable Crash crash) throws Exception { Reporter reporter = env.getReporter(); - getSkyframeExecutor().notifyCommandComplete(reporter); if (crash != null) { runtime.getBugReporter().handleCrash(crash, CrashContext.keepAlive().reportingTo(reporter)); } diff --git a/src/test/java/com/google/devtools/build/lib/buildtool/util/BuildIntegrationTestCase.java b/src/test/java/com/google/devtools/build/lib/buildtool/util/BuildIntegrationTestCase.java index a1fff55b7dfd97..244a398f99bfaf 100644 --- a/src/test/java/com/google/devtools/build/lib/buildtool/util/BuildIntegrationTestCase.java +++ b/src/test/java/com/google/devtools/build/lib/buildtool/util/BuildIntegrationTestCase.java @@ -1163,6 +1163,11 @@ public final Event assertContainsEvent(String expectedEvent) { return assertContainsEvent(events.collector(), expectedEvent); } + @CanIgnoreReturnValue + public final Event assertContainsEvent(EventKind kind, String expectedEvent) { + return MoreAsserts.assertContainsEvent(events.collector(), expectedEvent, kind); + } + @CanIgnoreReturnValue public static Event assertContainsEvent(EventCollector eventCollector, String expectedEvent) { return MoreAsserts.assertContainsEvent(eventCollector, expectedEvent); diff --git a/src/test/java/com/google/devtools/build/lib/events/util/EventCollectionApparatus.java b/src/test/java/com/google/devtools/build/lib/events/util/EventCollectionApparatus.java index d11bdb12a31d67..09c38f06bec1f5 100644 --- a/src/test/java/com/google/devtools/build/lib/events/util/EventCollectionApparatus.java +++ b/src/test/java/com/google/devtools/build/lib/events/util/EventCollectionApparatus.java @@ -195,14 +195,6 @@ public Event assertContainsWarning(String expectedMessage) { return MoreAsserts.assertContainsEvent(eventCollector, 
expectedMessage, EventKind.WARNING); } - /** - * Utility method: Assert that the {@link #collector()} has received an event of the given type - * and with the {@code expectedMessage}. - */ - public Event assertContainsEvent(EventKind kind, String expectedMessage) { - return MoreAsserts.assertContainsEvent(eventCollector, expectedMessage, kind); - } - public List assertContainsEventWithFrequency(String expectedMessage, int expectedFrequency) { return MoreAsserts.assertContainsEventWithFrequency(eventCollector, expectedMessage, diff --git a/src/test/java/com/google/devtools/build/lib/packages/SymbolicMacroTest.java b/src/test/java/com/google/devtools/build/lib/packages/SymbolicMacroTest.java index 71943e27eea433..712d08ed072777 100644 --- a/src/test/java/com/google/devtools/build/lib/packages/SymbolicMacroTest.java +++ b/src/test/java/com/google/devtools/build/lib/packages/SymbolicMacroTest.java @@ -336,9 +336,10 @@ def _impl(name): assertGetPackageFailsWithEvent( "pkg", String.format( - "%s can only be used while evaluating a BUILD file, a WORKSPACE file, or a legacy macro" - + " loaded from there", - apiName)); + // The error also has one of the following suffixes: + // - " or a symbolic macro" + // - ", a symbolic macro, or a WORKSPACE file" + "%s can only be used while evaluating a BUILD file (or legacy macro)", apiName)); } @Test diff --git a/src/test/java/com/google/devtools/build/lib/packages/util/MockToolsConfig.java b/src/test/java/com/google/devtools/build/lib/packages/util/MockToolsConfig.java index 412c1cd21f9005..ec9025cf4de9f6 100644 --- a/src/test/java/com/google/devtools/build/lib/packages/util/MockToolsConfig.java +++ b/src/test/java/com/google/devtools/build/lib/packages/util/MockToolsConfig.java @@ -22,6 +22,8 @@ import com.google.devtools.build.lib.vfs.util.FileSystems; import com.google.devtools.build.runfiles.Runfiles; import java.io.IOException; +import java.nio.file.Files; +import java.util.stream.Stream; import javax.annotation.Nullable; /** @@ -178,4 +180,35 @@ public void linkTools(String... tools) throws IOException { linkTool(tool); } } + + public void copyDirectory(String relativeDirPath, int depth, boolean useEmptyBuildFiles) + throws IOException { + // Tests are assumed to be run from the main repository only. 
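// Hypothetical usage sketch (the directory path below is made up, not from this change):
//   mockToolsConfig.copyDirectory("tools/some_pkg", 3, /* useEmptyBuildFiles= */ true);
// walks the runfiles copy of that directory up to `depth` levels, copies every regular file
// into the scratch workspace, and substitutes an empty BUILD file whenever
// useEmptyBuildFiles is set.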
+ Runfiles runfiles = Runfiles.preload().withSourceRepository(""); + PathFragment rlocationPath = + PathFragment.create(TestConstants.WORKSPACE_NAME).getRelative(relativeDirPath); + java.nio.file.Path source = + FileSystems.getNativeFileSystem() + .getPath(runfiles.rlocation(rlocationPath.getPathString())) + .getPathFile() + .toPath(); + try (Stream stream = Files.walk(source, depth)) { + stream + .filter(f -> f.toFile().isFile()) + .map(f -> source.relativize(f).toString()) + .filter(f -> !f.isEmpty()) + .forEach( + f -> { + try { + if (f.endsWith("BUILD") && useEmptyBuildFiles) { + create(relativeDirPath + "/" + f); + } else { + copyTool(relativeDirPath + "/" + f); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } + } } diff --git a/src/test/java/com/google/devtools/build/lib/pkgcache/IncrementalLoadingTest.java b/src/test/java/com/google/devtools/build/lib/pkgcache/IncrementalLoadingTest.java index 6bc1f54a61b40b..fc7d118592f315 100644 --- a/src/test/java/com/google/devtools/build/lib/pkgcache/IncrementalLoadingTest.java +++ b/src/test/java/com/google/devtools/build/lib/pkgcache/IncrementalLoadingTest.java @@ -422,7 +422,8 @@ public void close() { private class ManualDiffAwarenessFactory implements DiffAwareness.Factory { @Nullable @Override - public DiffAwareness maybeCreate(Root pathEntry, ImmutableSet ignoredPaths) { + public DiffAwareness maybeCreate( + Root pathEntry, ImmutableSet ignoredPaths, OptionsProvider optionsProvider) { return pathEntry.asPath().equals(workspace) ? new ManualDiffAwareness() : null; } } diff --git a/src/test/java/com/google/devtools/build/lib/query2/cquery/ConfiguredTargetQuerySemanticsTest.java b/src/test/java/com/google/devtools/build/lib/query2/cquery/ConfiguredTargetQuerySemanticsTest.java index 27c54825649d03..5c6d299fae4e1c 100644 --- a/src/test/java/com/google/devtools/build/lib/query2/cquery/ConfiguredTargetQuerySemanticsTest.java +++ b/src/test/java/com/google/devtools/build/lib/query2/cquery/ConfiguredTargetQuerySemanticsTest.java @@ -832,6 +832,116 @@ def _test_impl(ctx): "//donut:test_filegroup"); } + @Test + public void testToolchainPropagatingAspectDepsAppearInCqueryDeps() throws Exception { + writeFile( + "donut_toolchains/test_toolchain.bzl", + """ + def _impl(ctx): + return [platform_common.ToolchainInfo()] + + test_toolchain = rule( + implementation = _impl, + ) + """); + writeFile( + "donut_toolchains/BUILD", + """ + load("//donut_toolchains:test_toolchain.bzl", "test_toolchain") + + toolchain_type(name = "toolchain_type_1") + + test_toolchain( + name = "foo", + ) + + toolchain( + name = "foo_toolchain", + toolchain = ":foo", + toolchain_type = ":toolchain_type_1", + ) + """); + writeFile( + "donut/test.bzl", + """ + TestAspectInfo = provider("TestAspectInfo", fields = ["info"]) + + def _test_aspect_impl(target, ctx): + return [ + TestAspectInfo( + info = depset([target.label]), + ), + ] + + _test_aspect = aspect( + implementation = _test_aspect_impl, + toolchains_aspects = ["//donut_toolchains:toolchain_type_1"], + attrs = { + "_test_attr": attr.label( + allow_files = True, + default = Label("//donut:test_filegroup"), + ), + }, + provides = [TestAspectInfo], + ) + + def _test_impl(ctx): + pass + + test_rule = rule( + _test_impl, + attrs = { + "deps": attr.label_list( + aspects = [_test_aspect], + ), + }, + ) + + rule_with_toolchain = rule( + _test_impl, + toolchains = ["//donut_toolchains:toolchain_type_1"], + ) + """); + writeFile( + "donut/BUILD", + """ + load(":test.bzl", "test_rule", 
"rule_with_toolchain") + + filegroup( + name = "test_filegroup", + srcs = ["test.bzl"], + ) + + rule_with_toolchain( + name = "test_rule_dep", + ) + + test_rule( + name = "test_rule", + deps = [":test_rule_dep"], + ) + """); + helper.setQuerySettings(Setting.INCLUDE_ASPECTS, Setting.EXPLICIT_ASPECTS); + ((PostAnalysisQueryHelper) helper) + .useConfiguration("--extra_toolchains=//donut_toolchains:foo_toolchain"); + + var result = + eval("filter(//donut, deps(//donut:test_rule))").stream() + .map(cf -> cf.getDescription(LabelPrinter.legacy())) + .collect(ImmutableList.toImmutableList()); + + assertThat(result) + .containsExactly( + "//donut:test_rule", + "//donut:test_rule_dep", + "//donut:test.bzl%_test_aspect of //donut:test_rule_dep", + "//donut:test.bzl", + "//donut:test_filegroup", + "//donut_toolchains:foo", + "//donut_toolchains:toolchain_type_1", + "//donut:test.bzl%_test_aspect of //donut_toolchains:foo"); + } + @Test public void testAspectOnAspectDepsAppearInCqueryDeps() throws Exception { writeFile( diff --git a/src/test/java/com/google/devtools/build/lib/rules/android/AndroidStarlarkCommonTest.java b/src/test/java/com/google/devtools/build/lib/rules/android/AndroidStarlarkCommonTest.java index b93e2846c34e7a..55f980ec6e48ee 100644 --- a/src/test/java/com/google/devtools/build/lib/rules/android/AndroidStarlarkCommonTest.java +++ b/src/test/java/com/google/devtools/build/lib/rules/android/AndroidStarlarkCommonTest.java @@ -82,31 +82,4 @@ def _impl(ctx): assertThat(fooJavaInfo.getJavaPluginInfo()).isNotNull(); assertThat(barJavaInfo.getJavaPluginInfo()).isNull(); } - - @Test - public void androidPlatformsTransition() throws Exception { - scratch.file( - "java/android/compatible.bzl", - """ - def _impl(ctx): - pass - - my_rule = rule( - implementation = _impl, - cfg = android_common.android_platforms_transition, - ) - """); - scratch.file( - "java/android/BUILD", - """ - load(":compatible.bzl", "my_rule") - - my_rule( - name = "bar", - ) - """); - - // Just check that the rule can be analyzed. 
- assertThat(getConfiguredTarget("//java/android:bar")).isNotNull(); - } } diff --git a/src/test/java/com/google/devtools/build/lib/rules/config/FeatureFlagSetterRule.java b/src/test/java/com/google/devtools/build/lib/rules/config/FeatureFlagSetterRule.java index eee32f2ed89d44..80f29030457329 100644 --- a/src/test/java/com/google/devtools/build/lib/rules/config/FeatureFlagSetterRule.java +++ b/src/test/java/com/google/devtools/build/lib/rules/config/FeatureFlagSetterRule.java @@ -82,7 +82,9 @@ public ConfiguredTarget create(RuleContext ruleContext) RuleConfiguredTargetBuilder builder = new RuleConfiguredTargetBuilder(ruleContext) - .setFilesToBuild(PrerequisiteArtifacts.nestedSet(ruleContext, "deps")) + .setFilesToBuild( + PrerequisiteArtifacts.nestedSet( + ruleContext.getRulePrerequisitesCollection(), "deps")) .addProvider(RunfilesProvider.class, RunfilesProvider.EMPTY); if (exportedFlagProvider != null) { builder.addNativeDeclaredProvider(exportedFlagProvider); diff --git a/src/test/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollectorTest.java b/src/test/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollectorTest.java index 41c1c0291a9618..74210984dd1e37 100644 --- a/src/test/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollectorTest.java +++ b/src/test/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollectorTest.java @@ -286,9 +286,10 @@ public void dynamicLink_siblingLayout_externalToolchain_rpath() throws Exception List linkArgs = linkAction.getArguments(); assertThat(linkArgs) .contains( - "--runtime_library=../../../../toolchain/k8-fastbuild/bin/_solib___Cc_Utoolchain/"); + "--runtime_library=../../../../toolchain/k8-fastbuild/bin/_solib__toolchain_A_Cc_Utoolchain/"); assertThat(linkArgs) - .contains("--runtime_library=foo.runfiles/toolchain/_solib___Cc_Utoolchain/"); - assertThat(linkArgs).contains("--runtime_library=../../../toolchain/_solib___Cc_Utoolchain/"); + .contains("--runtime_library=foo.runfiles/toolchain/_solib__toolchain_A_Cc_Utoolchain/"); + assertThat(linkArgs) + .contains("--runtime_library=../../../toolchain/_solib__toolchain_A_Cc_Utoolchain/"); } } diff --git a/src/test/java/com/google/devtools/build/lib/rules/cpp/StarlarkCcCommonTest.java b/src/test/java/com/google/devtools/build/lib/rules/cpp/StarlarkCcCommonTest.java index b7da00cbda6bdb..4e2ad00951e78b 100755 --- a/src/test/java/com/google/devtools/build/lib/rules/cpp/StarlarkCcCommonTest.java +++ b/src/test/java/com/google/devtools/build/lib/rules/cpp/StarlarkCcCommonTest.java @@ -6298,6 +6298,30 @@ public void testInterfaceLibraryProducedForTransitiveLinkOnWindows() throws Exce assertThat(library.getInterfaceLibrary()).isNotNull(); } + @Test + public void testEmitInterfaceLibrary() throws Exception { + getAnalysisMock() + .ccSupport() + .setupCcToolchainConfig( + mockToolsConfig, + CcToolchainConfig.builder() + .withFeatures( + CppRuleClasses.SUPPORTS_DYNAMIC_LINKER, + CppRuleClasses.SUPPORTS_INTERFACE_SHARED_LIBRARIES, + CppRuleClasses.COPY_DYNAMIC_LIBRARIES_TO_BINARY)); + setupTestTransitiveLinkInternal( + scratch, + /* internalApi= */ true, + "output_type = 'dynamic_library'", + "emit_interface_shared_library = True"); + ConfiguredTarget target = getConfiguredTarget("//foo:bin"); + assertThat(target).isNotNull(); + LibraryToLink library = (LibraryToLink) getMyInfoFromTarget(target).getValue("library"); + assertThat(library).isNotNull(); + assertThat(library.getDynamicLibrary()).isNotNull(); + assertThat(library.getInterfaceLibrary()).isNotNull(); 
+ } + @Test public void testTransitiveLinkForExecutable() throws Exception { setupTestTransitiveLink(scratch, "output_type = 'executable'"); @@ -6812,12 +6836,22 @@ private static void createCcBinRule( private static void setupTestTransitiveLink(Scratch scratch, String... additionalLines) throws Exception { - createCcBinRule(scratch, /* internalApi= */ false, additionalLines); + setupTestTransitiveLinkInternal(scratch, /* internalApi= */ false, additionalLines); + } + + private static void setupTestTransitiveLinkInternal( + Scratch scratch, boolean internalApi, String... additionalLines) throws Exception { + createCcBinRule(scratch, internalApi, additionalLines); + String bzlPath; + if (internalApi) { + bzlPath = "bazel_internal/test_rules/cc"; + } else { + bzlPath = "tools/build_defs"; + } scratch.file( "foo/BUILD", + "load(\"//" + bzlPath + ":extension.bzl\", \"cc_bin\")", """ - load("//tools/build_defs:extension.bzl", "cc_bin") - cc_library( name = "dep1", srcs = ["dep1.cc"], @@ -7602,7 +7636,8 @@ public void testExpandedLinkApiRaisesError() throws Exception { String.format(callFormatString, "additional_linkstamp_defines=[]"), String.format(callFormatString, "whole_archive=False"), String.format(callFormatString, "native_deps=False"), - String.format(callFormatString, "only_for_dynamic_libs=False")); + String.format(callFormatString, "only_for_dynamic_libs=False"), + String.format(callFormatString, "emit_interface_shared_library=True")); for (String call : calls) { scratch.overwriteFile( "b/rule.bzl", diff --git a/src/test/java/com/google/devtools/build/lib/rules/proto/BazelProtoCommonTest.java b/src/test/java/com/google/devtools/build/lib/rules/proto/BazelProtoCommonTest.java index aa6a6a873e782d..aa7020a48c2390 100644 --- a/src/test/java/com/google/devtools/build/lib/rules/proto/BazelProtoCommonTest.java +++ b/src/test/java/com/google/devtools/build/lib/rules/proto/BazelProtoCommonTest.java @@ -134,7 +134,7 @@ def _impl(ctx): [outfile], **kwargs) return [DefaultInfo(files = depset([outfile]))] - generate_rule = rule(_impl, + compile_rule = rule(_impl, attrs = { 'proto_dep': attr.label(), 'plugin_output': attr.string(), @@ -158,7 +158,7 @@ def _impl(ctx): 'MyRule', ctx.attr.proto_dep.label) return [BoolProvider(value = result)] - should_generate_rule = rule(_impl, + should_compile_rule = rule(_impl, attrs = { 'proto_dep': attr.label(), 'toolchain': attr.label(default = '//foo:toolchain'), @@ -202,15 +202,17 @@ def _impl(ctx): """); } - /** Verifies basic usage of proto_common.generate_code. */ + // LINT.IfChange + + /** Verifies basic usage of proto_common.compile. */ @Test - public void generateCode_basic() throws Exception { + public void protoCommonCompile_basic() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", - "generate_rule(name = 'simple', proto_dep = ':proto')"); + "compile_rule(name = 'simple', proto_dep = ':proto')"); ConfiguredTarget target = getConfiguredTarget("//bar:simple"); @@ -225,15 +227,15 @@ public void generateCode_basic() throws Exception { assertThat(spawnAction.getProgressMessage()).isEqualTo("Progress Message //bar:simple"); } - /** Verifies usage of proto_common.generate_code with no plugin specified by toolchain. */ + /** Verifies usage of proto_common.compile with no plugin specified by toolchain. 
*/ @Test - public void generateCode_noPlugin() throws Exception { + public void protoCommonCompile_noPlugin() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", - "generate_rule(name = 'simple', proto_dep = ':proto',", + "compile_rule(name = 'simple', proto_dep = ':proto',", " toolchain = '//foo:toolchain_noplugin')"); ConfiguredTarget target = getConfiguredTarget("//bar:simple"); @@ -247,17 +249,17 @@ public void generateCode_noPlugin() throws Exception { } /** - * Verifies usage of proto_common.generate_code with plugin_output - * parameter set to file. + * Verifies usage of proto_common.compile with plugin_output parameter + * set to file. */ @Test - public void generateCode_withPluginOutput() throws Exception { + public void protoCommonCompile_withPluginOutput() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", - "generate_rule(name = 'simple', proto_dep = ':proto', plugin_output = 'single')"); + "compile_rule(name = 'simple', proto_dep = ':proto', plugin_output = 'single')"); useConfiguration( "--platforms=" + TestConstants.PLATFORM_LABEL, "--experimental_platform_in_output_dir", @@ -279,17 +281,17 @@ public void generateCode_withPluginOutput() throws Exception { } /** - * Verifies usage of proto_common.generate_code with plugin_output - * parameter set to directory. + * Verifies usage of proto_common.compile with plugin_output parameter + * set to directory. */ @Test - public void generateCode_withDirectoryPluginOutput() throws Exception { + public void protoCommonCompile_withDirectoryPluginOutput() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", - "generate_rule(name = 'simple', proto_dep = ':proto', plugin_output = 'multiple')"); + "compile_rule(name = 'simple', proto_dep = ':proto', plugin_output = 'multiple')"); useConfiguration( "--platforms=" + TestConstants.PLATFORM_LABEL, "--experimental_platform_in_output_dir", @@ -312,17 +314,17 @@ public void generateCode_withDirectoryPluginOutput() throws Exception { } /** - * Verifies usage of proto_common.generate_code with additional_args + * Verifies usage of proto_common.compile with additional_args * parameter. */ @Test - public void generateCode_additionalArgs() throws Exception { + public void protoCommonCompile_additionalArgs() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", - "generate_rule(name = 'simple', proto_dep = ':proto', additional_args = ['--a', '--b'])"); + "compile_rule(name = 'simple', proto_dep = ':proto', additional_args = ['--a', '--b'])"); ConfiguredTarget target = getConfiguredTarget("//bar:simple"); @@ -340,19 +342,19 @@ public void generateCode_additionalArgs() throws Exception { } /** - * Verifies usage of proto_common.generate_code with additional_tools + * Verifies usage of proto_common.compile with additional_tools * parameter. 
*/ @Test - public void generateCode_additionalTools() throws Exception { + public void protoCommonCompile_additionalTools() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", "cc_binary(name = 'tool1', srcs = ['tool1.cc'])", "cc_binary(name = 'tool2', srcs = ['tool2.cc'])", - "generate_rule(name = 'simple', proto_dep = ':proto',", + "compile_rule(name = 'simple', proto_dep = ':proto',", " additional_tools = [':tool1', ':tool2'])"); ConfiguredTarget target = getConfiguredTarget("//bar:simple"); @@ -363,19 +365,19 @@ public void generateCode_additionalTools() throws Exception { } /** - * Verifies usage of proto_common.generate_code with additional_tools + * Verifies usage of proto_common.compile with additional_tools * parameter and no plugin on the toolchain. */ @Test - public void generateCode_additionalToolsNoPlugin() throws Exception { + public void protoCommonCompile_additionalToolsNoPlugin() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", "cc_binary(name = 'tool1', srcs = ['tool1.cc'])", "cc_binary(name = 'tool2', srcs = ['tool2.cc'])", - "generate_rule(name = 'simple',", + "compile_rule(name = 'simple',", " proto_dep = ':proto',", " additional_tools = [':tool1', ':tool2'],", " toolchain = '//foo:toolchain_noplugin',", @@ -389,17 +391,17 @@ public void generateCode_additionalToolsNoPlugin() throws Exception { } /** - * Verifies usage of proto_common.generate_code with additional_inputs + * Verifies usage of proto_common.compile with additional_inputs * parameter. */ @Test - public void generateCode_additionalInputs() throws Exception { + public void protoCommonCompile_additionalInputs() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", - "generate_rule(name = 'simple', proto_dep = ':proto',", + "compile_rule(name = 'simple', proto_dep = ':proto',", " additional_inputs = [':input1.txt', ':input2.txt'])"); ConfiguredTarget target = getConfiguredTarget("//bar:simple"); @@ -410,17 +412,16 @@ public void generateCode_additionalInputs() throws Exception { } /** - * Verifies usage of proto_common.generate_code with resource_set - * parameter. + * Verifies usage of proto_common.compile with resource_set parameter. */ @Test - public void generateCode_resourceSet() throws Exception { + public void protoCommonCompile_resourceSet() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", - "generate_rule(name = 'simple', proto_dep = ':proto', use_resource_set = True)"); + "compile_rule(name = 'simple', proto_dep = ':proto', use_resource_set = True)"); ConfiguredTarget target = getConfiguredTarget("//bar:simple"); @@ -433,14 +434,14 @@ public void generateCode_resourceSet() throws Exception { /** Verifies --protocopts are passed to command line. 
*/ @Test - public void generateCode_protocOpts() throws Exception { + public void protoCommonCompile_protocOpts() throws Exception { useConfiguration("--protocopt=--foo", "--protocopt=--bar"); scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", - "generate_rule(name = 'simple', proto_dep = ':proto')"); + "compile_rule(name = 'simple', proto_dep = ':proto')"); ConfiguredTarget target = getConfiguredTarget("//bar:simple"); @@ -458,18 +459,18 @@ public void generateCode_protocOpts() throws Exception { } /** - * Verifies proto_common.generate_code correctly handles direct generated + * Verifies proto_common.compile correctly handles direct generated * .proto files. */ @Test - public void generateCode_directGeneratedProtos() throws Exception { + public void protoCommonCompile_directGeneratedProtos() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "genrule(name = 'generate', srcs = ['A.txt'], cmd = '', outs = ['G.proto'])", "proto_library(name = 'proto', srcs = ['A.proto', 'G.proto'])", - "generate_rule(name = 'simple', proto_dep = ':proto')"); + "compile_rule(name = 'simple', proto_dep = ':proto')"); useConfiguration( "--platforms=" + TestConstants.PLATFORM_LABEL, "--experimental_platform_in_output_dir", @@ -493,19 +494,19 @@ public void generateCode_directGeneratedProtos() throws Exception { } /** - * Verifies proto_common.generate_code correctly handles in-direct generated + * Verifies proto_common.compile correctly handles in-direct generated * .proto files. */ @Test - public void generateCode_inDirectGeneratedProtos() throws Exception { + public void protoCommonCompile_inDirectGeneratedProtos() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "genrule(name = 'generate', srcs = ['A.txt'], cmd = '', outs = ['G.proto'])", "proto_library(name = 'generated', srcs = ['G.proto'])", "proto_library(name = 'proto', srcs = ['A.proto'], deps = [':generated'])", - "generate_rule(name = 'simple', proto_dep = ':proto')"); + "compile_rule(name = 'simple', proto_dep = ':proto')"); useConfiguration( "--platforms=" + TestConstants.PLATFORM_LABEL, @@ -528,8 +529,10 @@ public void generateCode_inDirectGeneratedProtos() throws Exception { .inOrder(); } + // LINT.ThenChange(@protobuf/github/bazel/tests/proto_common_compile_tests.bzl) + /** - * Verifies proto_common.generate_code correctly handles external proto_library + * Verifies proto_common.compile correctly handles external proto_library * -es. 
*/ @Test @@ -540,7 +543,7 @@ public void generateCode_inDirectGeneratedProtos() throws Exception { "{sibling: true, generated: false,expectedFlags:" + " ['-I../foo']}", "{sibling: true, generated: true, expectedFlags:" + " ['-Ibl?azel?-out/foo/k8-fastbuild/bin']}", }) - public void generateCode_externalProtoLibrary( + public void protoCommonCompile_externalProtoLibrary( boolean sibling, boolean generated, List expectedFlags) throws Exception { if (sibling) { setBuildLanguageOptions("--experimental_sibling_repository_layout"); @@ -558,9 +561,9 @@ public void generateCode_externalProtoLibrary( scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'], deps = ['@foo//e:e'])", - "generate_rule(name = 'simple', proto_dep = ':proto')"); + "compile_rule(name = 'simple', proto_dep = ':proto')"); useConfiguration( "--platforms=" + TestConstants.PLATFORM_LABEL, "--experimental_platform_in_output_dir", @@ -584,13 +587,13 @@ public void generateCode_externalProtoLibrary( /** Verifies experimental_progress_message parameters. */ @Test - public void generateCode_overrideProgressMessage() throws Exception { + public void protoCommonCompile_overrideProgressMessage() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:generate.bzl', 'generate_rule')", + "load('//foo:generate.bzl', 'compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", - "generate_rule(name = 'simple', proto_dep = ':proto', progress_message = 'My %{label}')"); + "compile_rule(name = 'simple', proto_dep = ':proto', progress_message = 'My %{label}')"); ConfiguredTarget target = getConfiguredTarget("//bar:simple"); @@ -607,13 +610,13 @@ public void generateCode_overrideProgressMessage() throws Exception { /** Verifies proto_common.should_generate_code call. */ @Test - public void shouldGenerateCode_basic() throws Exception { + public void shouldprotoCommonCompile_basic() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:should_generate.bzl', 'should_generate_rule')", + "load('//foo:should_generate.bzl', 'should_compile_rule')", "proto_library(name = 'proto', srcs = ['A.proto'])", - "should_generate_rule(name = 'simple', proto_dep = ':proto')"); + "should_compile_rule(name = 'simple', proto_dep = ':proto')"); ConfiguredTarget target = getConfiguredTarget("//bar:simple"); @@ -623,12 +626,12 @@ public void shouldGenerateCode_basic() throws Exception { /** Verifies proto_common.should_generate_code call. */ @Test - public void shouldGenerateCode_dontGenerate() throws Exception { + public void shouldprotoCommonCompile_dontGenerate() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:should_generate.bzl', 'should_generate_rule')", - "should_generate_rule(name = 'simple', proto_dep = '//third_party/x:denied')"); + "load('//foo:should_generate.bzl', 'should_compile_rule')", + "should_compile_rule(name = 'simple', proto_dep = '//third_party/x:denied')"); ConfiguredTarget target = getConfiguredTarget("//bar:simple"); @@ -638,12 +641,12 @@ public void shouldGenerateCode_dontGenerate() throws Exception { /** Verifies proto_common.should_generate_code call. 
*/ @Test - public void shouldGenerateCode_mixed() throws Exception { + public void shouldprotoCommonCompile_mixed() throws Exception { scratch.file( "bar/BUILD", TestConstants.LOAD_PROTO_LIBRARY, - "load('//foo:should_generate.bzl', 'should_generate_rule')", - "should_generate_rule(name = 'simple', proto_dep = '//third_party/x:mixed')"); + "load('//foo:should_generate.bzl', 'should_compile_rule')", + "should_compile_rule(name = 'simple', proto_dep = '//third_party/x:mixed')"); reporter.removeHandler(failFastHandler); getConfiguredTarget("//bar:simple"); diff --git a/src/test/java/com/google/devtools/build/lib/runtime/CommandLineEventTest.java b/src/test/java/com/google/devtools/build/lib/runtime/CommandLineEventTest.java index 9f67cc11dd06bf..0aeb8a2cc7ec00 100644 --- a/src/test/java/com/google/devtools/build/lib/runtime/CommandLineEventTest.java +++ b/src/test/java/com/google/devtools/build/lib/runtime/CommandLineEventTest.java @@ -18,6 +18,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.io.BaseEncoding; import com.google.devtools.build.lib.bazel.BazelStartupOptionsModule.Options; +import com.google.devtools.build.lib.buildeventstream.BuildEventProtocolOptions; import com.google.devtools.build.lib.buildeventstream.BuildEventStreamProtos.BuildEventId.StructuredCommandLineId; import com.google.devtools.build.lib.runtime.CommandLineEvent.CanonicalCommandLineEvent; import com.google.devtools.build.lib.runtime.CommandLineEvent.OriginalCommandLineEvent; @@ -511,4 +512,64 @@ public void testSimpleStringToolCommandLine() throws OptionsParsingException { assertThat(line.getSections(0).getChunkList().getChunk(0)) .isEqualTo("The quick brown fox jumps over the lazy dog"); } + + @Test + public void redactedResidual_includesTarget_originalCommandLine() throws OptionsParsingException { + OptionsParser fakeStartupOptions = + OptionsParser.builder().optionsClasses(BlazeServerStartupOptions.class).build(); + OptionsParser fakeCommandOptions = + OptionsParser.builder().optionsClasses(BuildEventProtocolOptions.class).build(); + fakeCommandOptions.parse("--experimental_run_bep_event_include_residue=false"); + fakeCommandOptions.setResidue( + ImmutableList.of("//some:target", "--sensitive_arg"), ImmutableList.of()); + + CommandLine line = + new OriginalCommandLineEvent( + "testblaze", + fakeStartupOptions, + "run", + fakeCommandOptions, + Optional.of(ImmutableList.of())) + .asStreamProto(null) + .getStructuredCommandLine(); + + assertThat(line.getCommandLineLabel()).isEqualTo("original"); + checkCommandLineSectionLabels(line); + assertThat(line.getSections(0).getChunkList().getChunk(0)).isEqualTo("testblaze"); + assertThat(line.getSections(1).getOptionList().getOptionCount()).isEqualTo(0); + assertThat(line.getSections(2).getChunkList().getChunk(0)).isEqualTo("run"); + assertThat(line.getSections(3).getOptionList().getOptionCount()).isEqualTo(1); + assertThat(line.getSections(4).getChunkList().getChunkCount()).isEqualTo(2); + assertThat(line.getSections(4).getChunkList().getChunk(0)).isEqualTo("//some:target"); + assertThat(line.getSections(4).getChunkList().getChunk(1)).isEqualTo("REDACTED"); + } + + @Test + public void redactedResidual_includesTarget_canonicalCommandLine() + throws OptionsParsingException { + OptionsParser fakeStartupOptions = + OptionsParser.builder().optionsClasses(BlazeServerStartupOptions.class).build(); + OptionsParser fakeCommandOptions = + OptionsParser.builder().optionsClasses(BuildEventProtocolOptions.class).build(); + 
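+    // As in the original-command-line test above, disabling
+    // --experimental_run_bep_event_include_residue should keep the run target in the
+    // residue section and replace the remaining residue arguments with "REDACTED".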
fakeCommandOptions.parse("--experimental_run_bep_event_include_residue=false"); + fakeCommandOptions.setResidue( + ImmutableList.of("//some:target", "--sensitive_arg"), ImmutableList.of()); + + CommandLine line = + new CanonicalCommandLineEvent("testblaze", fakeStartupOptions, "run", fakeCommandOptions) + .asStreamProto(null) + .getStructuredCommandLine(); + + assertThat(line.getCommandLineLabel()).isEqualTo("canonical"); + checkCommandLineSectionLabels(line); + assertThat(line.getSections(0).getChunkList().getChunk(0)).isEqualTo("testblaze"); + assertThat(line.getSections(1).getOptionList().getOptionCount()).isEqualTo(1); + assertThat(line.getSections(1).getOptionList().getOption(0).getCombinedForm()) + .isEqualTo("--ignore_all_rc_files"); + assertThat(line.getSections(2).getChunkList().getChunk(0)).isEqualTo("run"); + assertThat(line.getSections(3).getOptionList().getOptionCount()).isEqualTo(1); + assertThat(line.getSections(4).getChunkList().getChunkCount()).isEqualTo(2); + assertThat(line.getSections(4).getChunkList().getChunk(0)).isEqualTo("//some:target"); + assertThat(line.getSections(4).getChunkList().getChunk(1)).isEqualTo("REDACTED"); + } } diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/BzlCompileFunctionTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/BzlCompileFunctionTest.java index 3e0627666ab798..846911cfa12ba8 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/BzlCompileFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/BzlCompileFunctionTest.java @@ -77,7 +77,7 @@ protected FileSystem createFileSystem() { @Test public void testIOExceptionOccursDuringReading() throws Exception { reporter.removeHandler(failFastHandler); - scratch.file("/workspace/tools/build_rules/BUILD"); + scratch.file("/workspace/tools/test_build_rules/BUILD"); scratch.file( "foo/BUILD", """ diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/ConfigurationsForTargetsTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/ConfigurationsForTargetsTest.java index 027940fa9fc0aa..17cdfd985e22c7 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/ConfigurationsForTargetsTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/ConfigurationsForTargetsTest.java @@ -174,7 +174,8 @@ public SkyValue compute(SkyKey skyKey, Environment env) starlarkExecTransition.orElse(null), env, env.getListener(), - /* baseTargetPrerequisitesSupplier= */ null); + /* baseTargetPrerequisitesSupplier= */ null, + /* baseTargetUnloadedToolchainContexts= */ null); return env.valuesMissing() ? 
null : new Value(depMap); } catch (RuntimeException e) { throw e; diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/DiffAwarenessManagerTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/DiffAwarenessManagerTest.java index b0ab2992f7ea8d..741156747fe13d 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/DiffAwarenessManagerTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/DiffAwarenessManagerTest.java @@ -217,12 +217,14 @@ public void testIndependentAwarenessPerIgnoredPaths() throws Exception { ModifiedFileSet diff1 = modifiedFileSet("/path/ignored-path-2/foo"); DiffAwareness diffAwareness1 = new DiffAwarenessStub(ImmutableList.of(diff1)); - when(factory.maybeCreate(pathEntry, ImmutableSet.of(fs.getPath("/path/ignored-path-1")))) + when(factory.maybeCreate( + pathEntry, ImmutableSet.of(fs.getPath("/path/ignored-path-1")), OptionsProvider.EMPTY)) .thenReturn(diffAwareness1); ModifiedFileSet diff2 = modifiedFileSet("/path/ignored-path-1/foo"); DiffAwareness diffAwareness2 = new DiffAwarenessStub(ImmutableList.of(diff2)); - when(factory.maybeCreate(pathEntry, ImmutableSet.of(fs.getPath("/path/ignored-path-2")))) + when(factory.maybeCreate( + pathEntry, ImmutableSet.of(fs.getPath("/path/ignored-path-2")), OptionsProvider.EMPTY)) .thenReturn(diffAwareness2); DiffAwarenessManager manager = new DiffAwarenessManager(ImmutableList.of(factory)); @@ -233,6 +235,7 @@ public void testIndependentAwarenessPerIgnoredPaths() throws Exception { pathEntry, ImmutableSet.of(fs.getPath("/path/ignored-path-1")), OptionsProvider.EMPTY); + processedDiff1.markProcessed(); assertThat(processedDiff1.getModifiedFileSet()).isEqualTo(ModifiedFileSet.EVERYTHING_MODIFIED); processedDiff1 = manager.getDiff( @@ -240,6 +243,7 @@ public void testIndependentAwarenessPerIgnoredPaths() throws Exception { pathEntry, ImmutableSet.of(fs.getPath("/path/ignored-path-1")), OptionsProvider.EMPTY); + processedDiff1.markProcessed(); assertThat(processedDiff1.getModifiedFileSet()).isEqualTo(diff1); ProcessableModifiedFileSet processedDiff2 = @@ -248,6 +252,7 @@ public void testIndependentAwarenessPerIgnoredPaths() throws Exception { pathEntry, ImmutableSet.of(fs.getPath("/path/ignored-path-2")), OptionsProvider.EMPTY); + processedDiff2.markProcessed(); assertThat(processedDiff2.getModifiedFileSet()).isEqualTo(ModifiedFileSet.EVERYTHING_MODIFIED); processedDiff2 = manager.getDiff( @@ -255,6 +260,7 @@ public void testIndependentAwarenessPerIgnoredPaths() throws Exception { pathEntry, ImmutableSet.of(fs.getPath("/path/ignored-path-2")), OptionsProvider.EMPTY); + processedDiff2.markProcessed(); assertThat(processedDiff2.getModifiedFileSet()).isEqualTo(diff2); } @@ -265,7 +271,8 @@ public void getDiff_cleanBuild_propagatesWorkspaceInfo() throws Exception { DiffAwareness diffAwareness = mock(DiffAwareness.class); when(diffAwareness.getCurrentView(any())).thenReturn(createView(workspaceInfo)); DiffAwareness.Factory factory = mock(DiffAwareness.Factory.class); - when(factory.maybeCreate(pathEntry, ImmutableSet.of())).thenReturn(diffAwareness); + when(factory.maybeCreate(pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY)) + .thenReturn(diffAwareness); DiffAwarenessManager manager = new DiffAwarenessManager(ImmutableList.of(factory)); ProcessableModifiedFileSet diff = @@ -286,7 +293,8 @@ public void getDiff_incrementalBuild_propagatesLatestWorkspaceInfo() throws Exce when(diffAwareness.getDiff(view1, view2)) 
.thenReturn(ModifiedFileSet.builder().modify(PathFragment.create("file")).build()); DiffAwareness.Factory factory = mock(DiffAwareness.Factory.class); - when(factory.maybeCreate(pathEntry, ImmutableSet.of())).thenReturn(diffAwareness); + when(factory.maybeCreate(pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY)) + .thenReturn(diffAwareness); DiffAwarenessManager manager = new DiffAwarenessManager(ImmutableList.of(factory)); var unused = manager.getDiff(events.reporter(), pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY); @@ -308,7 +316,8 @@ public void getDiff_incrementalBuildNoChange_propagatesNewWorkspaceInfo() throws when(diffAwareness.getCurrentView(any())).thenReturn(view1, view2); when(diffAwareness.getDiff(view1, view2)).thenReturn(ModifiedFileSet.NOTHING_MODIFIED); DiffAwareness.Factory factory = mock(DiffAwareness.Factory.class); - when(factory.maybeCreate(pathEntry, ImmutableSet.of())).thenReturn(diffAwareness); + when(factory.maybeCreate(pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY)) + .thenReturn(diffAwareness); DiffAwarenessManager manager = new DiffAwarenessManager(ImmutableList.of(factory)); var unused = manager.getDiff(events.reporter(), pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY); @@ -330,7 +339,8 @@ public void getDiff_incrementalBuildWithNoWorkspaceInfo_returnsDiffWithNullWorks when(diffAwareness.getDiff(view1, view2)) .thenReturn(ModifiedFileSet.builder().modify(PathFragment.create("file")).build()); DiffAwareness.Factory factory = mock(DiffAwareness.Factory.class); - when(factory.maybeCreate(pathEntry, ImmutableSet.of())).thenReturn(diffAwareness); + when(factory.maybeCreate(pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY)) + .thenReturn(diffAwareness); DiffAwarenessManager manager = new DiffAwarenessManager(ImmutableList.of(factory)); var unused = manager.getDiff(events.reporter(), pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY); @@ -352,10 +362,12 @@ public void getDiff_brokenDiffAwareness_returnsDiffWithNullWorkspaceInfo() throw when(diffAwareness.getCurrentView(any())).thenReturn(view1, view2); when(diffAwareness.getDiff(view1, view2)).thenThrow(BrokenDiffAwarenessException.class); DiffAwareness.Factory factory = mock(DiffAwareness.Factory.class); - when(factory.maybeCreate(pathEntry, ImmutableSet.of())).thenReturn(diffAwareness); + when(factory.maybeCreate(pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY)) + .thenReturn(diffAwareness); DiffAwarenessManager manager = new DiffAwarenessManager(ImmutableList.of(factory)); - var unused = + var diff1 = manager.getDiff(events.reporter(), pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY); + diff1.markProcessed(); ProcessableModifiedFileSet diff = manager.getDiff(events.reporter(), pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY); @@ -372,10 +384,12 @@ public void getDiff_incompatibleDiff_fails() throws Exception { when(diffAwareness.getCurrentView(any())).thenReturn(view1, view2); when(diffAwareness.getDiff(view1, view2)).thenThrow(IncompatibleViewException.class); DiffAwareness.Factory factory = mock(DiffAwareness.Factory.class); - when(factory.maybeCreate(pathEntry, ImmutableSet.of())).thenReturn(diffAwareness); + when(factory.maybeCreate(pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY)) + .thenReturn(diffAwareness); DiffAwarenessManager manager = new DiffAwarenessManager(ImmutableList.of(factory)); - var unused = + var diff1 = manager.getDiff(events.reporter(), pathEntry, ImmutableSet.of(), OptionsProvider.EMPTY); + diff1.markProcessed(); assertThrows( 
IllegalStateException.class, @@ -408,7 +422,8 @@ public void remove(Root pathEntry) { @Override @Nullable - public DiffAwareness maybeCreate(Root pathEntry, ImmutableSet ignoredPaths) { + public DiffAwareness maybeCreate( + Root pathEntry, ImmutableSet ignoredPaths, OptionsProvider optionsProvider) { return diffAwarenesses.get(pathEntry); } } @@ -453,7 +468,12 @@ public View getCurrentView(OptionsProvider options) throws BrokenDiffAwarenessEx } @Override - public ModifiedFileSet getDiff(View oldView, View newView) throws BrokenDiffAwarenessException { + public ModifiedFileSet getDiff(@Nullable View oldView, View newView) + throws BrokenDiffAwarenessException { + if (oldView == null) { + return ModifiedFileSet.EVERYTHING_MODIFIED; + } + assertThat(oldView).isInstanceOf(ViewStub.class); assertThat(newView).isInstanceOf(ViewStub.class); ViewStub oldViewStub = (ViewStub) oldView; diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/LocalDiffAwarenessTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/LocalDiffAwarenessTest.java index 551f8d67134d2c..729fa577213099 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/LocalDiffAwarenessTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/LocalDiffAwarenessTest.java @@ -69,14 +69,18 @@ public final void initializeSettings() throws Exception { testCaseRoot.createDirectoryAndParents(); testCaseIgnoredDir = testCaseRoot.getChild("ignored-dir"); testCaseIgnoredDir.createDirectoryAndParents(); - localDiff = - (LocalDiffAwareness) - factory.maybeCreate(Root.fromPath(testCaseRoot), ImmutableSet.of(testCaseIgnoredDir)); LocalDiffAwareness.Options localDiffOptions = new LocalDiffAwareness.Options(); localDiffOptions.watchFS = true; watchFsEnabledProvider = FakeOptions.of(localDiffOptions); + localDiff = + (LocalDiffAwareness) + factory.maybeCreate( + Root.fromPath(testCaseRoot), + ImmutableSet.of(testCaseIgnoredDir), + watchFsEnabledProvider); + // Ignore test failures when run on a Mac. // // On a Mac, LocalDiffAwareness.Factory#maybeCreate will produce a MacOSXFsEventsDiffAwareness. diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/PackageFunctionTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/PackageFunctionTest.java index 603113561820f2..00bf7e507c75a4 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/PackageFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/PackageFunctionTest.java @@ -1843,15 +1843,15 @@ protected ConfiguredRuleClassProvider createRuleClassProvider() { ConfiguredRuleClassProvider.Builder builder = new ConfiguredRuleClassProvider.Builder(); // addStandardRules() may call setPrelude(), so do it first. 
TestRuleClassProvider.addStandardRules(builder); - builder.setPrelude("//tools/build_rules:test_prelude"); + builder.setPrelude("//tools/test_build_rules:test_prelude"); return builder.build(); } @Test public void testPreludeDefinedSymbolIsUsable() throws Exception { - scratch.file("tools/build_rules/BUILD"); + scratch.file("tools/test_build_rules/BUILD"); scratch.file( - "tools/build_rules/test_prelude", // + "tools/test_build_rules/test_prelude", // "foo = 'FOO'"); scratch.file( "pkg/BUILD", // @@ -1865,9 +1865,9 @@ public void testPreludeDefinedSymbolIsUsable() throws Exception { @Test public void testPreludeAutomaticallyReexportsLoadedSymbols() throws Exception { - scratch.file("tools/build_rules/BUILD"); + scratch.file("tools/test_build_rules/BUILD"); scratch.file( - "tools/build_rules/test_prelude", // + "tools/test_build_rules/test_prelude", // "load('//util:common.bzl', 'foo')"); scratch.file("util/BUILD"); scratch.file( @@ -1887,9 +1887,9 @@ public void testPreludeAutomaticallyReexportsLoadedSymbols() throws Exception { // mutation on BUILD files. @Test public void testPreludeCanExportUnderscoreSymbols() throws Exception { - scratch.file("tools/build_rules/BUILD"); + scratch.file("tools/test_build_rules/BUILD"); scratch.file( - "tools/build_rules/test_prelude", // + "tools/test_build_rules/test_prelude", // "_foo = 'FOO'"); scratch.file( "pkg/BUILD", // @@ -1903,9 +1903,9 @@ public void testPreludeCanExportUnderscoreSymbols() throws Exception { @Test public void testPreludeCanShadowUniversal() throws Exception { - scratch.file("tools/build_rules/BUILD"); + scratch.file("tools/test_build_rules/BUILD"); scratch.file( - "tools/build_rules/test_prelude", // + "tools/test_build_rules/test_prelude", // "len = 'FOO'"); scratch.file( "pkg/BUILD", // @@ -1919,9 +1919,9 @@ public void testPreludeCanShadowUniversal() throws Exception { @Test public void testPreludeCanShadowPredeclareds() throws Exception { - scratch.file("tools/build_rules/BUILD"); + scratch.file("tools/test_build_rules/BUILD"); scratch.file( - "tools/build_rules/test_prelude", // + "tools/test_build_rules/test_prelude", // "cc_library = 'FOO'"); scratch.file( "pkg/BUILD", // @@ -1943,9 +1943,9 @@ public void testPreludeCanShadowInjectedPredeclareds() throws Exception { exported_rules = {"cc_library": "BAR"} exported_to_java = {} """); - scratch.file("tools/build_rules/BUILD"); + scratch.file("tools/test_build_rules/BUILD"); scratch.file( - "tools/build_rules/test_prelude", // + "tools/test_build_rules/test_prelude", // "cc_library = 'FOO'"); scratch.file( "pkg/BUILD", // @@ -1965,9 +1965,9 @@ public void testPreludeCanShadowInjectedPredeclareds() throws Exception { @Test public void testPreludeSymbolCannotBeMutated() throws Exception { - scratch.file("tools/build_rules/BUILD"); + scratch.file("tools/test_build_rules/BUILD"); scratch.file( - "tools/build_rules/test_prelude", // + "tools/test_build_rules/test_prelude", // "foo = ['FOO']"); scratch.file( "pkg/BUILD", // @@ -1982,10 +1982,10 @@ public void testPreludeSymbolCannotBeMutated() throws Exception { @Test public void testPreludeCanAccessBzlDialectFeatures() throws Exception { - scratch.file("tools/build_rules/BUILD"); + scratch.file("tools/test_build_rules/BUILD"); // Test both bzl symbols and syntax (e.g. function defs). 
scratch.file( - "tools/build_rules/test_prelude", // + "tools/test_build_rules/test_prelude", // "def foo():", " return native.glob"); scratch.file( @@ -2011,7 +2011,7 @@ public void testPreludeNeedNotBePresent() throws Exception { @Test public void testPreludeNeedNotBePresent_evenWhenPackageIs() throws Exception { - scratch.file("tools/build_rules/BUILD"); + scratch.file("tools/test_build_rules/BUILD"); scratch.file( "pkg/BUILD", // "print('FOO')"); @@ -2023,7 +2023,7 @@ public void testPreludeNeedNotBePresent_evenWhenPackageIs() throws Exception { @Test public void testPreludeFileNotRecognizedWithoutPackage() throws Exception { scratch.file( - "tools/build_rules/test_prelude", // + "tools/test_build_rules/test_prelude", // "foo = 'FOO'"); scratch.file( "pkg/BUILD", // @@ -2038,9 +2038,9 @@ public void testPreludeFileNotRecognizedWithoutPackage() throws Exception { @Test public void testPreludeFailsWhenErrorInPreludeFile() throws Exception { - scratch.file("tools/build_rules/BUILD"); + scratch.file("tools/test_build_rules/BUILD"); scratch.file( - "tools/build_rules/test_prelude", // + "tools/test_build_rules/test_prelude", // "1//0", // <-- dynamic error "foo = 'FOO'"); scratch.file( @@ -2059,17 +2059,18 @@ public void testPreludeFailsWhenErrorInPreludeFile() throws Exception { getConfiguredTarget("//pkg:BUILD"); assertContainsEvent( - "File \"/workspace/tools/build_rules/test_prelude\", line 1, column 2, in "); + "File \"/workspace/tools/test_build_rules/test_prelude\", line 1, column 2, in" + + " "); assertContainsEvent("Error: integer division by zero"); } @Test public void testPreludeWorksEvenWhenPreludePackageInError() throws Exception { scratch.file( - "tools/build_rules/BUILD", // + "tools/test_build_rules/BUILD", // "1//0"); // <-- dynamic error scratch.file( - "tools/build_rules/test_prelude", // + "tools/test_build_rules/test_prelude", // "foo = 'FOO'"); scratch.file( "pkg/BUILD", // diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/SequencedSkyframeExecutorTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/SequencedSkyframeExecutorTest.java index 4ae993e0c626b5..0ce35b40b60189 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/SequencedSkyframeExecutorTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/SequencedSkyframeExecutorTest.java @@ -455,7 +455,7 @@ public void sync_nothingChangedWithExternalListing_reportsNoExternalKeysInDiff() } private static DiffAwareness.Factory nothingChangedDiffAwarenessFactory() { - return (pathEntry, ignoredPaths) -> + return (pathEntry, ignoredPaths, optionsProvider) -> new DiffAwareness() { @Override public View getCurrentView(OptionsProvider options) { diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/config/FlagSetsFunctionTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/config/FlagSetsFunctionTest.java index e06248c508283a..fc6f5f22388350 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/config/FlagSetsFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/config/FlagSetsFunctionTest.java @@ -30,6 +30,7 @@ import com.google.devtools.build.lib.testutil.TestRuleClassProvider; import com.google.devtools.build.skyframe.EvaluationResult; import java.util.Optional; +import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @@ -201,6 +202,183 @@ public void enforceCanonicalConfigsSupportedConfig() throws Exception { .isEqualTo("test_config_value"); } + @Test + 
public void enforceCanonicalConfigsExtraNativeFlag_commandLine() throws Exception { + scratch.file( + "test/build_settings.bzl", + """ +string_flag = rule(implementation = lambda ctx: [], build_setting = config.string(flag = True)) +"""); + scratch.file( + "test/BUILD", + """ + load("//test:build_settings.bzl", "string_flag") + string_flag( + name = "myflag", + build_setting_default = "default", + ) + """); + scratch.file( + "test/PROJECT.scl", + """ + configs = { + "test_config": ['--//test:myflag=test_config_value'], + "other_config": ['--//test:myflag=other_config_value'], + } + supported_configs = { + "test_config": "User documentation for what this config means", + } + """); + setBuildLanguageOptions("--experimental_enable_scl_dialect=true"); + BuildOptions buildOptions = createBuildOptions("--define=foo=bar"); + + FlagSetValue.Key key = + FlagSetValue.Key.create( + Label.parseCanonical("//test:PROJECT.scl"), + "test_config", + buildOptions, + /* enforceCanonical= */ true); + + var thrown = assertThrows(Exception.class, () -> executeFunction(key)); + assertThat(thrown).hasMessageThat().contains("Found [--define=foo=bar]"); + } + + @Test + @Ignore( + "It's not possible to test the Starlark flag case in this test. Somewhere the test setup" + + " doesn't recognize Starlark flags as being user options, and passes the build." + + " flagset_test.sh tests the Starlark flag case.") + public void enforceCanonicalConfigsExtraStarlarkFlag_commandLine() throws Exception { + scratch.file( + "test/build_settings.bzl", + """ +string_flag = rule(implementation = lambda ctx: [], build_setting = config.string(flag = True)) +"""); + scratch.file( + "test/BUILD", + """ + load("//test:build_settings.bzl", "string_flag") + string_flag( + name = "myflag", + build_setting_default = "default", + ) + string_flag( + name = "starlark_flags_always_affect_configuration", + build_setting_default = "default", + ) + """); + scratch.file( + "test/PROJECT.scl", + """ + configs = { + "test_config": ['--//test:myflag=test_config_value'], + "other_config": ['--//test:myflag=other_config_value'], + } + supported_configs = { + "test_config": "User documentation for what this config means", + } + """); + setBuildLanguageOptions("--experimental_enable_scl_dialect=true"); + BuildOptions buildOptions = + createBuildOptions("--//test:starlark_flags_always_affect_configuration=yes_they_do"); + + FlagSetValue.Key key = + FlagSetValue.Key.create( + Label.parseCanonical("//test:PROJECT.scl"), + "test_config", + buildOptions, + /* enforceCanonical= */ true); + + var thrown = assertThrows(Exception.class, () -> executeFunction(key)); + assertThat(thrown) + .hasMessageThat() + .contains("Found [--//test:starlark_flags_always_affect_configuration=yes_they_do]"); + } + + @Test + public void noEnforceCanonicalConfigsExtraFlag_commandLine() throws Exception { + scratch.file( + "test/build_settings.bzl", + """ +string_flag = rule(implementation = lambda ctx: [], build_setting = config.string(flag = True)) +"""); + scratch.file( + "test/BUILD", + """ + load("//test:build_settings.bzl", "string_flag") + string_flag( + name = "myflag", + build_setting_default = "default", + ) + """); + scratch.file( + "test/PROJECT.scl", + """ + configs = { + "test_config": ['--//test:myflag=test_config_value'], + "other_config": ['--//test:myflag=other_config_value'], + } + supported_configs = { + "test_config": "User documentation for what this config means", + } + """); + setBuildLanguageOptions("--experimental_enable_scl_dialect=true"); + BuildOptions 
buildOptions = createBuildOptions("--define=foo=bar"); + + FlagSetValue.Key key = + FlagSetValue.Key.create( + Label.parseCanonical("//test:PROJECT.scl"), + "test_config", + buildOptions, + /* enforceCanonical= */ false); + + var unused = executeFunction(key); + assertNoEvents(); + } + + @Test + public void enforceCanonicalConfigsExtraFlag_notConfigAffecting() throws Exception { + scratch.file( + "test/build_settings.bzl", + """ +string_flag = rule(implementation = lambda ctx: [], build_setting = config.string(flag = True)) +"""); + scratch.file( + "test/BUILD", + """ + load("//test:build_settings.bzl", "string_flag") + string_flag( + name = "myflag", + build_setting_default = "default", + ) + """); + scratch.file( + "test/PROJECT.scl", + """ + configs = { + "test_config": ['--//test:myflag=test_config_value'], + "other_config": ['--//test:myflag=other_config_value'], + } + supported_configs = { + "test_config": "User documentation for what this config means", + } + """); + setBuildLanguageOptions( + "--experimental_enable_scl_dialect=true", "--experimental_bzl_visibility"); + + BuildOptions buildOptions = createBuildOptions(); + + FlagSetValue.Key key = + FlagSetValue.Key.create( + Label.parseCanonical("//test:PROJECT.scl"), + "test_config", + buildOptions, + /* enforceCanonical= */ true); + + var unused = executeFunction(key); + assertNoEvents(); + } + @Test public void enforceCanonicalConfigsUnsupportedConfig() throws Exception { createStringFlag("//test:myflag", /* defaultValue= */ "default"); diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/rewinding/LostImportantOutputHandlerModule.java b/src/test/java/com/google/devtools/build/lib/skyframe/rewinding/LostImportantOutputHandlerModule.java index a458810049b44f..94f3fd1ad07c1c 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/rewinding/LostImportantOutputHandlerModule.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/rewinding/LostImportantOutputHandlerModule.java @@ -34,8 +34,10 @@ import com.google.devtools.build.lib.exec.ModuleActionContextRegistry; import com.google.devtools.build.lib.runtime.BlazeModule; import com.google.devtools.build.lib.runtime.CommandEnvironment; +import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import java.io.IOException; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Function; @@ -93,6 +95,11 @@ public ImmutableMap processRunfilesAndGetLostArtifacts( InputMetadataProvider metadataProvider) { return getLostOutputs(runfiles.values(), expander, metadataProvider); } + + @Override + public void processTestOutputs(List testOutputs) { + throw new UnsupportedOperationException(); + } }); } diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/serialization/BUILD b/src/test/java/com/google/devtools/build/lib/skyframe/serialization/BUILD index 07d152fdc384d6..2e121c0295b26f 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/serialization/BUILD +++ b/src/test/java/com/google/devtools/build/lib/skyframe/serialization/BUILD @@ -90,7 +90,9 @@ java_test( srcs = ["MemoizerTest.java"], deps = [ "//src/main/java/com/google/devtools/build/lib/skyframe/serialization", + "//src/main/java/com/google/devtools/build/lib/skyframe/serialization/testutils", "//src/main/java/com/google/devtools/build/lib/skyframe/serialization/testutils:round-tripping", + "//src/main/java/com/google/devtools/build/lib/util:hash_codes", "//third_party:guava", "//third_party:jsr305", 
"//third_party:junit4", diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/serialization/MemoizerTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/serialization/MemoizerTest.java index 74588478378990..62f093c7296c30 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/serialization/MemoizerTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/serialization/MemoizerTest.java @@ -15,9 +15,12 @@ import static com.google.common.truth.Truth.assertThat; import static com.google.devtools.build.lib.skyframe.serialization.strings.UnsafeStringCodec.stringCodec; +import static com.google.devtools.build.lib.util.HashCodes.hashObjects; import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.skyframe.serialization.testutils.RoundTripping; +import com.google.devtools.build.lib.skyframe.serialization.testutils.SerializationTester; +import com.google.errorprone.annotations.Keep; import com.google.protobuf.CodedInputStream; import com.google.protobuf.CodedOutputStream; import java.io.IOException; @@ -144,7 +147,7 @@ public void serializedLeaf_canBeBackreferenced() throws Exception { assertThat(((Wrapper) subject.get(0)).value).isNotSameInstanceAs(subject.get(1)); ImmutableList deserialized = - RoundTripping.roundTripMemoized(subject, new WrapperLeafCodec()); + RoundTripping.roundTripMemoized(subject, wrapperLeafCodec()); assertThat(subject).isEqualTo(deserialized); // The "foo" instance memoized via serializeLeaf can be backreferenced by a codec that isn't // explicitly invoked via serializeLeaf. @@ -161,13 +164,28 @@ public void serializeLeaf_canBackreferenceNonSerializeLeaf() throws Exception { assertThat(subject.get(0)).isNotSameInstanceAs(((Wrapper) subject.get(1)).value); ImmutableList deserialized = - RoundTripping.roundTripMemoized(subject, new WrapperLeafCodec()); + RoundTripping.roundTripMemoized(subject, wrapperLeafCodec()); assertThat(subject).isEqualTo(deserialized); // The "foo" instance memoized via serialize can be backreferenced by a codec that uses // serializeLeaf. assertThat(deserialized.get(0)).isSameInstanceAs(((Wrapper) deserialized.get(1)).value); } + @Test + public void serializeAsBothLeafAndContainingSharedValue() throws Exception { + // Serializes the same Wrapper instance in two ways. Once using WrapperWithSharedStringCodec and + // once using WrapperLeafCodec. This would cause them to use the same memoization which would + // lead to an error without special handling. + TwoWrappers wrappers = new TwoWrappers(); + wrappers.one = new Wrapper("value"); + wrappers.two = wrappers.one; + + new SerializationTester(wrappers) + .makeMemoizingAndAllowFutureBlocking(/* allowFutureBlocking= */ true) + .addCodec(new WrapperWithSharedStringCodec()) + .runTests(); + } + /** An example class that allows {@link LeafObjectCodec} to be exercised. 
*/ private static class Wrapper { private final String value; @@ -190,7 +208,13 @@ public int hashCode() { } } + private static WrapperLeafCodec wrapperLeafCodec() { + return WrapperLeafCodec.INSTANCE; + } + private static final class WrapperLeafCodec extends LeafObjectCodec { + private static final WrapperLeafCodec INSTANCE = new WrapperLeafCodec(); + @Override public Class getEncodedClass() { return Wrapper.class; @@ -214,6 +238,128 @@ public Wrapper deserialize(LeafDeserializationContext context, CodedInputStream } } + private static class WrapperWithSharedStringCodec extends DeferredObjectCodec { + @Override + public Class getEncodedClass() { + return Wrapper.class; + } + + @Override + public boolean autoRegister() { + return false; + } + + @Override + public void serialize(SerializationContext context, Wrapper obj, CodedOutputStream codedOut) + throws SerializationException, IOException { + context.putSharedValue( + obj.value, /* distinguisher= */ null, DeferredStringCodec.INSTANCE, codedOut); + } + + @Override + public DeferredValue deserializeDeferred( + AsyncDeserializationContext context, CodedInputStream codedIn) + throws SerializationException, IOException { + WrapperBuilder builder = new WrapperBuilder(); + context.getSharedValue( + codedIn, + /* distinguisher= */ null, + DeferredStringCodec.INSTANCE, + builder, + WrapperBuilder::setValue); + return builder; + } + + private static class WrapperBuilder implements DeferredValue { + private String value; + + private static void setValue(WrapperBuilder builder, Object value) { + builder.value = (String) value; + } + + @Override + public Wrapper call() { + return new Wrapper(value); + } + } + } + + private static class DeferredStringCodec extends DeferredObjectCodec { + private static final DeferredStringCodec INSTANCE = new DeferredStringCodec(); + + @Override + public boolean autoRegister() { + return false; + } + + @Override + public Class getEncodedClass() { + return String.class; + } + + @Override + public void serialize(SerializationContext context, String obj, CodedOutputStream codedOut) + throws SerializationException, IOException { + codedOut.writeStringNoTag(obj); + } + + @Override + public DeferredValue deserializeDeferred( + AsyncDeserializationContext context, CodedInputStream codedIn) + throws SerializationException, IOException { + String value = codedIn.readString(); + return () -> value; + } + } + + private static class TwoWrappers { + private Wrapper one; + private Wrapper two; + + @Override + public boolean equals(Object obj) { + if (obj instanceof TwoWrappers that) { + return one.equals(that.one) && two.equals(that.two); + } + return false; + } + + @Override + public int hashCode() { + return hashObjects(one, two); + } + + private static void setOne(TwoWrappers parent, Object value) { + parent.one = (Wrapper) value; + } + } + + @Keep + private static class TwoWrappersCodec extends AsyncObjectCodec { + @Override + public Class getEncodedClass() { + return TwoWrappers.class; + } + + @Override + public void serialize(SerializationContext context, TwoWrappers obj, CodedOutputStream codedOut) + throws SerializationException, IOException { + context.serialize(obj.one, codedOut); + context.serializeLeaf(obj.two, wrapperLeafCodec(), codedOut); + } + + @Override + public TwoWrappers deserializeAsync( + AsyncDeserializationContext context, CodedInputStream codedIn) + throws SerializationException, IOException { + TwoWrappers wrappers = new TwoWrappers(); + context.registerInitialValue(wrappers); + 
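+      // Mirrors serialize(): field `one` is read back through the regular memoizing
+      // context, while `two` below goes through the leaf codec, so the same Wrapper
+      // instance is exercised by both memoization paths.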
context.deserialize(codedIn, wrappers, TwoWrappers::setOne); + wrappers.two = context.deserializeLeaf(codedIn, wrapperLeafCodec()); + return wrappers; + } + } + /** Asserts that {@code value} has the linked list structure {@code A -> B -> C}. */ private static void assertABC(DummyLinkedList value) { assertThat(value.getValue()).isEqualTo("A"); diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/serialization/SharedValueDeserializationContextTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/serialization/SharedValueDeserializationContextTest.java index 0381fd19c2ba67..cb4ecd431a89ac 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/serialization/SharedValueDeserializationContextTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/serialization/SharedValueDeserializationContextTest.java @@ -13,6 +13,7 @@ // limitations under the License. package com.google.devtools.build.lib.skyframe.serialization; +import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.truth.Truth.assertThat; import static com.google.devtools.build.lib.skyframe.serialization.NotNestedSet.createRandomLeafArray; import static com.google.devtools.build.lib.skyframe.serialization.testutils.Dumper.dumpStructureWithEquivalenceReduction; @@ -26,6 +27,7 @@ import com.google.devtools.build.lib.skyframe.serialization.testutils.GetRecordingStore; import com.google.devtools.build.lib.skyframe.serialization.testutils.GetRecordingStore.GetRequest; import com.google.devtools.build.lib.skyframe.serialization.testutils.SerializationTester; +import com.google.errorprone.annotations.Keep; import com.google.protobuf.ByteString; import com.google.protobuf.CodedInputStream; import com.google.protobuf.CodedOutputStream; @@ -34,6 +36,7 @@ import com.google.testing.junit.testparameterinjector.TestParameters; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import java.util.Random; import java.util.concurrent.ForkJoinPool; import org.junit.Test; @@ -257,6 +260,101 @@ public void valueDependsOnFuture( .runTests(); } + @Test + public void internedValueWithSharedElement() throws Exception { + new SerializationTester(InternedValue.create(101), InternedValue.create(45678)) + .makeMemoizingAndAllowFutureBlocking(/* allowFutureBlocking= */ true) + .runTests(); + } + + private static class InternedValue { + private Integer value; + + private static InternedValue create(int value) { + InternedValue result = new InternedValue(); + result.value = value; + return result; + } + + @Override + public int hashCode() { + return value; + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof InternedValue that) { + return Objects.equals(value, that.value); + } + return false; + } + } + + @Keep + private static class InternedValueCodec extends InterningObjectCodec { + @Override + public Class getEncodedClass() { + return InternedValue.class; + } + + @Override + public void serialize( + SerializationContext context, InternedValue obj, CodedOutputStream codedOut) + throws SerializationException, IOException { + context.putSharedValue( + obj.value, /* distinguisher= */ null, DeferredIntegerCodec.INSTANCE, codedOut); + } + + @Override + public InternedValue deserializeInterned( + AsyncDeserializationContext context, CodedInputStream codedIn) + throws SerializationException, IOException { + InternedValue value = new InternedValue(); + context.getSharedValue( + codedIn, + /* distinguisher= */ null, + 
DeferredIntegerCodec.INSTANCE, + value, + (parent, v) -> parent.value = (Integer) v); + return value; + } + + @Override + @SuppressWarnings("CanIgnoreReturnValueSuggester") // fake implementation just returns input + public InternedValue intern(InternedValue interned) { + checkNotNull(interned.value); + return interned; + } + } + + private static class DeferredIntegerCodec extends DeferredObjectCodec { + private static final DeferredIntegerCodec INSTANCE = new DeferredIntegerCodec(); + + @Override + public Class getEncodedClass() { + return Integer.class; + } + + @Override + public boolean autoRegister() { + return false; + } + + @Override + public void serialize(SerializationContext context, Integer obj, CodedOutputStream codedOut) + throws SerializationException, IOException { + codedOut.writeInt32NoTag(obj); + } + + @Override + public DeferredValue deserializeDeferred( + AsyncDeserializationContext context, CodedInputStream codedIn) + throws SerializationException, IOException { + int value = codedIn.readInt32(); + return () -> value; + } + } + private ListenableFuture deserializeWithExecutor( ObjectCodecs codecs, FingerprintValueService fingerprintValueService, ByteString data) { var task = diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/ToolchainsForTargetsTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/ToolchainsForTargetsTest.java index 734d7b5749dc79..1078f88a03ae34 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/ToolchainsForTargetsTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/ToolchainsForTargetsTest.java @@ -403,6 +403,41 @@ def _impl(ctx): assertThat(update("//test:custom_rule_name").hasError()).isFalse(); } + @Test + public void basicToolchainsWithAliasNoAutoExecGroups_test() throws Exception { + scratch.appendFile( + "toolchain/exec_group_rule.bzl", + """ + def _impl(ctx): + if "//toolchain:test_toolchain" in ctx.toolchains: + fail("this is not expected, it's an exec gp toolchain") + if ctx.exec_groups["temp"].toolchains["//toolchain:test_toolchain"] == None: + fail("this is not expected, it's an exec gp toolchain") + return [] + + my_exec_group_rule = rule( + implementation = _impl, + exec_groups = { + "temp": exec_group( + toolchains = ["//toolchain:test_toolchain"], + ), + }, + ) + """); + + scratch.file( + "a/BUILD", + """ + load("//toolchain:exec_group_rule.bzl", "my_exec_group_rule") + + my_exec_group_rule(name = "a") + """); + + useConfiguration("--incompatible_auto_exec_groups"); + + assertThat(update("//a:a").hasError()).isFalse(); + } + @Test public void execPlatform() throws Exception { // Add some platforms and custom constraints. diff --git a/src/test/java/com/google/devtools/build/lib/starlark/StarlarkAspectsToolchainPropagationTest.java b/src/test/java/com/google/devtools/build/lib/starlark/StarlarkAspectsToolchainPropagationTest.java new file mode 100644 index 00000000000000..ac2b2750e244f6 --- /dev/null +++ b/src/test/java/com/google/devtools/build/lib/starlark/StarlarkAspectsToolchainPropagationTest.java @@ -0,0 +1,1045 @@ +// Copyright 2024 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package com.google.devtools.build.lib.starlark; + +import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.common.truth.Truth.assertThat; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; +import com.google.common.collect.Sets; +import com.google.devtools.build.lib.analysis.util.AnalysisTestCase; +import com.google.devtools.build.lib.skyframe.AspectKeyCreator.AspectKey; +import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey; +import com.google.devtools.build.skyframe.SkyKey; +import com.google.testing.junit.testparameterinjector.TestParameterInjector; +import com.google.testing.junit.testparameterinjector.TestParameters; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; + +/** Tests for Starlark aspects propagation to targets toolchain dependencies. */ +@RunWith(TestParameterInjector.class) +public final class StarlarkAspectsToolchainPropagationTest extends AnalysisTestCase { + + /** + * Sets up 3 toolchain rules: + * + *

+   * test_toolchain: has no attribute dependency and no advertised providers
+   *
+   * test_toolchain_with_provider: has an advertised provider but no attribute dependency
+   *
+   * test_toolchain_with_dep: has an attribute dependency but no advertised providers
+   *
+   * We also set up 3 toolchain types:
+   *
+   * toolchain_type_1: resolved by `foo` of rule `test_toolchain`
+   *
+   * toolchain_type_2: resolved by `foo_with_provider` of rule `test_toolchain_with_provider`
+   *
+   * toolchain_type_3: resolved by `foo_with_dep` of rule `test_toolchain_with_dep`
+   *
+   *
Toolchain `foo_for_all` resolved both toolchain_type_2 and toolchain_type_3 + */ + public void createToolchainsAndPlatforms() throws Exception { + scratch.overwriteFile( + "rule/test_toolchain.bzl", + """ + MyProvider = provider() + + def _impl(ctx): + return [platform_common.ToolchainInfo( + tool = ctx.executable._tool, + files_to_run = ctx.attr._tool[DefaultInfo].files_to_run, + ), MyProvider(value = str(ctx.label))] + + test_toolchain = rule( + implementation = _impl, + attrs = { + "_tool": attr.label( + default = "//toolchain:a_tool", + executable = True, + cfg = "exec", + ), + }, + ) + + test_toolchain_with_provider = rule( + implementation = _impl, + attrs = { + "_tool": attr.label( + default = "//toolchain:a_tool", + executable = True, + cfg = "exec", + ), + }, + provides = [MyProvider] + ) + + test_toolchain_with_dep = rule( + implementation = _impl, + attrs = { + "_tool": attr.label( + default = "//toolchain:a_tool", + executable = True, + cfg = "exec", + ), + "dep": attr.label(), + }, + ) + + """); + scratch.overwriteFile( + "rule/BUILD", + """ + exports_files(["test_toolchain/bzl"]) + + toolchain_type(name = "toolchain_type_1") + alias(name = "toolchain_type_1_alias", actual = ":toolchain_type_1") + + toolchain_type(name = "toolchain_type_2") + + toolchain_type(name = "toolchain_type_3") + """); + scratch.overwriteFile( + "toolchain/BUILD", + """ + load("//rule:test_toolchain.bzl", "test_toolchain", + "test_toolchain_with_provider", "test_toolchain_with_dep") + + genrule( + name = "a_tool", + outs = ["atool"], + cmd = "", + executable = True, + ) + + test_toolchain( + name = "foo", + ) + + toolchain( + name = "foo_toolchain", + toolchain = ":foo", + toolchain_type = "//rule:toolchain_type_1", + ) + + test_toolchain_with_provider( + name = "foo_with_provider", + ) + + toolchain( + name = "foo_toolchain_with_provider", + toolchain = ":foo_with_provider", + toolchain_type = "//rule:toolchain_type_2", + ) + + sh_library(name = "toolchain_dep") + + test_toolchain_with_dep( + name = "foo_with_dep", + dep = ":toolchain_dep", + ) + + toolchain( + name = "foo_toolchain_with_dep", + toolchain = ":foo_with_dep", + toolchain_type = "//rule:toolchain_type_3", + ) + + test_toolchain(name = "foo_for_all") + + toolchain( + name = "foo_type_2", + toolchain = ":foo_for_all", + toolchain_type = "//rule:toolchain_type_2", + ) + + toolchain( + name = "foo_type_3", + toolchain = ":foo_for_all", + toolchain_type = "//rule:toolchain_type_3", + ) + """); + + scratch.overwriteFile( + "platforms/BUILD", + """ + constraint_setting(name = "setting") + + constraint_value( + name = "constraint_1", + constraint_setting = ":setting", + ) + + constraint_value( + name = "constraint_2", + constraint_setting = ":setting", + ) + + platform( + name = "platform_1", + constraint_values = [":constraint_1"], + ) + + platform( + name = "platform_2", + constraint_values = [":constraint_2"], + exec_properties = { + "watermelon.ripeness": "unripe", + "watermelon.color": "red", + }, + ) + """); + } + + @Before + public void setup() throws Exception { + createToolchainsAndPlatforms(); + } + + @Test + @TestParameters({ + "{autoExecGroups: True}", + "{autoExecGroups: False}", + }) + public void aspectPropagatesToToolchain_singleDepAdded(String autoExecGroups) throws Exception { + scratch.file( + "test/defs.bzl", + """ + def _impl(target, ctx): + return [] + + toolchain_aspect = aspect( + implementation = _impl, + toolchains_aspects = ['//rule:toolchain_type_1'], + ) + + no_toolchain_aspect = aspect( + implementation = _impl, + ) 
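+            # no_toolchain_aspect is the baseline: same implementation but without
+            # toolchains_aspects, so the extra Skyframe dependency asserted below is
+            # attributable to toolchain propagation alone.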
+ + def _rule_impl(ctx): + pass + + r1 = rule( + implementation = _rule_impl, + toolchains = ['//rule:toolchain_type_1'], + ) + """); + scratch.file( + "test/BUILD", + """ + load('//test:defs.bzl', 'r1') + r1(name = 't1') + """); + useConfiguration( + "--extra_toolchains=//toolchain:foo_toolchain", + "--incompatible_auto_exec_groups=" + autoExecGroups); + + var unused = + update( + ImmutableList.of( + "//test:defs.bzl%toolchain_aspect", "//test:defs.bzl%no_toolchain_aspect"), + "//test:t1"); + + var toolchainAspect = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%toolchain_aspect")); + var toolchainAspectNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(toolchainAspect)) + .findFirst() + .orElse(null); + assertThat(toolchainAspectNode).isNotNull(); + + var noToolchainAspect = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%no_toolchain_aspect")); + var noToolchainAspectNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(noToolchainAspect)) + .findFirst() + .orElse(null); + assertThat(noToolchainAspectNode).isNotNull(); + + var toolchainAspectDirectDeps = + ImmutableSet.copyOf(Iterables.filter(toolchainAspectNode.getDirectDeps(), SkyKey.class)); + var noToolchainAspectDirectDeps = + ImmutableSet.copyOf(Iterables.filter(noToolchainAspectNode.getDirectDeps(), SkyKey.class)); + + // only one extra dependency is added for the toolchain propagating aspect + assertThat(toolchainAspectDirectDeps.size() - noToolchainAspectDirectDeps.size()).isEqualTo(1); + assertThat(toolchainAspectDirectDeps).containsAtLeastElementsIn(noToolchainAspectDirectDeps); + + // the extra dependency is the aspect application on the target's resolved toolchain + var aspectOnToolchainDep = + Iterables.getOnlyElement( + Sets.difference(toolchainAspectDirectDeps, noToolchainAspectDirectDeps)); + assertThat(aspectOnToolchainDep).isInstanceOf(AspectKey.class); + assertThat(((AspectKey) aspectOnToolchainDep).getAspectName()) + .isEqualTo("//test:defs.bzl%toolchain_aspect"); + assertThat(((AspectKey) aspectOnToolchainDep).getLabel().toString()) + .isEqualTo("//toolchain:foo"); + } + + @Test + public void aspectPropagatesToExecGpToolchain_singleDepAdded() throws Exception { + scratch.file( + "test/defs.bzl", + """ + def _impl(target, ctx): + return [] + + toolchain_aspect = aspect( + implementation = _impl, + toolchains_aspects = ['//rule:toolchain_type_1'], + ) + + no_toolchain_aspect = aspect( + implementation = _impl, + ) + + def _rule_impl(ctx): + pass + + r1 = rule( + implementation = _rule_impl, + exec_groups = {"gp": exec_group(toolchains = ['//rule:toolchain_type_1'])}, + ) + """); + scratch.file( + "test/BUILD", + """ + load('//test:defs.bzl', 'r1') + r1(name = 't1') + """); + useConfiguration("--extra_toolchains=//toolchain:foo_toolchain"); + + var unused = + update( + ImmutableList.of( + "//test:defs.bzl%toolchain_aspect", "//test:defs.bzl%no_toolchain_aspect"), + "//test:t1"); + + var toolchainAspect = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%toolchain_aspect")); + var toolchainAspectNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(toolchainAspect)) + .findFirst() + .orElse(null); + assertThat(toolchainAspectNode).isNotNull(); + + var noToolchainAspect = + Iterables.getOnlyElement(getAspectKeys("//test:t1", 
"//test:defs.bzl%no_toolchain_aspect")); + var noToolchainAspectNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(noToolchainAspect)) + .findFirst() + .orElse(null); + assertThat(noToolchainAspectNode).isNotNull(); + + var toolchainAspectDirectDeps = + ImmutableSet.copyOf(Iterables.filter(toolchainAspectNode.getDirectDeps(), SkyKey.class)); + var noToolchainAspectDirectDeps = + ImmutableSet.copyOf(Iterables.filter(noToolchainAspectNode.getDirectDeps(), SkyKey.class)); + + // only one extra dependency is added for the toolchain propagating aspect + assertThat(toolchainAspectDirectDeps.size() - noToolchainAspectDirectDeps.size()).isEqualTo(1); + assertThat(toolchainAspectDirectDeps).containsAtLeastElementsIn(noToolchainAspectDirectDeps); + + // the extra dependency is the aspect application on the target's resolved toolchain + var aspectOnToolchainDep = + Iterables.getOnlyElement( + Sets.difference(toolchainAspectDirectDeps, noToolchainAspectDirectDeps)); + assertThat(aspectOnToolchainDep).isInstanceOf(AspectKey.class); + assertThat(((AspectKey) aspectOnToolchainDep).getAspectName()) + .isEqualTo("//test:defs.bzl%toolchain_aspect"); + assertThat(((AspectKey) aspectOnToolchainDep).getLabel().toString()) + .isEqualTo("//toolchain:foo"); + } + + @Test + @TestParameters({ + "{autoExecGroups: True}", + "{autoExecGroups: False}", + }) + public void aspectHasToolchains_dependencyEdgeCreated(String autoExecGroups) throws Exception { + scratch.file( + "test/defs.bzl", + """ + def _impl(target, ctx): + return [] + + toolchain_aspect = aspect( + implementation = _impl, + toolchains_aspects = ['//rule:toolchain_type_1'], + toolchains = ['//rule:toolchain_type_2'], + ) + + def _rule_impl(ctx): + pass + + r1 = rule( + implementation = _rule_impl, + toolchains = ['//rule:toolchain_type_1'], + ) + """); + scratch.file( + "test/BUILD", + """ + load('//test:defs.bzl', 'r1') + r1(name = 't1') + """); + useConfiguration( + "--extra_toolchains=//toolchain:foo_toolchain,//toolchain:foo_toolchain_with_provider", + "--incompatible_auto_exec_groups=" + autoExecGroups); + + var unused = update(ImmutableList.of("//test:defs.bzl%toolchain_aspect"), "//test:t1"); + + var toolchainAspect = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%toolchain_aspect")); + var toolchainAspectNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(toolchainAspect)) + .findFirst() + .orElse(null); + assertThat(toolchainAspectNode).isNotNull(); + + // A dependency edge is created from the aspect to its own toolchain but not to the target's + // toolchain. 
+ var aspectConfiguredTargetDeps = + Iterables.transform( + Iterables.filter( + toolchainAspectNode.getDirectDeps(), d -> d instanceof ConfiguredTargetKey), + d -> ((ConfiguredTargetKey) d).getLabel().toString()); + assertThat(aspectConfiguredTargetDeps) + .containsExactly("//toolchain:foo_with_provider", "//test:t1"); + } + + @Test + @TestParameters({ + "{autoExecGroups: True}", + "{autoExecGroups: False}", + }) + public void aspectPropagatesToToolchainUsingToolchainTypeAlias(String autoExecGroups) + throws Exception { + scratch.file( + "test/defs.bzl", + """ + def _impl(target, ctx): + return [] + + toolchain_aspect = aspect( + implementation = _impl, + toolchains_aspects = ['//rule:toolchain_type_1'], + ) + + no_toolchain_aspect = aspect( + implementation = _impl, + ) + + def _rule_impl(ctx): + pass + + r1 = rule( + implementation = _rule_impl, + toolchains = ['//rule:toolchain_type_1_alias'], + ) + """); + scratch.file( + "test/BUILD", + """ + load('//test:defs.bzl', 'r1') + r1(name = 't1') + """); + useConfiguration( + "--extra_toolchains=//toolchain:foo_toolchain", + "--incompatible_auto_exec_groups=" + autoExecGroups); + + var unused = + update( + ImmutableList.of( + "//test:defs.bzl%toolchain_aspect", "//test:defs.bzl%no_toolchain_aspect"), + "//test:t1"); + + var toolchainAspect = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%toolchain_aspect")); + var toolchainAspectNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(toolchainAspect)) + .findFirst() + .orElse(null); + assertThat(toolchainAspectNode).isNotNull(); + + var noToolchainAspect = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%no_toolchain_aspect")); + var noToolchainAspectNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(noToolchainAspect)) + .findFirst() + .orElse(null); + assertThat(noToolchainAspectNode).isNotNull(); + + var toolchainAspectDirectDeps = + ImmutableSet.copyOf(Iterables.filter(toolchainAspectNode.getDirectDeps(), SkyKey.class)); + var noToolchainAspectDirectDeps = + ImmutableSet.copyOf(Iterables.filter(noToolchainAspectNode.getDirectDeps(), SkyKey.class)); + + // only one extra dependency is added for the toolchain propagating aspect + assertThat(toolchainAspectDirectDeps.size() - noToolchainAspectDirectDeps.size()).isEqualTo(1); + assertThat(toolchainAspectDirectDeps).containsAtLeastElementsIn(noToolchainAspectDirectDeps); + + // the extra dependency is the aspect application on the target's resolved toolchain + var aspectOnToolchainDep = + Iterables.getOnlyElement( + Sets.difference(toolchainAspectDirectDeps, noToolchainAspectDirectDeps)); + assertThat(aspectOnToolchainDep).isInstanceOf(AspectKey.class); + assertThat(((AspectKey) aspectOnToolchainDep).getAspectName()) + .isEqualTo("//test:defs.bzl%toolchain_aspect"); + assertThat(((AspectKey) aspectOnToolchainDep).getLabel().toString()) + .isEqualTo("//toolchain:foo"); + } + + @Test + @TestParameters({ + "{autoExecGroups: True}", + "{autoExecGroups: False}", + }) + public void toolchainPropagationBasedOnAspectRequiredProviders(String autoExecGroups) + throws Exception { + scratch.file( + "test/defs.bzl", + """ + load("//rule:test_toolchain.bzl", "MyProvider") + + def _impl(target, ctx): + return [] + + toolchain_aspect = aspect( + implementation = _impl, + toolchains_aspects = ['//rule:toolchain_type_1', '//rule:toolchain_type_2'], + required_providers = 
[MyProvider], + ) + + def _rule_impl(ctx): + pass + + r1 = rule( + implementation = _rule_impl, + toolchains = ['//rule:toolchain_type_1'], + exec_groups = {"gp": exec_group(toolchains = ['//rule:toolchain_type_2'])}, + ) + """); + scratch.file( + "test/BUILD", + """ + load('//test:defs.bzl', 'r1') + r1(name = 't1') + """); + useConfiguration( + "--extra_toolchains=//toolchain:foo_toolchain,//toolchain:foo_toolchain_with_provider", + "--incompatible_auto_exec_groups=" + autoExecGroups); + + var unused = update(ImmutableList.of("//test:defs.bzl%toolchain_aspect"), "//test:t1"); + + var aspectOnTarget = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%toolchain_aspect")); + var aspectOnTargetNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(aspectOnTarget)) + .findFirst() + .orElse(null); + assertThat(aspectOnTargetNode).isNotNull(); + + // aspect propagated only to //toolchain:foo_with_provider + var aspectOnToolchain = + Iterables.getOnlyElement( + Iterables.filter(aspectOnTargetNode.getDirectDeps(), AspectKey.class)); + assertThat(aspectOnToolchain.getLabel().toString()).isEqualTo("//toolchain:foo_with_provider"); + assertThat(aspectOnToolchain.getAspectName()).isEqualTo("//test:defs.bzl%toolchain_aspect"); + } + + @Test + public void aspectPropagatesToToolchainDeps() throws Exception { + scratch.file( + "test/defs.bzl", + """ + def _impl(target, ctx): + return [] + + toolchain_aspect = aspect( + implementation = _impl, + toolchains_aspects = ['//rule:toolchain_type_3'], + attr_aspects = ['dep'], + ) + + def _rule_impl(ctx): + pass + + r1 = rule( + implementation = _rule_impl, + exec_groups = {"gp": exec_group(toolchains = ['//rule:toolchain_type_3'])}, + ) + """); + scratch.file( + "test/BUILD", + """ + load('//test:defs.bzl', 'r1') + r1(name = 't1') + """); + useConfiguration("--extra_toolchains=//toolchain:foo_toolchain_with_dep"); + + var unused = update(ImmutableList.of("//test:defs.bzl%toolchain_aspect"), "//test:t1"); + + var aspectOnTarget = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%toolchain_aspect")); + var aspectOnTargetNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(aspectOnTarget)) + .findFirst() + .orElse(null); + assertThat(aspectOnTargetNode).isNotNull(); + + var aspectOnToolchain = + Iterables.getOnlyElement( + Iterables.filter(aspectOnTargetNode.getDirectDeps(), AspectKey.class)); + var aspectOnToolchainNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(aspectOnToolchain)) + .findFirst() + .orElse(null); + assertThat(aspectOnToolchainNode).isNotNull(); + assertThat(aspectOnToolchain.getLabel().toString()).isEqualTo("//toolchain:foo_with_dep"); + + var aspectOnToolchainDep = + Iterables.getOnlyElement( + Iterables.filter(aspectOnToolchainNode.getDirectDeps(), AspectKey.class)); + assertThat(aspectOnToolchainDep.getLabel().toString()).isEqualTo("//toolchain:toolchain_dep"); + assertThat(aspectOnToolchainDep.getAspectName()).isEqualTo("//test:defs.bzl%toolchain_aspect"); + } + + @Test + public void requiredAspectPropagatesToToolchain() throws Exception { + scratch.file( + "test/defs.bzl", + """ + def _impl(target, ctx): + return [] + + required_aspect = aspect(implementation = _impl) + + toolchain_aspect = aspect( + implementation = _impl, + toolchains_aspects = ['//rule:toolchain_type_1'], + requires = 
[required_aspect], + ) + + def _rule_impl(ctx): + pass + + r1 = rule( + implementation = _rule_impl, + exec_groups = {"gp": exec_group(toolchains = ['//rule:toolchain_type_1'])}, + ) + """); + scratch.file( + "test/BUILD", + """ + load('//test:defs.bzl', 'r1') + r1(name = 't1') + """); + useConfiguration("--extra_toolchains=//toolchain:foo_toolchain"); + + var unused = update(ImmutableList.of("//test:defs.bzl%toolchain_aspect"), "//test:t1"); + + var aspectOnTarget = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%toolchain_aspect")); + var aspectOnTargetNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(aspectOnTarget)) + .findFirst() + .orElse(null); + assertThat(aspectOnTargetNode).isNotNull(); + + var aspectsDeps = + Iterables.transform( + Iterables.filter(aspectOnTargetNode.getDirectDeps(), AspectKey.class), + k -> k.getAspectName() + " on " + k.getLabel().toString()); + assertThat(aspectsDeps).hasSize(3); + // toolchain_aspect requires required_aspect so required_aspect will be propagated before + // toolchain_aspect to //test:t1 and its toolchain + assertThat(aspectsDeps) + .containsExactly( + "//test:defs.bzl%required_aspect on //test:t1", + "//test:defs.bzl%toolchain_aspect on //toolchain:foo", + "//test:defs.bzl%required_aspect on //toolchain:foo"); + } + + @Test + @TestParameters({ + "{autoExecGroups: True}", + "{autoExecGroups: False}", + }) + public void aspectOnAspectPropagateToToolchain(String autoExecGroups) throws Exception { + scratch.file( + "test/defs.bzl", + """ + Prov1 = provider() + Prov2 = provider() + + def _impl(target, ctx): + return [] + + def _impl_1(target, ctx): + return [Prov1()] + + def _impl_2(target, ctx): + return [Prov2()] + + toolchain_aspect_1 = aspect( + implementation = _impl, + toolchains_aspects = ['//rule:toolchain_type_1'], + required_aspect_providers = [Prov1] + ) + + no_toolchain_aspect = aspect( + implementation = _impl_1, + provides = [Prov1], + required_aspect_providers = [Prov2] + ) + + toolchain_aspect_2 = aspect( + implementation = _impl_2, + toolchains_aspects = ['//rule:toolchain_type_1'], + provides = [Prov2], + ) + + def _rule_impl(ctx): + pass + + r1 = rule( + implementation = _rule_impl, + toolchains = ['//rule:toolchain_type_1'], + ) + """); + scratch.file( + "test/BUILD", + """ + load('//test:defs.bzl', 'r1') + r1(name = 't1') + """); + useConfiguration( + "--extra_toolchains=//toolchain:foo_toolchain", + "--incompatible_auto_exec_groups=" + autoExecGroups); + + var unused = + update( + ImmutableList.of( + "//test:defs.bzl%toolchain_aspect_2", + "//test:defs.bzl%no_toolchain_aspect", "//test:defs.bzl%toolchain_aspect_1"), + "//test:t1"); + + var aspectOnTarget = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%toolchain_aspect_1")); + assertThat(aspectOnTarget.getBaseKeys()).hasSize(1); + assertThat(aspectOnTarget.getBaseKeys().get(0).getAspectName()) + .isEqualTo("//test:defs.bzl%no_toolchain_aspect"); + + var aspectOnTargetNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(aspectOnTarget)) + .findFirst() + .orElse(null); + assertThat(aspectOnTargetNode).isNotNull(); + + var aspectsOnToolchain = + Iterables.transform( + Iterables.filter(aspectOnTargetNode.getDirectDeps(), AspectKey.class), + k -> k.getAspectName() + " on " + k.getLabel().toString()); + assertThat(aspectsOnToolchain).hasSize(4); + // Only `toolchain_aspect_1` and 
`toolchain_aspect_2` are propagated to the toolchain + assertThat(aspectsOnToolchain) + .containsExactly( + "//test:defs.bzl%toolchain_aspect_2 on //test:t1", + "//test:defs.bzl%no_toolchain_aspect on //test:t1", + "//test:defs.bzl%toolchain_aspect_1 on //toolchain:foo", + "//test:defs.bzl%toolchain_aspect_2 on //toolchain:foo"); + + var toolchainAspect1 = + Iterables.getOnlyElement( + Iterables.filter( + Iterables.filter(aspectOnTargetNode.getDirectDeps(), AspectKey.class), + k -> + k.getAspectName().equals("//test:defs.bzl%toolchain_aspect_1") + && k.getLabel().toString().equals("//toolchain:foo"))); + // Since `toolchain_aspect_1` only depends on `no_toolchain_aspect`, it will have no base keys + // when applied on the toolchain. + assertThat(toolchainAspect1.getBaseKeys()).isEmpty(); + } + + @Test + public void execGroupWithMultipleToolchainTypes_aspectsPropagateToRelevantTypes() + throws Exception { + scratch.file( + "test/defs.bzl", + """ + Prov1 = provider() + Prov2 = provider() + + def _impl(target, ctx): + return [] + + def _impl_1(target, ctx): + return [Prov1()] + + def _impl_2(target, ctx): + return [Prov2()] + + toolchain_aspect_0 = aspect( + implementation = _impl, + toolchains_aspects = ['//rule:toolchain_type_1'], + required_aspect_providers = [[Prov1], [Prov2]] + ) + + toolchain_aspect_1 = aspect( + implementation = _impl_1, + toolchains_aspects = ['//rule:toolchain_type_3'], + provides = [Prov1], + required_aspect_providers = [Prov2] + ) + + toolchain_aspect_2 = aspect( + implementation = _impl_2, + toolchains_aspects = ['//rule:toolchain_type_1'], + provides = [Prov2], + ) + + def _rule_impl(ctx): + pass + + r1 = rule( + implementation = _rule_impl, + attrs = { + 'dep': attr.label(), + }, + exec_groups = {"gp": exec_group( + toolchains = ['//rule:toolchain_type_1', '//rule:toolchain_type_3'])}, + ) + """); + scratch.file( + "test/BUILD", + """ + load('//test:defs.bzl', 'r1') + r1(name = 't1') + """); + useConfiguration( + "--extra_toolchains=//toolchain:foo_toolchain,//toolchain:foo_toolchain_with_dep"); + + var unused = + update( + ImmutableList.of( + "//test:defs.bzl%toolchain_aspect_2", + "//test:defs.bzl%toolchain_aspect_1", "//test:defs.bzl%toolchain_aspect_0"), + "//test:t1"); + + var aspectOnTarget = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%toolchain_aspect_0")); + var aspectOnTargetNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(aspectOnTarget)) + .findFirst() + .orElse(null); + assertThat(aspectOnTargetNode).isNotNull(); + + var aspectsOnToolchain = + Iterables.transform( + Iterables.filter(aspectOnTargetNode.getDirectDeps(), AspectKey.class), + k -> k.getAspectName() + " on " + k.getLabel().toString()); + assertThat(aspectsOnToolchain).hasSize(5); + assertThat(aspectsOnToolchain) + .containsExactly( + "//test:defs.bzl%toolchain_aspect_1 on //test:t1", + "//test:defs.bzl%toolchain_aspect_2 on //test:t1", + // toolchain_aspect_0 and toolchain_aspect_2 propagate to //toolchain:foo of + // //rule:toolchain_type_1 + "//test:defs.bzl%toolchain_aspect_0 on //toolchain:foo", + "//test:defs.bzl%toolchain_aspect_2 on //toolchain:foo", + // toolchain_aspect_1 propagates to //toolchain:foo_with_dep of //rule:toolchain_type_3 + "//test:defs.bzl%toolchain_aspect_1 on //toolchain:foo_with_dep"); + + var toolchainAspect1 = + Iterables.getOnlyElement( + Iterables.filter( + Iterables.filter(aspectOnTargetNode.getDirectDeps(), AspectKey.class), + k -> + 
k.getAspectName().equals("//test:defs.bzl%toolchain_aspect_0") + && k.getLabel().toString().equals("//toolchain:foo"))); + // Since `toolchain_aspect_0` depends on `toolchain_aspect_2` when applied on //toolchain:foo, + assertThat(Iterables.getOnlyElement(toolchainAspect1.getBaseKeys()).getAspectName()) + .isEqualTo("//test:defs.bzl%toolchain_aspect_2"); + } + + @Test + public void toolchainTypesResolvedToSameToolchain_aspectsPropagateToSameToolchain() + throws Exception { + scratch.file( + "test/defs.bzl", + """ + prov = provider() + + def _impl(target, ctx): + return [] + + def _impl_1(target, ctx): + return [prov()] + + toolchain_aspect_1 = aspect( + implementation = _impl, + toolchains_aspects = ['//rule:toolchain_type_2'], + required_aspect_providers = [prov] + ) + + toolchain_aspect_2 = aspect( + implementation = _impl_1, + toolchains_aspects = ['//rule:toolchain_type_3'], + provides = [prov], + ) + + def _rule_impl(ctx): + pass + + r1 = rule( + implementation = _rule_impl, + exec_groups = {"gp": exec_group( + toolchains = ['//rule:toolchain_type_2', '//rule:toolchain_type_3'])}, + ) + """); + scratch.file( + "test/BUILD", + """ + load('//test:defs.bzl', 'r1') + r1(name = 't1') + """); + useConfiguration( + "--extra_toolchains=//toolchain:foo_type_2", "--extra_toolchains=//toolchain:foo_type_3"); + + var unused = + update( + ImmutableList.of( + "//test:defs.bzl%toolchain_aspect_2", "//test:defs.bzl%toolchain_aspect_1"), + "//test:t1"); + + var aspectOnTarget = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%toolchain_aspect_1")); + var aspectOnTargetNode = + skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(aspectOnTarget)) + .findFirst() + .orElse(null); + assertThat(aspectOnTargetNode).isNotNull(); + + var aspectsOnToolchain = + Iterables.transform( + Iterables.filter(aspectOnTargetNode.getDirectDeps(), AspectKey.class), + k -> k.getAspectName() + " on " + k.getLabel().toString()); + assertThat(aspectsOnToolchain).hasSize(3); + + assertThat(aspectsOnToolchain) + .containsExactly( + "//test:defs.bzl%toolchain_aspect_2 on //test:t1", + // both aspects propagated to //toolchain:foo_for_all because it resolves both the + // toolchain types + "//test:defs.bzl%toolchain_aspect_1 on //toolchain:foo_for_all", + "//test:defs.bzl%toolchain_aspect_2 on //toolchain:foo_for_all"); + } + + @Test + public void toolchainTypesResolvedToSameToolchainDiffExecPlatform_aspectPropagateTwice() + throws Exception { + scratch.file( + "test/defs.bzl", + """ + def _impl(target, ctx): + return [] + + toolchain_aspect = aspect( + implementation = _impl, + toolchains_aspects = ['//rule:toolchain_type_1'], + ) + + def _rule_impl(ctx): + pass + + r1 = rule( + implementation = _rule_impl, + exec_groups = { + "gp1": exec_group( + toolchains = ['//rule:toolchain_type_1'], + exec_compatible_with = ['//platforms:constraint_2'] + ), + "gp2": exec_group(toolchains = ['//rule:toolchain_type_1'])}, + ) + """); + scratch.file( + "test/BUILD", + """ + load('//test:defs.bzl', 'r1') + r1(name = 't1') + """); + useConfiguration( + "--extra_toolchains=//toolchain:foo_toolchain", + "--extra_execution_platforms=//platforms:platform_1,//platforms:platform_2"); + + var unused = update(ImmutableList.of("//test:defs.bzl%toolchain_aspect"), "//test:t1"); + + var aspectOnTarget = + Iterables.getOnlyElement(getAspectKeys("//test:t1", "//test:defs.bzl%toolchain_aspect")); + var aspectOnTargetNode = + 
skyframeExecutor.getEvaluator().getInMemoryGraph().getAllNodeEntries().stream() + .filter(n -> n.getKey().equals(aspectOnTarget)) + .findFirst() + .orElse(null); + assertThat(aspectOnTargetNode).isNotNull(); + + var aspectsOnToolchain = + Iterables.transform( + Iterables.filter(aspectOnTargetNode.getDirectDeps(), AspectKey.class), + k -> + k.getAspectName() + + " on " + + k.getLabel().toString() + + ", exec_platform: " + + k.getBaseConfiguredTargetKey().getExecutionPlatformLabel().toString()); + assertThat(aspectsOnToolchain).hasSize(2); + // aspect propagated twice on the same toolchain target but with different execution platform + assertThat(aspectsOnToolchain) + .containsExactly( + "//test:defs.bzl%toolchain_aspect on //toolchain:foo, exec_platform:" + + " //platforms:platform_2", + "//test:defs.bzl%toolchain_aspect on //toolchain:foo, exec_platform:" + + " //platforms:platform_1"); + } + + private ImmutableList getAspectKeys(String targetLabel, String aspectLabel) { + return skyframeExecutor.getEvaluator().getDoneValues().entrySet().stream() + .filter( + entry -> + entry.getKey() instanceof AspectKey + && ((AspectKey) entry.getKey()).getAspectClass().getName().equals(aspectLabel) + && ((AspectKey) entry.getKey()).getLabel().toString().equals(targetLabel)) + .map(e -> (AspectKey) e.getKey()) + .collect(toImmutableList()); + } +} diff --git a/src/test/java/com/google/devtools/build/lib/starlark/StarlarkRuleClassFunctionsTest.java b/src/test/java/com/google/devtools/build/lib/starlark/StarlarkRuleClassFunctionsTest.java index 14ab80434cb4f5..5abdd71acb01df 100644 --- a/src/test/java/com/google/devtools/build/lib/starlark/StarlarkRuleClassFunctionsTest.java +++ b/src/test/java/com/google/devtools/build/lib/starlark/StarlarkRuleClassFunctionsTest.java @@ -4376,8 +4376,8 @@ def impl(ctx): getConfiguredTarget("//initializer_testing:my_target"); ev.assertContainsError( - "existing_rules() can only be used while evaluating a BUILD file, a WORKSPACE file, or a" - + " macro loaded from there"); + "existing_rules() can only be used while evaluating a BUILD file (or macro) or a WORKSPACE" + + " file"); } @Test diff --git a/src/test/java/com/google/devtools/build/lib/starlark/StarlarkRuleImplementationFunctionsTest.java b/src/test/java/com/google/devtools/build/lib/starlark/StarlarkRuleImplementationFunctionsTest.java index 3dc89a0e00a683..72dc73d6631e0c 100644 --- a/src/test/java/com/google/devtools/build/lib/starlark/StarlarkRuleImplementationFunctionsTest.java +++ b/src/test/java/com/google/devtools/build/lib/starlark/StarlarkRuleImplementationFunctionsTest.java @@ -2098,9 +2098,7 @@ def _foo(): """); reporter.removeHandler(failFastHandler); getConfiguredTarget("//test:my_glob"); - assertContainsEvent( - "glob() can only be used while evaluating a BUILD file, a WORKSPACE file, or a macro loaded" - + " from there"); + assertContainsEvent("glob() can only be used while evaluating a BUILD file (or macro)"); } @Test diff --git a/src/test/java/com/google/devtools/build/lib/view/cpp/CppTemplateTest.java b/src/test/java/com/google/devtools/build/lib/view/cpp/CppTemplateTest.java index 5a73a4aa67aa01..6dd5cb61a2c4b0 100644 --- a/src/test/java/com/google/devtools/build/lib/view/cpp/CppTemplateTest.java +++ b/src/test/java/com/google/devtools/build/lib/view/cpp/CppTemplateTest.java @@ -154,7 +154,7 @@ def _impl(ctx): """); write("cc/BUILD", "cc_library(name = 'cc', srcs = ['//tree:lib'])"); buildTarget("//cc:cc"); - events.assertContainsEvent(EventKind.WARNING, "This is a warning"); + 
assertContainsEvent(EventKind.WARNING, "This is a warning"); getSkyframeExecutor() .getEvaluator() .getDoneValues() diff --git a/src/test/java/com/google/devtools/common/options/OptionsParserTest.java b/src/test/java/com/google/devtools/common/options/OptionsParserTest.java index f4ff9f4927aa6c..c8632cf426236a 100644 --- a/src/test/java/com/google/devtools/common/options/OptionsParserTest.java +++ b/src/test/java/com/google/devtools/common/options/OptionsParserTest.java @@ -43,19 +43,17 @@ public final class OptionsParserTest { /** Dummy comment (linter suppression) */ public static class BadOptions extends OptionsBase { @Option( - name = "foo", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "foo", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean foo1; @Option( - name = "foo", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "foo", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean foo2; } @@ -112,21 +110,19 @@ public static class ChoosyOptions extends OptionsBase { public static class ExampleFoo extends OptionsBase { @Option( - name = "foo", - category = "one", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "defaultFoo" - ) + name = "foo", + category = "one", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "defaultFoo") public String foo; @Option( - name = "bar", - category = "two", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "42" - ) + name = "bar", + category = "two", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "42") public int bar; @Option( @@ -159,12 +155,11 @@ public static class ExampleFoo extends OptionsBase { public static class ExampleBaz extends OptionsBase { @Option( - name = "baz", - category = "one", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "defaultBaz" - ) + name = "baz", + category = "one", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "defaultBaz") public String baz; } @@ -172,40 +167,33 @@ public static class ExampleBaz extends OptionsBase { public static class ExampleBazSubclass extends ExampleBaz { @Option( - name = "baz_subclass", - category = "one", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "defaultBazSubclass" - ) + name = "baz_subclass", + category = "one", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "defaultBazSubclass") public String bazSubclass; } - /** - * Example with empty to null string converter - */ + /** Example with empty to null string converter */ public static class ExampleBoom extends OptionsBase { @Option( - name = "boom", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = 
{OptionEffectTag.NO_OP}, - defaultValue = "defaultBoom", - converter = EmptyToNullStringConverter.class - ) + name = "boom", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "defaultBoom", + converter = EmptyToNullStringConverter.class) public String boom; } - /** - * Example with internal options - */ + /** Example with internal options */ public static class ExampleInternalOptions extends OptionsBase { @Option( - name = "internal_boolean", - metadataTags = {OptionMetadataTag.INTERNAL}, - documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "true" - ) + name = "internal_boolean", + metadataTags = {OptionMetadataTag.INTERNAL}, + documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "true") public boolean privateBoolean; @Option( @@ -276,9 +264,7 @@ public String getTypeDescription() { } } - /** - * A converter that defaults to null if the input is the empty string - */ + /** A converter that defaults to null if the input is the empty string */ public static class EmptyToNullStringConverter extends StringConverter { @Override public String convert(String input) { @@ -414,8 +400,8 @@ public void parsingFailsWithUnknownOptions() { OptionsParsingException e = assertThrows(OptionsParsingException.class, () -> parser.parse(unknownOpts)); assertThat(e.getInvalidArgument()).isEqualTo("--unknown"); - assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --unknown"); - assertThat(parser.getOptions(ExampleFoo.class)).isNotNull(); + assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --unknown"); + assertThat(parser.getOptions(ExampleFoo.class)).isNotNull(); assertThat(parser.getOptions(ExampleBaz.class)).isNotNull(); } @@ -427,7 +413,7 @@ public void parsingFailsWithInternalBooleanOptionAsIfUnknown() { OptionsParsingException e = assertThrows(OptionsParsingException.class, () -> parser.parse(internalOpts)); assertThat(e.getInvalidArgument()).isEqualTo("--internal_boolean"); - assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --internal_boolean"); + assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --internal_boolean"); assertThat(parser.getOptions(ExampleInternalOptions.class)).isNotNull(); } @@ -439,7 +425,7 @@ public void parsingFailsWithNegatedInternalBooleanOptionAsIfUnknown() { OptionsParsingException e = assertThrows(OptionsParsingException.class, () -> parser.parse(internalOpts)); assertThat(e.getInvalidArgument()).isEqualTo("--nointernal_boolean"); - assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --nointernal_boolean"); + assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --nointernal_boolean"); assertThat(parser.getOptions(ExampleInternalOptions.class)).isNotNull(); } @@ -454,7 +440,7 @@ public void parsingFailsForInternalOptionWithValueInSameArgAsIfUnknown() { OptionsParsingException.class, () -> parser.parse(internalOpts)); assertThat(e.getInvalidArgument()).isEqualTo("--internal_string=any_value"); - assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --internal_string=any_value"); + assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --internal_string=any_value"); assertThat(parser.getOptions(ExampleInternalOptions.class)).isNotNull(); } @@ -469,7 +455,7 @@ public void parsingFailsForInternalOptionWithValueInSeparateArgAsIfUnknown() { OptionsParsingException.class, () -> 
parser.parse(internalOpts)); assertThat(e.getInvalidArgument()).isEqualTo("--internal_string"); - assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --internal_string"); + assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --internal_string"); assertThat(parser.getOptions(ExampleInternalOptions.class)).isNotNull(); } @@ -481,8 +467,8 @@ public void parseKnownAndUnknownOptions() { OptionsParsingException e = assertThrows(OptionsParsingException.class, () -> parser.parse(opts)); assertThat(e.getInvalidArgument()).isEqualTo("--unknown"); - assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --unknown"); - assertThat(parser.getOptions(ExampleFoo.class)).isNotNull(); + assertThat(e).hasMessageThat().isEqualTo("Unrecognized option: --unknown"); + assertThat(parser.getOptions(ExampleFoo.class)).isNotNull(); assertThat(parser.getOptions(ExampleBaz.class)).isNotNull(); } @@ -499,21 +485,19 @@ public void parseAndOverrideWithEmptyStringToObtainNullValueInOption() public static class CategoryTest extends OptionsBase { @Option( - name = "swiss_bank_account_number", - documentationCategory = - OptionDocumentationCategory.UNDOCUMENTED, // Not printed in usage messages! - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "123456789" - ) + name = "swiss_bank_account_number", + documentationCategory = + OptionDocumentationCategory.UNDOCUMENTED, // Not printed in usage messages! + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "123456789") public int swissBankAccountNumber; @Option( - name = "student_bank_account_number", - category = "one", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "987654321" - ) + name = "student_bank_account_number", + category = "one", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "987654321") public int studentBankAccountNumber; } @@ -546,7 +530,7 @@ public void parserCanBeCalledRepeatedly() throws OptionsParsingException { @Test public void multipleOccurringOption() throws OptionsParsingException { OptionsParser parser = OptionsParser.builder().optionsClasses(ExampleFoo.class).build(); - parser.parse("--bing", "abcdef", "--foo", "foo1", "--bing", "123456" ); + parser.parse("--bing", "abcdef", "--foo", "foo1", "--bing", "123456"); assertThat(parser.getOptions(ExampleFoo.class).bing).containsExactly("abcdef", "123456"); } @@ -555,13 +539,12 @@ public void multipleOccurringOptionWithConverter() throws OptionsParsingExceptio // --bang is the same as --bing except that it has a "converter" specified. // This test also tests option values with embedded commas and spaces. 
OptionsParser parser = OptionsParser.builder().optionsClasses(ExampleFoo.class).build(); - parser.parse("--bang", "abc,def ghi", "--foo", "foo1", "--bang", "123456" ); + parser.parse("--bang", "abc,def ghi", "--foo", "foo1", "--bang", "123456"); assertThat(parser.getOptions(ExampleFoo.class).bang).containsExactly("abc,def ghi", "123456"); } @Test - public void parserIgnoresOptionsAfterMinusMinus() - throws OptionsParsingException { + public void parserIgnoresOptionsAfterMinusMinus() throws OptionsParsingException { OptionsParser parser = OptionsParser.builder().optionsClasses(ExampleFoo.class, ExampleBaz.class).build(); parser.parse("--foo", "well", "--baz", "here", "--", "--bar", "ignore"); @@ -599,25 +582,24 @@ public void multipleCallsToParse() throws Exception { // instance was printing out key=value pairs for all flags in the // OptionsParser, not just those belonging to the specific interface type. @Test - public void toStringDoesntIncludeFlagsForOtherOptionsInParserInstance() - throws Exception { + public void toStringDoesntIncludeFlagsForOtherOptionsInParserInstance() throws Exception { OptionsParser parser = OptionsParser.builder().optionsClasses(ExampleFoo.class, ExampleBaz.class).build(); parser.parse("--foo", "foo", "--bar", "43", "--baz", "baz"); String fooString = parser.getOptions(ExampleFoo.class).toString(); - if (!fooString.contains("foo=foo") || - !fooString.contains("bar=43") || - !fooString.contains("ExampleFoo") || - fooString.contains("baz=baz")) { + if (!fooString.contains("foo=foo") + || !fooString.contains("bar=43") + || !fooString.contains("ExampleFoo") + || fooString.contains("baz=baz")) { fail("ExampleFoo.toString() is incorrect: " + fooString); } String bazString = parser.getOptions(ExampleBaz.class).toString(); - if (!bazString.contains("baz=baz") || - !bazString.contains("ExampleBaz") || - bazString.contains("foo=foo") || - bazString.contains("bar=43")) { + if (!bazString.contains("baz=baz") + || !bazString.contains("ExampleBaz") + || bazString.contains("foo=foo") + || bazString.contains("bar=43")) { fail("ExampleBaz.toString() is incorrect: " + bazString); } } @@ -650,11 +632,10 @@ public void toStringIsIndependentOfExplicitCommandLineOptions() throws Exception // this scummy code off the streets. 
public static class DerivedBaz extends ExampleBaz { @Option( - name = "derived", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "defaultDerived" - ) + name = "derived", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "defaultDerived") public String derived; } @@ -662,8 +643,8 @@ public static class DerivedBaz extends ExampleBaz { public void toStringPrintsInheritedOptionsToo_Duh() throws Exception { DerivedBaz derivedBaz = Options.parse(DerivedBaz.class).getOptions(); String derivedBazString = derivedBaz.toString(); - if (!derivedBazString.contains("derived=defaultDerived") || - !derivedBazString.contains("baz=defaultBaz")) { + if (!derivedBazString.contains("derived=defaultDerived") + || !derivedBazString.contains("baz=defaultBaz")) { fail("DerivedBaz.toString() is incorrect: " + derivedBazString); } } @@ -671,21 +652,19 @@ public void toStringPrintsInheritedOptionsToo_Duh() throws Exception { // Tests for new default value override mechanism public static class CustomOptions extends OptionsBase { @Option( - name = "simple", - category = "custom", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "simple default" - ) + name = "simple", + category = "custom", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "simple default") public String simple; @Option( - name = "multipart_name", - category = "custom", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "multipart default" - ) + name = "multipart_name", + category = "custom", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "multipart default") public String multipartName; } @@ -698,11 +677,10 @@ public void assertDefaultStringsForCustomOptions() throws OptionsParsingExceptio public static class NullTestOptions extends OptionsBase { @Option( - name = "simple", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "simple", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public String simple; } @@ -754,29 +732,26 @@ public void convertWithContext() throws Exception { public static class ImplicitDependencyOptions extends OptionsBase { @Option( - name = "first", - implicitRequirements = "--second=second", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "first", + implicitRequirements = "--second=second", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public String first; @Option( - name = "second", - implicitRequirements = "--third=third", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "second", + implicitRequirements = "--third=third", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public String second; @Option( - name = "third", - 
documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "third", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public String third; } @@ -794,12 +769,11 @@ public void implicitDependencyHasImplicitDependency() throws Exception { public static class BadImplicitDependencyOptions extends OptionsBase { @Option( - name = "first", - implicitRequirements = "xxx", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "first", + implicitRequirements = "xxx", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public String first; } @@ -819,12 +793,11 @@ public void badImplicitDependency() throws Exception { public static class BadExpansionOptions extends OptionsBase { @Option( - name = "first", - expansion = {"xxx"}, - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "first", + expansion = {"xxx"}, + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public Void first; } @@ -844,20 +817,18 @@ public void badExpansionOptions() throws Exception { /** ExpansionOptions */ public static class ExpansionOptions extends OptionsBase { @Option( - name = "underlying", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "underlying", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public String underlying; @Option( - name = "expands", - expansion = {"--underlying=from_expansion"}, - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "expands", + expansion = {"--underlying=from_expansion"}, + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public Void expands; } @@ -1059,29 +1030,26 @@ public void getOptionValueDescriptionWithValue() throws Exception { public static class ImplicitDependencyWarningOptions extends OptionsBase { @Option( - name = "first", - implicitRequirements = "--second=requiredByFirst", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "first", + implicitRequirements = "--second=requiredByFirst", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean first; @Option( - name = "second", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "second", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public String second; @Option( - name = "third", - implicitRequirements = "--second=requiredByThird", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "third", + 
implicitRequirements = "--second=requiredByThird", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public String third; } @@ -1189,30 +1157,27 @@ public void testDependentOriginIsPropagatedToOption() throws OptionsParsingExcep */ public static class ImplicitDependencyOnAllowMultipleOptions extends OptionsBase { @Option( - name = "first", - implicitRequirements = "--second=requiredByFirst", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "first", + implicitRequirements = "--second=requiredByFirst", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean first; @Option( - name = "second", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null", - allowMultiple = true - ) + name = "second", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null", + allowMultiple = true) public List second; @Option( - name = "third", - implicitRequirements = "--second=requiredByThird", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "third", + implicitRequirements = "--second=requiredByThird", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public String third; } @@ -1278,40 +1243,36 @@ public void testCanonicalizeExcludesImplicitDependencyForOtherwiseUnmentionedRep public static class WarningOptions extends OptionsBase { @Deprecated @Option( - name = "first", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "first", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public Void first; @Deprecated @Option( - name = "second", - allowMultiple = true, - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "second", + allowMultiple = true, + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public List second; @Deprecated @Option( - name = "third", - expansion = "--fourth=true", - abbrev = 't', - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "third", + expansion = "--fourth=true", + abbrev = 't', + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public Void third; @Option( - name = "fourth", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "fourth", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean fourth; } @@ -1348,40 +1309,36 @@ public void deprecationWarningForAbbreviatedExpansionOption() throws Exception { public static class NewWarningOptions extends OptionsBase { @Option( - name = 
"first", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null", - deprecationWarning = "it's gone" - ) + name = "first", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null", + deprecationWarning = "it's gone") public Void first; @Option( - name = "second", - allowMultiple = true, - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null", - deprecationWarning = "sorry, no replacement" - ) + name = "second", + allowMultiple = true, + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null", + deprecationWarning = "sorry, no replacement") public List second; @Option( - name = "third", - expansion = "--fourth=true", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null", - deprecationWarning = "use --forth instead" - ) + name = "third", + expansion = "--fourth=true", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null", + deprecationWarning = "use --forth instead") public Void third; @Option( - name = "fourth", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "fourth", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean fourth; } @@ -1413,29 +1370,26 @@ public void newDeprecationWarningForExpansionOption() throws Exception { public static class ExpansionWarningOptions extends OptionsBase { @Option( - name = "first", - expansion = "--underlying=expandedFromFirst", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "first", + expansion = "--underlying=expandedFromFirst", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public Void first; @Option( - name = "second", - expansion = "--underlying=expandedFromSecond", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "second", + expansion = "--underlying=expandedFromSecond", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public Void second; @Option( - name = "underlying", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "underlying", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public String underlying; } @@ -1466,10 +1420,12 @@ public void warningForTwoConflictingExpansionOptions() throws Exception { // in the code. 
@Test public void optionPrioritiesAreCorrectlyOrdered() throws Exception { - assertThat(OptionPriority.PriorityCategory.values()).hasLength(6); + assertThat(OptionPriority.PriorityCategory.values()).hasLength(7); assertThat(OptionPriority.PriorityCategory.DEFAULT) .isLessThan(OptionPriority.PriorityCategory.COMPUTED_DEFAULT); assertThat(OptionPriority.PriorityCategory.COMPUTED_DEFAULT) + .isLessThan(OptionPriority.PriorityCategory.GLOBAL_RC_FILE); + assertThat(OptionPriority.PriorityCategory.GLOBAL_RC_FILE) .isLessThan(OptionPriority.PriorityCategory.RC_FILE); assertThat(OptionPriority.PriorityCategory.RC_FILE) .isLessThan(OptionPriority.PriorityCategory.COMMAND_LINE); @@ -1481,46 +1437,41 @@ public void optionPrioritiesAreCorrectlyOrdered() throws Exception { public static class IntrospectionExample extends OptionsBase { @Option( - name = "alpha", - category = "one", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "alphaDefaultValue" - ) + name = "alpha", + category = "one", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "alphaDefaultValue") public String alpha; @Option( - name = "beta", - category = "one", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "betaDefaultValue" - ) + name = "beta", + category = "one", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "betaDefaultValue") public String beta; @Option( - name = "gamma", - documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "gammaDefaultValue" - ) + name = "gamma", + documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "gammaDefaultValue") public String gamma; @Option( - name = "delta", - documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "deltaDefaultValue" - ) + name = "delta", + documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "deltaDefaultValue") public String delta; @Option( - name = "echo", - metadataTags = {OptionMetadataTag.HIDDEN}, - documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "echoDefaultValue" - ) + name = "echo", + metadataTags = {OptionMetadataTag.HIDDEN}, + documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "echoDefaultValue") public String echo; } @@ -1651,13 +1602,12 @@ public void asListOfEffectiveOptions() throws Exception { public static class ListExample extends OptionsBase { @Option( - name = "alpha", - converter = StringConverter.class, - allowMultiple = true, - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "alpha", + converter = StringConverter.class, + allowMultiple = true, + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public List alpha; } @@ -1730,13 +1680,12 @@ public void listOptionsHaveCorrectPriorities() throws Exception { public static class CommaSeparatedOptionsExample extends 
OptionsBase { @Option( - name = "alpha", - converter = CommaSeparatedOptionListConverter.class, - allowMultiple = true, - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null" - ) + name = "alpha", + converter = CommaSeparatedOptionListConverter.class, + allowMultiple = true, + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null") public List alpha; } @@ -1786,64 +1735,57 @@ public void commaSeparatedListOptionsHaveCorrectPriorities() throws Exception { public static class Yesterday extends OptionsBase { @Option( - name = "a", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "a" - ) + name = "a", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "a") public String a; @Option( - name = "b", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "b" - ) + name = "b", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "b") public String b; @Option( - name = "c", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null", - expansion = {"--a=cExpansion"} - ) + name = "c", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null", + expansion = {"--a=cExpansion"}) public Void c; @Option( - name = "d", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null", - allowMultiple = true - ) + name = "d", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null", + allowMultiple = true) public List d; @Option( - name = "e", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null", - implicitRequirements = {"--a=eRequirement"} - ) + name = "e", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null", + implicitRequirements = {"--a=eRequirement"}) public String e; @Option( - name = "f", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null", - implicitRequirements = {"--b=fRequirement"} - ) + name = "f", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null", + implicitRequirements = {"--b=fRequirement"}) public String f; @Option( - name = "g", - abbrev = 'h', - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "g", + abbrev = 'h', + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean g; } @@ -1918,7 +1860,8 @@ public void implicitDepsAreNotInTheCanonicalOrder() throws Exception { @Test public void canonicalizeDoesNotSkipAllowMultiple() throws Exception { assertThat(canonicalize(Yesterday.class, "--d=a", "--d=b")) - .containsExactly("--d=a", "--d=b").inOrder(); + 
.containsExactly("--d=a", "--d=b") + .inOrder(); } @Test @@ -1929,9 +1872,9 @@ public void canonicalizeReplacesAbbrevWithName() throws Exception { /** * Check that all forms of boolean flags are canonicalizes to the same form. * - * The list of accepted values is from - * {@link com.google.devtools.common.options.Converters.BooleanConverter}, and the value-less - * --[no] form is controlled by {@link OptionsParserImpl#identifyOptionAndPossibleArgument}. + *
<p>
The list of accepted values is from {@link + * com.google.devtools.common.options.Converters.BooleanConverter}, and the value-less --[no] form + * is controlled by {@link OptionsParserImpl#identifyOptionAndPossibleArgument}. */ @Test public void canonicalizeNormalizesBooleanFlags() throws Exception { @@ -1952,19 +1895,17 @@ public void canonicalizeNormalizesBooleanFlags() throws Exception { public static class LongValueExample extends OptionsBase { @Option( - name = "longval", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "2147483648" - ) + name = "longval", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "2147483648") public long longval; @Option( - name = "intval", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "2147483647" - ) + name = "intval", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "2147483647") public int intval; } @@ -1995,12 +1936,11 @@ public void intOutOfBounds() { public static class OldNameExample extends OptionsBase { @Option( - name = "new_name", - oldName = "old_name", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "defaultValue" - ) + name = "new_name", + oldName = "old_name", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "defaultValue") public String flag; @Option( @@ -2175,11 +2115,10 @@ public void testOldName_noWarning() throws OptionsParsingException { public static class ExampleBooleanFooOptions extends OptionsBase { @Option( - name = "foo", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "foo", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean foo; } @@ -2199,30 +2138,27 @@ public void testBooleanUnderscorePrefixError() { @UsesOnlyCoreTypes public static class CoreTypesOptions extends OptionsBase { @Option( - name = "foo", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "foo", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean foo; @Option( - name = "bar", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "abc" - ) + name = "bar", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "abc") public String bar; } /** Dummy options that does not declare using only core types. 
*/ public static class NonCoreTypesOptions extends OptionsBase { @Option( - name = "foo", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "foo", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean foo; } @@ -2250,24 +2186,22 @@ public String getTypeDescription() { } @Option( - name = "foo", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null", - converter = FooConverter.class - ) + name = "foo", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null", + converter = FooConverter.class) public Foo foo; } /** Dummy options that is unsafe for @UsesOnlyCoreTypes but doesn't use the annotation. */ public static class SuperBadCoreTypesOptions extends OptionsBase { @Option( - name = "foo", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "null", - converter = BadCoreTypesOptions.FooConverter.class - ) + name = "foo", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "null", + converter = BadCoreTypesOptions.FooConverter.class) public BadCoreTypesOptions.Foo foo; } @@ -2278,11 +2212,10 @@ public static class SuperBadCoreTypesOptions extends OptionsBase { @UsesOnlyCoreTypes public static class InheritedBadCoreTypesOptions extends SuperBadCoreTypesOptions { @Option( - name = "bar", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "bar", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean bar; } @@ -2323,27 +2256,24 @@ public void testValidationOfUsesOnlyCoreTypes_Inherited() { /** Dummy options for testing getHelpCompletion() and visitOptions(). 
*/ public static class CompletionOptions extends OptionsBase { @Option( - name = "secret", - documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "secret", + documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean secret; @Option( name = "b", documentationCategory = OptionDocumentationCategory.LOGGING, effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) - public boolean b; + defaultValue = "false") + public boolean b; @Option( - name = "a", - documentationCategory = OptionDocumentationCategory.QUERY, - effectTags = {OptionEffectTag.NO_OP}, - defaultValue = "false" - ) + name = "a", + documentationCategory = OptionDocumentationCategory.QUERY, + effectTags = {OptionEffectTag.NO_OP}, + defaultValue = "false") public boolean a; } @@ -2410,8 +2340,8 @@ public void setOptionValueAtSpecificPriorityWithoutExpansion_setsOptionAndAddsPa new OptionInstanceOrigin( OptionPriority.lowestOptionPriorityAtCategory(PriorityCategory.INVOCATION_POLICY), "invocation policy", - /*implicitDependent=*/ null, - /*expandedFrom=*/ null); + /* implicitDependent= */ null, + /* expandedFrom= */ null); OptionDefinition optionDefinition = FieldOptionDefinition.extractOptionDefinition(ExampleFoo.class.getField("foo")); @@ -2432,8 +2362,8 @@ public void setOptionValueAtSpecificPriorityWithoutExpansion_addsFlagAlias() thr new OptionInstanceOrigin( OptionPriority.lowestOptionPriorityAtCategory(PriorityCategory.INVOCATION_POLICY), "invocation policy", - /*implicitDependent=*/ null, - /*expandedFrom=*/ null); + /* implicitDependent= */ null, + /* expandedFrom= */ null); OptionDefinition optionDefinition = FieldOptionDefinition.extractOptionDefinition(ExampleFoo.class.getField("foo")); @@ -2483,7 +2413,7 @@ public void setOptionValueAtSpecificPriorityWithoutExpansion_impliedFlag_setsVal createInvocationPolicyOrigin(), /* conversionContext= */ null); OptionInstanceOrigin origin = - createInvocationPolicyOrigin(/*implicitDependent=*/ first, /*expandedFrom=*/ null); + createInvocationPolicyOrigin(/* implicitDependent= */ first, /* expandedFrom= */ null); OptionDefinition optionDefinition = FieldOptionDefinition.extractOptionDefinition( @@ -2512,7 +2442,7 @@ public void setOptionValueAtSpecificPriorityWithoutExpansion_expandedFlag_setsVa createInvocationPolicyOrigin(), /* conversionContext= */ null); OptionInstanceOrigin origin = - createInvocationPolicyOrigin(/*implicitDependent=*/ null, /*expandedFrom=*/ first); + createInvocationPolicyOrigin(/* implicitDependent= */ null, /* expandedFrom= */ first); OptionDefinition optionDefinition = FieldOptionDefinition.extractOptionDefinition( @@ -2624,7 +2554,7 @@ public void fallbackOptions_expansionToNegativeBooleanFlag() throws OptionsParsi } private static OptionInstanceOrigin createInvocationPolicyOrigin() { - return createInvocationPolicyOrigin(/*implicitDependent=*/ null, /*expandedFrom=*/ null); + return createInvocationPolicyOrigin(/* implicitDependent= */ null, /* expandedFrom= */ null); } private static OptionInstanceOrigin createInvocationPolicyOrigin( diff --git a/src/test/java/net/starlark/java/syntax/ParserTest.java b/src/test/java/net/starlark/java/syntax/ParserTest.java index cc34d9b5acd291..5abf79809d7dc4 100644 --- a/src/test/java/net/starlark/java/syntax/ParserTest.java +++ b/src/test/java/net/starlark/java/syntax/ParserTest.java @@ -1348,7 +1348,7 @@ public void 
testParseExpressionStackOverflow() throws Exception { private static ParserInput veryDeepExpression() { StringBuilder s = new StringBuilder(); - for (int i = 0; i < 1000; i++) { + for (int i = 0; i < 5000; i++) { s.append("{"); } return ParserInput.fromString(s.toString(), "foo.star"); diff --git a/src/test/py/bazel/BUILD b/src/test/py/bazel/BUILD index 40c922817adcf2..fc623f82ff4236 100644 --- a/src/test/py/bazel/BUILD +++ b/src/test/py/bazel/BUILD @@ -392,6 +392,7 @@ py_test( name = "mod_command_test", size = "large", srcs = ["bzlmod/mod_command_test.py"], + shard_count = 8, tags = [ "requires-network", ], diff --git a/src/test/py/bazel/bzlmod/bazel_vendor_test.py b/src/test/py/bazel/bzlmod/bazel_vendor_test.py index a446aa87fb8c33..578c988ff722b4 100644 --- a/src/test/py/bazel/bzlmod/bazel_vendor_test.py +++ b/src/test/py/bazel/bzlmod/bazel_vendor_test.py @@ -143,6 +143,25 @@ def testVendorFailsWithNofetch(self): 'ERROR: You cannot run the vendor command with --nofetch', stderr ) + def testVendorAfterFetch(self): + self.main_registry.createCcModule('aaa', '1.0') + self.ScratchFile( + 'MODULE.bazel', + [ + 'bazel_dep(name = "aaa", version = "1.0")', + 'local_path_override(module_name="bazel_tools", path="tools_mock")', + 'local_path_override(module_name="local_config_platform", ', + 'path="platforms_mock")', + ], + ) + self.ScratchFile('BUILD') + + self.RunBazel(['fetch', '--repo=@@aaa~']) + self.RunBazel(['vendor', '--vendor_dir=vendor', '--repo=@@aaa~']) + + repos_vendored = os.listdir(self._test_cwd + '/vendor') + self.assertIn('aaa~', repos_vendored) + def testVendoringMultipleTimes(self): self.main_registry.createCcModule('aaa', '1.0') self.ScratchFile( diff --git a/src/test/shell/bazel/bazel_build_event_stream_test.sh b/src/test/shell/bazel/bazel_build_event_stream_test.sh index f3856a828121dd..00b7226d9280fa 100755 --- a/src/test/shell/bazel/bazel_build_event_stream_test.sh +++ b/src/test/shell/bazel/bazel_build_event_stream_test.sh @@ -256,6 +256,7 @@ EOF ls >& "$TEST_log" cat bep.json >> "$TEST_log" + expect_log "//a:arg" expect_log "execRequest" expect_log "argv" expect_log "REDACTED" diff --git a/src/test/shell/bazel/bazel_coverage_cc_test_llvm.sh b/src/test/shell/bazel/bazel_coverage_cc_test_llvm.sh index 7664558bb60cb4..eeaa248dbe5f6b 100755 --- a/src/test/shell/bazel/bazel_coverage_cc_test_llvm.sh +++ b/src/test/shell/bazel/bazel_coverage_cc_test_llvm.sh @@ -138,7 +138,10 @@ function test_cc_test_llvm_coverage_produces_lcov_report() { setup_llvm_coverage_tools_for_lcov || return 0 setup_a_cc_lib_and_t_cc_test - bazel coverage --test_output=all //:t &>$TEST_log || fail "Coverage for //:t failed" + # Ensure that coverage succeeds even with lazily built runfiles trees for the + # merger tool. + bazel coverage --nobuild_runfile_links \ + --test_output=all //:t &>$TEST_log || fail "Coverage for //:t failed" local expected_result="SF:a.cc FN:3,_Z1ab @@ -163,7 +166,10 @@ function test_cc_test_llvm_coverage_produces_lcov_report_with_split_postprocessi setup_llvm_coverage_tools_for_lcov || return 0 setup_a_cc_lib_and_t_cc_test + # Ensure that coverage succeeds even with lazily built runfiles trees for the + # merger tool. 
bazel coverage \ + --nobuild_runfile_links \ --experimental_split_coverage_postprocessing --experimental_fetch_all_coverage_outputs \ --test_env=VERBOSE_COVERAGE=1 --test_output=all //:t &>$TEST_log || fail "Coverage for //:t failed" diff --git a/src/test/shell/bazel/bazel_coverage_java_test.sh b/src/test/shell/bazel/bazel_coverage_java_test.sh index b1a98947eb2e11..1f94464552eeff 100755 --- a/src/test/shell/bazel/bazel_coverage_java_test.sh +++ b/src/test/shell/bazel/bazel_coverage_java_test.sh @@ -192,7 +192,11 @@ public class TestCollatz { } EOF - bazel coverage --test_output=all //:test --coverage_report_generator=@bazel_tools//tools/test:coverage_report_generator --combined_report=lcov &>$TEST_log \ + # Ensure that coverage succeeds even with lazily built runfiles trees for the + # merger tool. + bazel coverage \ + --nobuild_runfile_links \ + --test_output=all //:test --coverage_report_generator=@bazel_tools//tools/test:coverage_report_generator --combined_report=lcov &>$TEST_log \ || echo "Coverage for //:test failed" local expected_result="SF:src/main/com/example/Collatz.java diff --git a/src/test/shell/integration/bazel_java_test.sh b/src/test/shell/integration/bazel_java_test.sh index c72e2b58fa8e4f..2ac96ad5e9258a 100755 --- a/src/test/shell/integration/bazel_java_test.sh +++ b/src/test/shell/integration/bazel_java_test.sh @@ -328,16 +328,16 @@ EOF # Test the genrule with no java dependencies. bazel cquery --max_config_changes_to_show=0 --implicit_deps 'deps(//:without_java)' >& $TEST_log - expect_not_log "foo" - expect_not_log "bar" + expect_not_log ":foo" + expect_not_log ":bar" expect_not_log "embedded_jdk" expect_not_log "remotejdk_" expect_not_log "remotejdk11_" # Test the genrule that specifically depends on :bar_runtime. bazel cquery --max_config_changes_to_show=0 --implicit_deps 'deps(//:with_java)' >& $TEST_log - expect_not_log "foo" - expect_log "bar" + expect_not_log ":foo" + expect_log ":bar" expect_not_log "embedded_jdk" expect_not_log "remotejdk_" expect_not_log "remotejdk11_" @@ -346,8 +346,8 @@ EOF # roolchains attribute. bazel cquery --max_config_changes_to_show=0 --implicit_deps 'deps(//:with_java)' \ --tool_java_runtime_version=foo_javabase >& $TEST_log - expect_not_log "foo" - expect_log "bar" + expect_not_log ":foo" + expect_log ":bar" expect_not_log "embedded_jdk" expect_not_log "remotejdk_" expect_not_log "remotejdk11_" diff --git a/src/test/shell/integration/flagset_test.sh b/src/test/shell/integration/flagset_test.sh new file mode 100755 index 00000000000000..67375c1cdd2b99 --- /dev/null +++ b/src/test/shell/integration/flagset_test.sh @@ -0,0 +1,145 @@ +#!/bin/bash +# +# Copyright 2024 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# An end-to-end test for Skyfocus & working sets. + +# --- begin runfiles.bash initialization --- +set -euo pipefail +if [[ ! -d "${RUNFILES_DIR:-/dev/null}" && ! 
-f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then + if [[ -f "$0.runfiles_manifest" ]]; then + export RUNFILES_MANIFEST_FILE="$0.runfiles_manifest" + elif [[ -f "$0.runfiles/MANIFEST" ]]; then + export RUNFILES_MANIFEST_FILE="$0.runfiles/MANIFEST" + elif [[ -f "$0.runfiles/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then + export RUNFILES_DIR="$0.runfiles" + fi +fi +if [[ -f "${RUNFILES_DIR:-/dev/null}/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then + source "${RUNFILES_DIR}/bazel_tools/tools/bash/runfiles/runfiles.bash" +elif [[ -f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then + source "$(grep -m1 "^bazel_tools/tools/bash/runfiles/runfiles.bash " \ + "$RUNFILES_MANIFEST_FILE" | cut -d ' ' -f 2-)" +else + echo >&2 "ERROR: cannot find @bazel_tools//tools/bash/runfiles:runfiles.bash" + exit 1 +fi +# --- end runfiles.bash initialization --- + +source "$(rlocation "io_bazel/src/test/shell/integration_test_setup.sh")" \ + || { echo "integration_test_setup.sh not found!" >&2; exit 1; } + +function set_up_project_file() { + mkdir -p test + cat > test/BUILD < test/PROJECT.scl < ~/.bazelrc + if [[ $(bazel build --nobuild //test:test --enforce_project_configs --scl_config=test_config &> $TEST_log) ]] then + fail "Scl enabled build expected to fail with config flag in user bazelrc" + fi + expect_log "Found [--define=bar=baz]" +} + +function test_scl_config_plus_passed_bazelrc_fails(){ + set_up_project_file + add_to_bazelrc "build --define=bar=baz" + cat .blazerc >> test/test.bazelrc + if [[ $(bazel --blazerc=test/test.bazelrc build --nobuild //test:test --enforce_project_configs --scl_config=test_config --experimental_enable_scl_dialect &> $TEST_log) ]] then + fail "Scl enabled build expected to fail with config flag in user bazelrc" + fi + expect_log "Found [--define=bar=baz]" +} + +function test_scl_config_plus_starlark_in_passed_blazerc_fails(){ + set_up_project_file + touch test/test.bzl + cat >> test/test.bzl <> test/BUILD <> test/test.bazelrc + if [[ $(bazel --blazerc=test/test.bazelrc build --nobuild //test:test --enforce_project_configs --scl_config=test_config --experimental_enable_scl_dialect &> $TEST_log) ]] then + fail "Scl enabled build expected to fail with starlark flag in user bazelrc" + fi + expect_log "Found [--\/\/test:starlark_flags_always_affect_configuration=yes]" +} + +function test_scl_config_plus_command_line_starlark_flag_fails(){ + set_up_project_file + touch test/test.bzl + cat >> test/test.bzl <> test/BUILD < $TEST_log) ]] + then + fail "Scl enabled build expected to fail with starlark flag on command line" + fi + expect_log "Found [--\/\/test:starlark_flags_always_affect_configuration=yes]" +} + +function test_scl_config_plus_workspace_bazelrc_passes(){ + set_up_project_file + add_to_bazelrc "build --define=foo=bar" + bazel build --nobuild //test:test --enforce_project_configs --scl_config=test_config \ + || fail "Scl enabled build expected to pass with config flag in global bazelrc" +} + +function test_scl_config_plus_starlark_workspace_bazelrc_passes(){ + set_up_project_file + touch test/test.bzl + cat >> test/test.bzl <> test/BUILD < .\+" } -function test_focus_command_dump_keys_verbose() { +function test_dump_keys_verbose() { local -r pkg=${FUNCNAME[0]} mkdir ${pkg}|| fail "cannot mkdir ${pkg}" mkdir -p ${pkg} @@ -131,7 +133,7 @@ EOF expect_not_log "FILE_STATE: .\+ -> .\+ (-.\+%)" } -function test_focus_command_dump_keys_count() { +function test_dump_keys_count() { local -r pkg=${FUNCNAME[0]} mkdir ${pkg}|| fail "cannot mkdir ${pkg}" mkdir -p ${pkg} 
@@ -206,34 +208,28 @@ genrule( ) EOF - # Fresh build, so there is no working set. - bazel info working_set &> "$TEST_log" \ - || fail "expected working_set to be a valid key" - expect_log "No working set found." - expect_not_log "${pkg}/in.txt" - # Initial build with working set. bazel build //${pkg}:g --experimental_working_set=${pkg}/in.txt - bazel info working_set &> "$TEST_log" + bazel dump --skyframe=working_set &> "$TEST_log" expect_log "${pkg}/in.txt" # Working set is expanded. bazel build //${pkg}:g --experimental_working_set=${pkg}/in.txt,${pkg}/in2.txt - bazel info working_set &> "$TEST_log" + bazel dump --skyframe=working_set &> "$TEST_log" expect_log "${pkg}/in.txt" expect_log "${pkg}/in2.txt" # Working set can be defined with files not in the downward transitive - # closure but `info working_set` will not report it. + # closure but `dump --skyframe=working_set` will not report it. bazel build //${pkg}:g --experimental_working_set=${pkg}/in.txt,${pkg}/in2.txt,${pkg}/not.used - bazel info working_set &> "$TEST_log" + bazel dump --skyframe=working_set &> "$TEST_log" expect_log "${pkg}/in.txt" expect_log "${pkg}/in2.txt" expect_not_log "${pkg}/not.used" # The active set is retained for subsequent builds that don't pass the flag. bazel build //${pkg}:g - bazel info working_set &> "$TEST_log" + bazel dump --skyframe=working_set &> "$TEST_log" expect_log "${pkg}/in.txt" expect_log "${pkg}/in2.txt" expect_not_log "${pkg}/not.used" @@ -474,4 +470,58 @@ EOF assert_contains "another change" ${out} } +function test_test_command_runs_skyfocus() { + local -r pkg=${FUNCNAME[0]} + mkdir -p ${pkg} + cat > ${pkg}/in.sh < ${pkg}/BUILD < "$TEST_log" || fail "expected to succeed" + expect_log "${pkg}/in.sh" + expect_log "${pkg}/BUILD" +} + +function test_disallowed_commands_after_focus() { + local -r pkg=${FUNCNAME[0]} + mkdir -p ${pkg} + cat > ${pkg}/in.sh < ${pkg}/BUILD < "$TEST_log" && fail "expected to fail" + expect_log "query is not supported after using Skyfocus" + + bazel cquery //${pkg}:g &> "$TEST_log" && fail "expected to fail" + expect_log "cquery is not supported after using Skyfocus" + + bazel aquery //${pkg}:g &> "$TEST_log" && fail "expected to fail" + expect_log "aquery is not supported after using Skyfocus" + + bazel print_action //${pkg}:g &> "$TEST_log" && fail "expected to fail" + expect_log "print_action is not supported after using Skyfocus" + + bazel info || fail "expected to succeed" + bazel dump --skyframe=summary || fail "expected to succeed" + + bazel build //${pkg}:g || fail "expected to succeed" +} + run_suite "Tests for Skyfocus" diff --git a/src/test/tools/bzlmod/MODULE.bazel.lock b/src/test/tools/bzlmod/MODULE.bazel.lock index 1925135ab4e2a4..bbc4f4221b9df3 100644 --- a/src/test/tools/bzlmod/MODULE.bazel.lock +++ b/src/test/tools/bzlmod/MODULE.bazel.lock @@ -32,8 +32,8 @@ "https://bcr.bazel.build/modules/rules_cc/0.0.9/MODULE.bazel": "836e76439f354b89afe6a911a7adf59a6b2518fafb174483ad78a2a2fde7b1c5", "https://bcr.bazel.build/modules/rules_cc/0.0.9/source.json": "1f1ba6fea244b616de4a554a0f4983c91a9301640c8fe0dd1d410254115c8430", "https://bcr.bazel.build/modules/rules_java/4.0.0/MODULE.bazel": "5a78a7ae82cd1a33cef56dc578c7d2a46ed0dca12643ee45edbb8417899e6f74", - "https://bcr.bazel.build/modules/rules_java/7.6.5/MODULE.bazel": "481164be5e02e4cab6e77a36927683263be56b7e36fef918b458d7a8a1ebadb1", - "https://bcr.bazel.build/modules/rules_java/7.6.5/source.json": "a805b889531d1690e3c72a7a7e47a870d00323186a9904b36af83aa3d053ee8d", + 
"https://bcr.bazel.build/modules/rules_java/7.7.0/MODULE.bazel": "93a198f955574cdbb0e0b089faa3d3a76b6bfffc553c13bd8a96e162e179ac87", + "https://bcr.bazel.build/modules/rules_java/7.7.0/source.json": "529d8a0496f2fb3ca5d120011e66fb17bdc09017d95bed893768c98ff48c8bba", "https://bcr.bazel.build/modules/rules_jvm_external/4.4.2/MODULE.bazel": "a56b85e418c83eb1839819f0b515c431010160383306d13ec21959ac412d2fe7", "https://bcr.bazel.build/modules/rules_jvm_external/4.4.2/source.json": "a075731e1b46bc8425098512d038d416e966ab19684a10a34f4741295642fc35", "https://bcr.bazel.build/modules/rules_license/0.0.3/MODULE.bazel": "627e9ab0247f7d1e05736b59dbb1b6871373de5ad31c3011880b4133cafd4bd0", diff --git a/src/tools/android/java/com/google/devtools/build/android/CompatOptionsParsingException.java b/src/tools/android/java/com/google/devtools/build/android/CompatOptionsParsingException.java new file mode 100644 index 00000000000000..a226f391625bb7 --- /dev/null +++ b/src/tools/android/java/com/google/devtools/build/android/CompatOptionsParsingException.java @@ -0,0 +1,50 @@ +// Copyright 2024 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.android; + +/** + * An exception that's thrown when the {@link CompatShellQuotedParamsFilePreProcessor} fails. + * + *
<p>
This is effectively a fork of OptionsParsingException that's part of the OptionsParser lib. + */ +public class CompatOptionsParsingException extends Exception { + private final String invalidArgument; + + public CompatOptionsParsingException(String message) { + this(message, (String) null); + } + + public CompatOptionsParsingException(String message, String argument) { + super(message); + this.invalidArgument = argument; + } + + public CompatOptionsParsingException(String message, Throwable throwable) { + this(message, null, throwable); + } + + public CompatOptionsParsingException(String message, String argument, Throwable throwable) { + super(message, throwable); + this.invalidArgument = argument; + } + + /** + * Gets the name of the invalid argument or {@code null} if the exception can not determine the + * exact invalid arguments + */ + public String getInvalidArgument() { + return invalidArgument; + } +} diff --git a/src/tools/android/java/com/google/devtools/build/android/CompatShellQuotedParamsFilePreProcessor.java b/src/tools/android/java/com/google/devtools/build/android/CompatShellQuotedParamsFilePreProcessor.java new file mode 100644 index 00000000000000..ab44a1071ba07c --- /dev/null +++ b/src/tools/android/java/com/google/devtools/build/android/CompatShellQuotedParamsFilePreProcessor.java @@ -0,0 +1,163 @@ +// Copyright 2024 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.devtools.build.android; + +import static java.nio.charset.StandardCharsets.UTF_8; + +import java.io.IOException; +import java.io.PushbackReader; +import java.io.Reader; +import java.nio.file.FileSystem; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import javax.annotation.Nullable; + +/** + * Emulates the behavior of the ShellQuotedParamsFilePreProcessor class from Bazel. + * + *
<p>
This class purely emulates the quote escaping/unescaping that + * ShellQuotedParamsFilePreProcessor does. It is intended to be used in ResourceProcessorBusyBox + * (and affiliated tools) in conjunction with JCommander's Parameter annotations instead of the + * Bazel-specific OptionsParser. There is no guarantee that this class will behave 100% identically + * to ShellQuotedParamsFilePreProcessor. + */ +public class CompatShellQuotedParamsFilePreProcessor { + private FileSystem fs; + static final String UNFINISHED_QUOTE_MESSAGE_FORMAT = "Unfinished quote %s at %s"; + + public CompatShellQuotedParamsFilePreProcessor(FileSystem fs) { + this.fs = fs; + } + + public List preProcess(List args) throws CompatOptionsParsingException { + if (!args.isEmpty() && args.get(0).startsWith("@")) { + if (args.size() > 1) { + throw new CompatOptionsParsingException( + String.format("A params file must be the only argument: %s", args), args.get(0)); + } + Path path = fs.getPath(args.get(0).substring(1)); + try { + return parse(path); + } catch (RuntimeException | IOException e) { + throw new CompatOptionsParsingException( + String.format("Error reading params file: %s %s", path, e.getMessage()), + args.get(0), + e); + } + } + return args; + } + + public List parse(Path paramsFile) throws IOException { + List args = new ArrayList<>(); + try (ShellQuotedReader reader = + new ShellQuotedReader(Files.newBufferedReader(paramsFile, UTF_8))) { + String arg; + while ((arg = reader.readArg()) != null) { + args.add(arg); + } + } + return args; + } + + private static class ShellQuotedReader implements AutoCloseable { + + private final PushbackReader reader; + private int position = -1; + + public ShellQuotedReader(Reader reader) { + this.reader = new PushbackReader(reader, 10); + } + + private char read() throws IOException { + int value = reader.read(); + position++; + return (char) value; + } + + private void unread(char value) throws IOException { + reader.unread(value); + position--; + } + + private boolean hasNext() throws IOException { + char value = read(); + boolean hasNext = value != (char) -1; + unread(value); + return hasNext; + } + + @Override + public void close() throws IOException { + reader.close(); + } + + @Nullable + public String readArg() throws IOException { + if (!hasNext()) { + return null; + } + + StringBuilder arg = new StringBuilder(); + + int quoteStart = -1; + boolean quoted = false; + char current; + + while ((current = read()) != (char) -1) { + if (quoted) { + if (current == '\'') { + StringBuilder escapedQuoteRemainder = + new StringBuilder().append(read()).append(read()).append(read()); + if (escapedQuoteRemainder.toString().equals("\\''")) { + arg.append("'"); + } else { + for (char c : escapedQuoteRemainder.reverse().toString().toCharArray()) { + unread(c); + } + quoted = false; + quoteStart = -1; + } + } else { + arg.append(current); + } + } else { + if (current == '\'') { + quoted = true; + quoteStart = position; + } else if (current == '\r') { + char next = read(); + if (next == '\n') { + return arg.toString(); + } else { + unread(next); + return arg.toString(); + } + } else if (Character.isWhitespace(current)) { + return arg.toString(); + } else { + arg.append(current); + } + } + } + if (quoted) { + throw new IOException(String.format(UNFINISHED_QUOTE_MESSAGE_FORMAT, "'", quoteStart)); + } + return arg.toString(); + } + } +} diff --git a/src/tools/android/java/com/google/devtools/build/android/ConvertResourceZipToApkAction.java 
b/src/tools/android/java/com/google/devtools/build/android/ConvertResourceZipToApkAction.java new file mode 100644 index 00000000000000..0a45ce07067799 --- /dev/null +++ b/src/tools/android/java/com/google/devtools/build/android/ConvertResourceZipToApkAction.java @@ -0,0 +1,110 @@ +// Copyright 2024 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package com.google.devtools.build.android; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; +import com.google.devtools.build.android.Converters.ExistingPathConverter; +import com.google.devtools.build.android.Converters.PathConverter; +import com.google.devtools.build.android.aapt2.Aapt2ConfigOptions; +import com.google.devtools.build.android.aapt2.ProtoApk; +import com.google.devtools.build.android.aapt2.ResourceLinker; +import com.google.devtools.build.android.aapt2.StaticLibrary; +import com.google.devtools.common.options.Option; +import com.google.devtools.common.options.OptionDocumentationCategory; +import com.google.devtools.common.options.OptionEffectTag; +import com.google.devtools.common.options.OptionsBase; +import com.google.devtools.common.options.OptionsParser; +import com.google.devtools.common.options.ShellQuotedParamsFilePreProcessor; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; + +/** + * An action that will take a ResourcesZip and convert it into a proto APK. + * + *
<pre>
+ * Example Usage:
+ *   java/com/google/build/android/ConvertResourceZipToApkAction
+ *       --aapt2 path to sdk/aapt2
+ *       --androidJar path to sdk/androidJar
+ *       --resources path to processed resources zip
+ *       --outputApk path to write shrunk ap_
+ * </pre>
+ */ +public final class ConvertResourceZipToApkAction { + public static void main(String[] args) throws Exception { + final Profiler profiler = + LoggingProfiler.createAndStart("convert_proto_apk").startTask("flags"); + // Parse arguments. + OptionsParser optionsParser = + OptionsParser.builder() + .optionsClasses( + Options.class, Aapt2ConfigOptions.class, ResourceProcessorCommonOptions.class) + .argsPreProcessor(new ShellQuotedParamsFilePreProcessor(FileSystems.getDefault())) + .build(); + optionsParser.parseAndExitUponError(args); + Aapt2ConfigOptions aapt2ConfigOptions = optionsParser.getOptions(Aapt2ConfigOptions.class); + Options options = optionsParser.getOptions(Options.class); + System.out.println("WTF: " + options.resourcesZip); + Preconditions.checkArgument(options.resourcesZip != null, "Missing input resource zip."); + profiler.recordEndOf("flags").startTask("setup"); + try (ScopedTemporaryDirectory scopedTmp = + new ScopedTemporaryDirectory("android_resources_tmp"); + ExecutorServiceCloser executorService = ExecutorServiceCloser.createWithFixedPoolOf(15)) { + final ResourcesZip resourcesZip = + ResourcesZip.createFrom( + options.resourcesZip, scopedTmp.subDirectoryOf("merged-resources")); + final ResourceLinker linker = + ResourceLinker.create( + aapt2ConfigOptions.aapt2, executorService, scopedTmp.subDirectoryOf("linking")) + .profileUsing(profiler) + .dependencies(ImmutableList.of(StaticLibrary.from(aapt2ConfigOptions.androidJar))); + profiler.recordEndOf("setup").startTask("convert"); + ProtoApk inputApk = ProtoApk.readFrom(resourcesZip.asApk()); + Files.copy( + aapt2ConfigOptions.resourceTableAsProto + ? inputApk.asApkPath() + : linker.convertProtoApkToBinary(inputApk), + options.outputApk, + StandardCopyOption.REPLACE_EXISTING); + profiler.recordEndOf("convert"); + } + profiler.recordEndOf("convert_proto_apk"); + } + + /** Extra options specific to {@link ConvertResourceZipToApkAction}. 
*/ + public static class Options extends OptionsBase { + @Option( + name = "resources", + defaultValue = "null", + category = "input", + converter = ExistingPathConverter.class, + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.UNKNOWN}, + help = "Path to the resources zip to be shrunk.") + public Path resourcesZip; + + @Option( + name = "outputApk", + defaultValue = "null", + converter = PathConverter.class, + category = "output", + documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, + effectTags = {OptionEffectTag.UNKNOWN}, + help = "Path to the output resource APK.") + public Path outputApk; + } +} diff --git a/src/tools/android/java/com/google/devtools/build/android/ResourceProcessorBusyBox.java b/src/tools/android/java/com/google/devtools/build/android/ResourceProcessorBusyBox.java index 6688423f1dbb85..925088742e49a8 100644 --- a/src/tools/android/java/com/google/devtools/build/android/ResourceProcessorBusyBox.java +++ b/src/tools/android/java/com/google/devtools/build/android/ResourceProcessorBusyBox.java @@ -14,24 +14,23 @@ package com.google.devtools.build.android; +import com.beust.jcommander.JCommander; +import com.beust.jcommander.Parameter; +import com.beust.jcommander.ParameterException; +import com.google.common.collect.ImmutableList; import com.google.devtools.build.android.aapt2.Aapt2Exception; import com.google.devtools.build.android.resources.JavaIdentifierValidator.InvalidJavaIdentifier; import com.google.devtools.build.lib.worker.ProtoWorkerMessageProcessor; import com.google.devtools.build.lib.worker.WorkRequestHandler; -import com.google.devtools.common.options.EnumConverter; -import com.google.devtools.common.options.Option; -import com.google.devtools.common.options.OptionDocumentationCategory; -import com.google.devtools.common.options.OptionEffectTag; -import com.google.devtools.common.options.OptionsBase; -import com.google.devtools.common.options.OptionsParser; -import com.google.devtools.common.options.OptionsParsingException; -import com.google.devtools.common.options.ShellQuotedParamsFilePreProcessor; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; +import java.nio.file.FileSystem; import java.nio.file.FileSystems; +import java.nio.file.Path; import java.time.Duration; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Properties; @@ -124,6 +123,12 @@ void call(String[] args) throws Exception { Aapt2OptimizeAction.main(args); } }, + CONVERT_RESOURCE_ZIP_TO_APK() { + @Override + void call(String[] args) throws Exception { + ConvertResourceZipToApkAction.main(args); + } + }, MERGE_ASSETS() { @Override void call(String[] args) throws Exception { @@ -149,29 +154,27 @@ void call(String[] args) throws Exception { private static final Logger logger = Logger.getLogger(ResourceProcessorBusyBox.class.getName()); private static final Properties properties = loadSiteCustomizations(); - /** Converter for the Tool enum. */ - public static final class ToolConverter extends EnumConverter { - - public ToolConverter() { - super(Tool.class, "resource tool"); - } - } - /** Flag specifications for this action. 
*/ - public static final class Options extends OptionsBase { - @Option( - name = "tool", - defaultValue = "null", - converter = ToolConverter.class, - category = "input", - documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, - effectTags = {OptionEffectTag.UNKNOWN}, - help = + public static final class Options { + @Parameter( + names = "--tool", + description = "The processing tool to execute. " + "Valid tools: GENERATE_BINARY_R, PARSE, " + "GENERATE_AAR, MERGE_MANIFEST, COMPILE_LIBRARY_RESOURCES, " + "LINK_STATIC_LIBRARY, AAPT2_PACKAGE, SHRINK_AAPT2, MERGE_COMPILED.") public Tool tool; + + // See https://jcommander.org/#_main_parameter + @Parameter() private List residue; + + public Options() { + this.residue = new ArrayList<>(); + } + + public List getResidue() { + return residue; + } } public static void main(String[] args) throws Exception { @@ -224,23 +227,33 @@ private static int processRequest(List args, PrintWriter pw) { } private static int processRequest(List args) throws Exception { - OptionsParser optionsParser = - OptionsParser.builder() - .optionsClasses(Options.class) - .allowResidue(true) - .argsPreProcessor(new ShellQuotedParamsFilePreProcessor(FileSystems.getDefault())) - .build(); - Options options; + Options options = new Options(); try { - optionsParser.parse(args); - options = optionsParser.getOptions(Options.class); - options.tool.call(optionsParser.getResidue().toArray(new String[0])); + JCommander jc = new JCommander(options); + // Handle arg files (start with "@") + // NOTE: JCommander handles this automatically, but enabling Main Parameter (aka "residue") + // collection seems to disable this behavior. In case that behavior changes in the future, + // we'll want to _always_ disable this, since JCommander's handling of escaped quotes in arg + // files does not interact well with how the sub-tools handle them. + jc.setExpandAtSign(false); + if (args.size() == 1 && args.get(0).startsWith("@")) { + // Use CompatShellQuotedParamsFilePreProcessor to handle the arg file. + FileSystem fs = FileSystems.getDefault(); + Path argFile = fs.getPath(args.get(0).substring(1)); + CompatShellQuotedParamsFilePreProcessor paramsFilePreProcessor = + new CompatShellQuotedParamsFilePreProcessor(fs); + args = paramsFilePreProcessor.preProcess(ImmutableList.of("@" + argFile)); + } + jc.parse(args.toArray(new String[0])); + ArrayList residue = new ArrayList<>(options.getResidue()); + + options.tool.call(residue.toArray(new String[0])); } catch (UserException e) { // UserException is for exceptions that shouldn't have stack traces recorded, including // AndroidDataMerger.MergeConflictException. 
logger.log(Level.SEVERE, e.getMessage()); return 1; - } catch (OptionsParsingException | IOException | Aapt2Exception | InvalidJavaIdentifier e) { + } catch (ParameterException | IOException | Aapt2Exception | InvalidJavaIdentifier e) { logSuppressed(e); throw e; } catch (Exception e) { diff --git a/third_party/BUILD b/third_party/BUILD index 5d3115cfd2bdb7..e6066647eacd3e 100644 --- a/third_party/BUILD +++ b/third_party/BUILD @@ -10,7 +10,6 @@ filegroup( srcs = glob(["**"]) + [ "//third_party/allocation_instrumenter:srcs", "//third_party/android_dex:srcs", - "//third_party/cncf_udpa:srcs", "//third_party/def_parser:srcs", "//third_party/googleapis:srcs", "//third_party/grpc:srcs", @@ -27,7 +26,6 @@ filegroup( "//third_party/java/proguard:srcs", "//third_party/pprof:srcs", "//third_party/protobuf:srcs", - "//third_party/protoc_gen_validate:srcs", "//third_party/py/abseil:srcs", "//third_party/py/concurrent:srcs", "//third_party/py/dataclasses:srcs", diff --git a/third_party/cncf_udpa/BUILD b/third_party/cncf_udpa/BUILD deleted file mode 100644 index 4f8067667487f9..00000000000000 --- a/third_party/cncf_udpa/BUILD +++ /dev/null @@ -1,29 +0,0 @@ -# gRPC Bazel BUILD file. -# -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -package(default_visibility = ["//visibility:public"]) - -licenses(["notice"]) # Apache v2 - -filegroup( - name = "srcs", - srcs = glob(["**/*"]), - visibility = ["//third_party:__pkg__"], -) - -exports_files([ - "cncf_udpa_0.0.1.patch", -]) diff --git a/third_party/cncf_udpa/cncf_udpa_0.0.1.patch b/third_party/cncf_udpa/cncf_udpa_0.0.1.patch deleted file mode 100644 index 72c8dc0eeeb211..00000000000000 --- a/third_party/cncf_udpa/cncf_udpa_0.0.1.patch +++ /dev/null @@ -1,158 +0,0 @@ -diff --git a/bazel/api_build_system.bzl b/bazel/api_build_system.bzl -index 9fe869a..90320a1 100644 ---- a/bazel/api_build_system.bzl -+++ b/bazel/api_build_system.bzl -@@ -1,7 +1,7 @@ --load("@com_envoyproxy_protoc_gen_validate//bazel:pgv_proto_library.bzl", "pgv_cc_proto_library") -+#load("@com_envoyproxy_protoc_gen_validate//bazel:pgv_proto_library.bzl", "pgv_cc_proto_library") - load("@com_google_protobuf//:protobuf.bzl", _py_proto_library = "py_proto_library") --load("@io_bazel_rules_go//go:def.bzl", "go_test") --load("@io_bazel_rules_go//proto:def.bzl", "go_grpc_library", "go_proto_library") -+#load("@io_bazel_rules_go//go:def.bzl", "go_test") -+#load("@io_bazel_rules_go//proto:def.bzl", "go_grpc_library", "go_proto_library") - load("@rules_proto//proto:defs.bzl", "proto_library") - - _PY_PROTO_SUFFIX = "_py_proto" -@@ -91,18 +91,18 @@ def _xds_cc_py_proto_library( - visibility = visibility, - ) - cc_proto_library_name = name + _CC_PROTO_SUFFIX -- pgv_cc_proto_library( -- name = cc_proto_library_name, -- linkstatic = linkstatic, -- cc_deps = [_cc_proto_mapping(dep) for dep in deps] + [ -- "@com_google_googleapis//google/api:http_cc_proto", -- "@com_google_googleapis//google/api:httpbody_cc_proto", -- "@com_google_googleapis//google/api:annotations_cc_proto", 
-- "@com_google_googleapis//google/rpc:status_cc_proto", -- ], -- deps = [relative_name], -- visibility = ["//visibility:public"], -- ) -+# pgv_cc_proto_library( -+# name = cc_proto_library_name, -+# linkstatic = linkstatic, -+# cc_deps = [_cc_proto_mapping(dep) for dep in deps] + [ -+# "@com_google_googleapis//google/api:http_cc_proto", -+# "@com_google_googleapis//google/api:httpbody_cc_proto", -+# "@com_google_googleapis//google/api:annotations_cc_proto", -+# "@com_google_googleapis//google/rpc:status_cc_proto", -+# ], -+# deps = [relative_name], -+# visibility = ["//visibility:public"], -+# ) - _xds_py_proto_library(name, srcs, deps) - - # Optionally define gRPC services -@@ -127,24 +127,24 @@ def xds_proto_package(srcs = [], deps = [], has_services = False, visibility = [ - if has_services: - compilers = ["@io_bazel_rules_go//proto:go_grpc", "//bazel:pgv_plugin_go"] - -- go_proto_library( -- name = name + _GO_PROTO_SUFFIX, -- compilers = compilers, -- importpath = _GO_IMPORTPATH_PREFIX + native.package_name(), -- proto = name, -- visibility = ["//visibility:public"], -- deps = [_go_proto_mapping(dep) for dep in deps] + [ -- "@com_envoyproxy_protoc_gen_validate//validate:go_default_library", -- "@com_github_golang_protobuf//ptypes:go_default_library_gen", -- "@go_googleapis//google/api:annotations_go_proto", -- "@go_googleapis//google/rpc:status_go_proto", -- "@io_bazel_rules_go//proto/wkt:any_go_proto", -- "@io_bazel_rules_go//proto/wkt:duration_go_proto", -- "@io_bazel_rules_go//proto/wkt:struct_go_proto", -- "@io_bazel_rules_go//proto/wkt:timestamp_go_proto", -- "@io_bazel_rules_go//proto/wkt:wrappers_go_proto", -- ], -- ) -+# go_proto_library( -+# name = name + _GO_PROTO_SUFFIX, -+# compilers = compilers, -+# importpath = _GO_IMPORTPATH_PREFIX + native.package_name(), -+# proto = name, -+# visibility = ["//visibility:public"], -+# deps = [_go_proto_mapping(dep) for dep in deps] + [ -+# "@com_envoyproxy_protoc_gen_validate//validate:go_default_library", -+# "@com_github_golang_protobuf//ptypes:go_default_library_gen", -+# "@go_googleapis//google/api:annotations_go_proto", -+# "@go_googleapis//google/rpc:status_go_proto", -+# "@io_bazel_rules_go//proto/wkt:any_go_proto", -+# "@io_bazel_rules_go//proto/wkt:duration_go_proto", -+# "@io_bazel_rules_go//proto/wkt:struct_go_proto", -+# "@io_bazel_rules_go//proto/wkt:timestamp_go_proto", -+# "@io_bazel_rules_go//proto/wkt:wrappers_go_proto", -+# ], -+# ) - - def xds_cc_test(name, **kwargs): - native.cc_test( -@@ -152,11 +152,11 @@ def xds_cc_test(name, **kwargs): - **kwargs - ) - --def xds_go_test(name, **kwargs): -- go_test( -- name = name, -- **kwargs -- ) -+#def xds_go_test(name, **kwargs): -+# go_test( -+# name = name, -+# **kwargs -+# ) - - # Old names for backward compatibility. - # TODO(roth): Remove these once all callers are migrated to the new names. 
-@@ -166,5 +166,5 @@ def udpa_proto_package(srcs = [], deps = [], has_services = False, visibility = - def udpa_cc_test(name, **kwargs): - xds_cc_test(name, **kwargs) - --def udpa_go_test(name, **kwargs): -- xds_go_test(name, **kwargs) -+#def udpa_go_test(name, **kwargs): -+# xds_go_test(name, **kwargs) -diff --git a/test/build/BUILD b/test/build/BUILD -index b32a1c5..c1df7d3 100644 ---- a/test/build/BUILD -+++ b/test/build/BUILD -@@ -1,4 +1,4 @@ --load("//bazel:api_build_system.bzl", "xds_cc_test", "xds_go_test") -+load("//bazel:api_build_system.bzl", "xds_cc_test") - - licenses(["notice"]) # Apache 2 - -@@ -13,18 +13,18 @@ xds_cc_test( - ], - ) - --xds_go_test( -- name = "go_build_test", -- srcs = ["go_build_test.go"], -- importpath = "go_build_test", -- deps = [ -- "//xds/data/orca/v3:pkg_go_proto", -- "//xds/service/orca/v3:pkg_go_proto", -- "//xds/type/v3:pkg_go_proto", -- # Old names for backward compatibility. -- # TODO(roth): Remove once all callers are updated to use the new names. -- "//udpa/data/orca/v1:pkg_go_proto", -- "//udpa/service/orca/v1:pkg_go_proto", -- "//udpa/type/v1:pkg_go_proto", -- ], --) -+#xds_go_test( -+# name = "go_build_test", -+# srcs = ["go_build_test.go"], -+# importpath = "go_build_test", -+# deps = [ -+# "//xds/data/orca/v3:pkg_go_proto", -+# "//xds/service/orca/v3:pkg_go_proto", -+# "//xds/type/v3:pkg_go_proto", -+# # Old names for backward compatibility. -+# # TODO(roth): Remove once all callers are updated to use the new names. -+# "//udpa/data/orca/v1:pkg_go_proto", -+# "//udpa/service/orca/v1:pkg_go_proto", -+# "//udpa/type/v1:pkg_go_proto", -+# ], -+#) \ No newline at end of file diff --git a/third_party/protoc_gen_validate/BUILD b/third_party/protoc_gen_validate/BUILD deleted file mode 100644 index f5fe829fa9e42c..00000000000000 --- a/third_party/protoc_gen_validate/BUILD +++ /dev/null @@ -1,29 +0,0 @@ -# gRPC Bazel BUILD file. -# -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -package(default_visibility = ["//visibility:public"]) - -licenses(["notice"]) # Apache v2 - -filegroup( - name = "srcs", - srcs = glob(["**/*"]), - visibility = ["//third_party:__pkg__"], -) - -exports_files([ - "protoc_gen_validate.patch", -]) diff --git a/third_party/protoc_gen_validate/protoc_gen_validate.patch b/third_party/protoc_gen_validate/protoc_gen_validate.patch deleted file mode 100644 index c748c478b3f6cd..00000000000000 --- a/third_party/protoc_gen_validate/protoc_gen_validate.patch +++ /dev/null @@ -1,88 +0,0 @@ -diff --git a/bazel/dependency_imports.bzl b/bazel/dependency_imports.bzl -index b8c8df4..d6a09a4 100644 ---- a/bazel/dependency_imports.bzl -+++ b/bazel/dependency_imports.bzl -@@ -1,6 +1,6 @@ --load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies") -+#load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies") - load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps") --load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies") -+#load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies") - load("@rules_proto//proto:repositories.bzl", "rules_proto_dependencies", "rules_proto_toolchains") - load("@rules_python//python:pip.bzl", "pip_install") - -@@ -13,11 +13,11 @@ def _pgv_pip_dependencies(): - ) - - def _pgv_go_dependencies(): -- go_rules_dependencies() -- go_register_toolchains( -- version = "1.15.6", -- ) -- gazelle_dependencies() -+ #go_rules_dependencies() -+ #go_register_toolchains( -+ # version = "1.15.6", -+ #) -+ #gazelle_dependencies() - - def pgv_dependency_imports(): - # Import @com_google_protobuf's dependencies. -@@ -27,7 +27,7 @@ def pgv_dependency_imports(): - _pgv_pip_dependencies() - - # Import rules for the Go compiler. -- _pgv_go_dependencies() -+ #_pgv_go_dependencies() - - # Setup rules_proto. - rules_proto_dependencies() -diff --git a/validate/BUILD b/validate/BUILD -index 5040d64..94d5e94 100644 ---- a/validate/BUILD -+++ b/validate/BUILD -@@ -1,9 +1,9 @@ - load("@com_google_protobuf//:protobuf.bzl", "py_proto_library") --load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") -+#load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") - load("@rules_cc//cc:defs.bzl", "cc_library", "cc_proto_library") - load("@rules_java//java:defs.bzl", "java_proto_library") - load("@rules_proto//proto:defs.bzl", "proto_library") --load("@io_bazel_rules_go//go:def.bzl", "go_library") -+#load("@io_bazel_rules_go//go:def.bzl", "go_library") - - package( - default_visibility = -@@ -31,22 +31,22 @@ py_proto_library( - deps = ["@com_google_protobuf//:protobuf_python"], - ) - --go_proto_library( -- name = "validate_go_proto", -- importpath = "github.com/envoyproxy/protoc-gen-validate/validate", -- proto = ":validate_proto", --) -+#go_proto_library( -+# name = "validate_go_proto", -+# importpath = "github.com/envoyproxy/protoc-gen-validate/validate", -+# proto = ":validate_proto", -+#) - - cc_library( - name = "cc_validate", - hdrs = ["validate.h"], - ) - --go_library( -- name = "validate_go", -- embed = [":validate_go_proto"], -- importpath = "github.com/envoyproxy/protoc-gen-validate/validate", --) -+#go_library( -+# name = "validate_go", -+# embed = [":validate_go_proto"], -+# importpath = "github.com/envoyproxy/protoc-gen-validate/validate", -+#) - - java_proto_library( - name = "validate_java", \ No newline at end of file