From 67a357f2b60a6e0f5002ee80cf08d4ff12410aed Mon Sep 17 00:00:00 2001 From: Nicholas Paun Date: Wed, 21 Aug 2024 09:33:01 -0700 Subject: [PATCH 1/4] Add autoformatting for Bazel files --- .github/workflows/lint.yml | 3 ++ tools/cross/format.py | 81 +++++++++++++++++++++++--------------- 2 files changed, 52 insertions(+), 32 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index ed01099314c..99774f85b8a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -24,6 +24,8 @@ jobs: chmod +x llvm.sh sudo ./llvm.sh 18 sudo apt-get install -y --no-install-recommends clang-format-18 + # buildifier won't install properly if specifying a particular version + go install github.com/bazelbuild/buildtools/buildifier@latest - name: Install pnpm uses: pnpm/action-setup@v4 # The pnpm version will be determined by the `packageManager` field in `.npmrc` @@ -38,3 +40,4 @@ jobs: python3 ./tools/cross/format.py --check env: CLANG_FORMAT: clang-format-18 + BUILDIFIER: /github/home/go/bin/buildifier diff --git a/tools/cross/format.py b/tools/cross/format.py index a30b8bfb318..8d5734eff1f 100644 --- a/tools/cross/format.py +++ b/tools/cross/format.py @@ -6,13 +6,15 @@ import shutil import subprocess from argparse import ArgumentParser, Namespace -from typing import List, Optional, Tuple, Callable +from typing import Optional, Callable +from pathlib import Path from dataclasses import dataclass CLANG_FORMAT = os.environ.get("CLANG_FORMAT", "clang-format") PRETTIER = os.environ.get("PRETTIER", "node_modules/.bin/prettier") RUFF = os.environ.get("RUFF", "ruff") +BUILDIFIER = os.environ.get("BUILDIFIER", "buildifier") def parse_args() -> Namespace: @@ -60,11 +62,16 @@ def parse_args() -> Namespace: return options -def check_clang_format() -> bool: +def check_clang_format() -> None: try: # Run clang-format with --version to check its version output = subprocess.check_output([CLANG_FORMAT, "--version"], encoding="utf-8") - major, _, _ = re.search(r"version\s*(\d+)\.(\d+)\.(\d+)", output).groups() + match = re.search(r"version\s*(\d+)\.(\d+)\.(\d+)", output) + if not match: + logging.error("unable to read clang version") + exit(1) + + major, _, _ = match.groups() if int(major) != 18: logging.error("clang-format version must be 18") exit(1) @@ -74,17 +81,21 @@ def check_clang_format() -> bool: exit(1) -def filter_files_by_exts( - files: List[str], dir_path: str, exts: Tuple[str, ...] -) -> List[str]: +def filter_files_by_globs( + files: list[Path], dir_path: Path, globs: tuple[str, ...] +) -> list[Path]: return [ file for file in files - if (dir_path == "." 
or file.startswith(dir_path + "/")) and file.endswith(exts) + if file.is_relative_to(dir_path) and matches_any_glob(globs, file) ] -def clang_format(files: List[str], check: bool = False) -> bool: +def matches_any_glob(globs: tuple[str, ...], file: Path) -> bool: + return any(file.match(glob) for glob in globs) + + +def clang_format(files: list[Path], check: bool = False) -> bool: cmd = [CLANG_FORMAT] if check: cmd += ["--dry-run", "--Werror"] @@ -94,17 +105,19 @@ def clang_format(files: List[str], check: bool = False) -> bool: return result.returncode == 0 -def prettier(files: List[str], check: bool = False) -> bool: - cmd = [PRETTIER, "--log-level=warn"] - if check: - cmd.append("--check") - else: - cmd.append("--write") +def prettier(files: list[Path], check: bool = False) -> bool: + cmd = [PRETTIER, "--log-level=warn", "--check" if check else "--write"] result = subprocess.run(cmd + files) return result.returncode == 0 -def ruff(files: List[str], check: bool = False) -> bool: +def buildifier(files: list[Path], check: bool = False) -> bool: + cmd = [BUILDIFIER, "--mode=check" if check else "--mode=fix"] + result = subprocess.run(cmd + files) + return result.returncode == 0 + + +def ruff(files: list[Path], check: bool = False) -> bool: if files and not shutil.which(RUFF): msg = "Cannot find ruff, will not format Python" if check: @@ -125,13 +138,13 @@ def ruff(files: List[str], check: bool = False) -> bool: def git_get_modified_files( target: str, source: Optional[str], staged: bool -) -> List[str]: +) -> list[Path]: if staged: files_in_diff = subprocess.check_output( ["git", "diff", "--diff-filter=d", "--name-only", "--cached"], encoding="utf-8", ).splitlines() - return files_in_diff + return [Path(file) for file in files_in_diff] else: merge_base = subprocess.check_output( ["git", "merge-base", target, source or "HEAD"], encoding="utf-8" @@ -141,39 +154,45 @@ def git_get_modified_files( + ([source] if source else []), encoding="utf-8", ).splitlines() - return files_in_diff + return [Path(file) for file in files_in_diff] -def git_get_all_files() -> List[str]: - return subprocess.check_output( +def git_get_all_files() -> list[Path]: + files = subprocess.check_output( ["git", "ls-files", "--cached", "--others", "--exclude-standard"], encoding="utf-8", ).splitlines() + return [Path(file) for file in files] @dataclass class FormatConfig: directory: str - extensions: Tuple[str, ...] - formatter: Callable[[List[str], bool], bool] + globs: tuple[str, ...] 
+ formatter: Callable[[list[Path], bool], bool] FORMATTERS = [ FormatConfig( - directory="src/workerd", extensions=(".c++", ".h"), formatter=clang_format + directory="src/workerd", globs=("*.c++", "*.h"), formatter=clang_format ), FormatConfig( directory="src", - extensions=(".js", ".ts", ".cjs", ".ejs", ".mjs", ".json"), + globs=("*.js", "*.ts", "*.cjs", "*.ejs", "*.mjs"), formatter=prettier, ), - FormatConfig(directory=".", extensions=(".py",), formatter=ruff), - # TODO: lint bazel files + FormatConfig(directory="src", globs=("*.json",), formatter=prettier), + FormatConfig(directory=".", globs=("*.py",), formatter=ruff), + FormatConfig( + directory=".", + globs=("*.bzl", "*.bazel", "WORKSPACE", "BUILD", "BUILD.*"), + formatter=buildifier, + ), ] -def format(config: FormatConfig, files: List[str], check: bool) -> bool: - matching_files = filter_files_by_exts(files, config.directory, config.extensions) +def format(config: FormatConfig, files: list[Path], check: bool) -> bool: + matching_files = filter_files_by_globs(files, Path(config.directory), config.globs) if not matching_files: return True @@ -181,13 +200,11 @@ def format(config: FormatConfig, files: List[str], check: bool) -> bool: return config.formatter(matching_files, check) -def main(): +def main() -> None: options = parse_args() check_clang_format() if options.subcommand == "git": - files = set( - git_get_modified_files(options.target, options.source, options.staged) - ) + files = git_get_modified_files(options.target, options.source, options.staged) else: files = git_get_all_files() From f457f19039b82536b35659c1f9cb898a198e6cd1 Mon Sep 17 00:00:00 2001 From: Nicholas Paun Date: Wed, 21 Aug 2024 09:59:13 -0700 Subject: [PATCH 2/4] Bulk reformat all Bazel files --- BUILD.bazel | 15 +- build/BUILD.dawn | 6 +- build/BUILD.simdutf | 2 +- build/BUILD.sqlite3 | 179 +++++----- build/capnp_embed.bzl | 65 ++-- build/pyodide_bucket.bzl | 326 +++++++++--------- build/wd_cc_benchmark.bzl | 22 +- build/wd_js_bundle.bzl | 1 - build/wd_test.bzl | 1 - build/wd_ts_bundle.bzl | 3 +- build/wd_ts_test.bzl | 2 +- rust-deps/BUILD.bazel | 13 +- src/cloudflare/internal/test/ai/BUILD.bazel | 5 +- src/cloudflare/internal/test/d1/BUILD.bazel | 5 +- .../internal/test/vectorize/BUILD.bazel | 5 +- src/pyodide/BUILD.bazel | 63 ++-- src/workerd/api/BUILD.bazel | 37 +- src/workerd/api/node/BUILD.bazel | 7 +- src/workerd/io/BUILD.bazel | 4 +- src/workerd/server/BUILD.bazel | 4 +- src/workerd/server/tests/BUILD.bazel | 2 +- .../server/tests/compile-tests/BUILD.bazel | 3 +- .../server/tests/inspector/BUILD.bazel | 15 +- src/workerd/server/tests/python/BUILD.bazel | 10 +- .../server/tests/python/import_tests.bzl | 59 ++-- .../server/tests/python/py_wd_test.bzl | 11 +- src/workerd/util/BUILD.bazel | 26 +- types/BUILD.bazel | 15 +- 28 files changed, 480 insertions(+), 426 deletions(-) diff --git a/BUILD.bazel b/BUILD.bazel index fe80c1d4d5c..c5182320260 100644 --- a/BUILD.bazel +++ b/BUILD.bazel @@ -8,10 +8,10 @@ load("@npm//:defs.bzl", "npm_link_all_packages") cc_capnp_library( name = "icudata-embed", srcs = ["icudata-embed.capnp"], + data = ["@com_googlesource_chromium_icu//:icudata"], + defines = ["WORKERD_ICU_DATA_EMBED"], include_prefix = ".", visibility = ["//visibility:public"], - data = ["@com_googlesource_chromium_icu//:icudata"], - defines = [ "WORKERD_ICU_DATA_EMBED" ], ) npm_link_all_packages(name = "node_modules") @@ -75,10 +75,17 @@ config_setting( # Workaround for bazel not supporting negated conditions 
(https://github.com/bazelbuild/bazel-skylib/issues/272) selects.config_setting_group( name = "not_dbg_build", - match_any = [":fast_build", ":opt_build"], + match_any = [ + ":fast_build", + ":opt_build", + ], ) selects.config_setting_group( name = "use_dead_strip", - match_all = ["@platforms//os:macos", ":set_dead_strip", ":not_dbg_build"], + match_all = [ + "@platforms//os:macos", + ":set_dead_strip", + ":not_dbg_build", + ], ) diff --git a/build/BUILD.dawn b/build/BUILD.dawn index deb0d02b6f7..71a2f8e1684 100644 --- a/build/BUILD.dawn +++ b/build/BUILD.dawn @@ -69,7 +69,7 @@ genrule( outs = [ "include/dawn/dawn_proc_table.h", "include/dawn/webgpu.h", - "include/dawn/wire/client/webgpu.h" + "include/dawn/wire/client/webgpu.h", ], cmd = "$(location :dawn_json_generator) " + "--dawn-json $(location src/dawn/dawn.json) " + @@ -1101,12 +1101,12 @@ cc_library( "src", "src/dawn/partition_alloc", ], + visibility = ["//visibility:public"], deps = [ - "@dawn//src/tint/lang/wgsl/features", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/container:flat_hash_set", + "@dawn//src/tint/lang/wgsl/features", ], - visibility = ["//visibility:public"], ) cc_library( diff --git a/build/BUILD.simdutf b/build/BUILD.simdutf index 70543c8bc47..0241f06ee02 100644 --- a/build/BUILD.simdutf +++ b/build/BUILD.simdutf @@ -2,6 +2,6 @@ cc_library( name = "simdutf", srcs = ["simdutf.cpp"], hdrs = ["simdutf.h"], - visibility = ["//visibility:public"], copts = ["-w"], + visibility = ["//visibility:public"], ) diff --git a/build/BUILD.sqlite3 b/build/BUILD.sqlite3 index 31ece3c3f7b..b107aeec313 100644 --- a/build/BUILD.sqlite3 +++ b/build/BUILD.sqlite3 @@ -19,7 +19,13 @@ SQLITE_DEFINES = [ SQLITE_DEFINES_FOR_LEMON = " ".join(["-D" + x for x in SQLITE_DEFINES]) -GIVEN_SOURCES = glob(["src/**/*.h", "src/**/*.c", "ext/**/*.h", "ext/**/*.c"]) +GIVEN_SOURCES = glob([ + "src/**/*.h", + "src/**/*.c", + "ext/**/*.h", + "ext/**/*.c", +]) + GENERATED_SOURCES = [] # Regarding src/vdbe.c: In the normal SQLite build process, the given @@ -42,73 +48,79 @@ cc_binary( genrule( name = "parse_ch", - outs = [ - "parse.h", - "parse.c", - ], srcs = [ "src/parse.y", "tool/lempar.c", ], - tools = [ - ":lemon", + outs = [ + "parse.h", + "parse.c", ], cmd = ( # lemon requires lempar.c to be in the current working # directory, and parse.y has to be in a writable directory # since the output files are created adjacent to it. - "cp $(SRCS) . " + "cp $(SRCS) . " + # Creates parse.c and parse.h - + "&& $(execpath :lemon) {} parse.y ".format(SQLITE_DEFINES_FOR_LEMON) + "&& $(execpath :lemon) {} parse.y ".format(SQLITE_DEFINES_FOR_LEMON) + # Bazel expects genrule outputs to be in RULEDIR - + "&& cp parse.h parse.c $(RULEDIR)" + "&& cp parse.h parse.c $(RULEDIR)" ), + tools = [ + ":lemon", + ], ) -GENERATED_SOURCES += ["parse.h", "parse.c"] + +GENERATED_SOURCES += [ + "parse.h", + "parse.c", +] # ======================================================================== # Constructs fts5parse.{c,h} using the Lemon parser generator. genrule( name = "fts5parse_ch", - outs = [ - "fts5parse.h", - "fts5parse.c", - ], srcs = [ "ext/fts5/fts5parse.y", "tool/lempar.c", ], - tools = [ - ":lemon", + outs = [ + "fts5parse.h", + "fts5parse.c", ], cmd = ( # Same as :parse_ch - "cp $(SRCS) . " - + "&& $(execpath :lemon) {} fts5parse.y ".format(SQLITE_DEFINES_FOR_LEMON) + "cp $(SRCS) . 
" + + "&& $(execpath :lemon) {} fts5parse.y ".format(SQLITE_DEFINES_FOR_LEMON) + # Bazel expects genrule outputs to be in RULEDIR - + "&& cp fts5parse.h fts5parse.c $(RULEDIR)" + "&& cp fts5parse.h fts5parse.c $(RULEDIR)" ), + tools = [ + ":lemon", + ], ) -GENERATED_SOURCES += ["fts5parse.h", "fts5parse.c"] + +GENERATED_SOURCES += [ + "fts5parse.h", + "fts5parse.c", +] # ======================================================================== # Constructs fts5.{c,h}. "FTS5" is version 5 of Full Text Search. filegroup( name = "fts5_sources", - srcs = glob(["ext/fts5/*.h", "ext/fts5/*.c"]), + srcs = glob([ + "ext/fts5/*.h", + "ext/fts5/*.c", + ]), ) - genrule( name = "fts5_ch", - outs = [ - "fts5.h", - "fts5.c", - ], srcs = [ "fts5parse.h", "fts5parse.c", @@ -116,35 +128,40 @@ genrule( "manifest.uuid", ":fts5_sources", ], - tools = [ - "ext/fts5/tool/mkfts5c.tcl", + outs = [ + "fts5.h", + "fts5.c", ], cmd = ( - "mkdir -p $(RULEDIR)/build/ext/fts5/tool " + "mkdir -p $(RULEDIR)/build/ext/fts5/tool " + # Copy all the inputs over so the directory structure is to # mkfts5c.tcl's liking. This ends up putting everything not in # :fts5_sources in there twice, once in build/ and once in # build/ext/fts5/, but it doesn't hurt anything. :shrug: - + "&& cp $(SRCS) $(RULEDIR)/build/ext/fts5 " - + "&& cp $(location manifest) $(RULEDIR)/build " - + "&& cp $(location manifest.uuid) $(RULEDIR)/build " - + "&& cp $(location fts5parse.h) $(RULEDIR)/build " - + "&& cp $(location fts5parse.c) $(RULEDIR)/build " - + "&& cp $(location ext/fts5/tool/mkfts5c.tcl) $(RULEDIR)/build/ext/fts5/tool " + "&& cp $(SRCS) $(RULEDIR)/build/ext/fts5 " + + "&& cp $(location manifest) $(RULEDIR)/build " + + "&& cp $(location manifest.uuid) $(RULEDIR)/build " + + "&& cp $(location fts5parse.h) $(RULEDIR)/build " + + "&& cp $(location fts5parse.c) $(RULEDIR)/build " + + "&& cp $(location ext/fts5/tool/mkfts5c.tcl) $(RULEDIR)/build/ext/fts5/tool " + # Okay, go. - + "&& pushd $(RULEDIR)/build >/dev/null " - + "&& tclsh ext/fts5/tool/mkfts5c.tcl " - + "&& popd >/dev/null " + "&& pushd $(RULEDIR)/build >/dev/null " + + "&& tclsh ext/fts5/tool/mkfts5c.tcl " + + "&& popd >/dev/null " + # Put the outputs where Bazel will see them. - + "&& mv $(RULEDIR)/build/fts5.c $(RULEDIR)/build/ext/fts5/fts5.h $(RULEDIR) " + "&& mv $(RULEDIR)/build/fts5.c $(RULEDIR)/build/ext/fts5/fts5.h $(RULEDIR) " + # Done. Clean up after ourselves. 
- + "&& rm -r $(RULEDIR)/build" + "&& rm -r $(RULEDIR)/build" ), + tools = [ + "ext/fts5/tool/mkfts5c.tcl", + ], ) + GENERATED_SOURCES += ["fts5.c"] # ======================================================================== @@ -157,20 +174,21 @@ genrule( "src/vdbe.c", ], outs = ["opcodes.h"], - tools = ["tool/mkopcodeh.tcl"], cmd = "cat $(location parse.h) $(location src/vdbe.c) | tclsh $(location tool/mkopcodeh.tcl) > $(RULEDIR)/opcodes.h", + tools = ["tool/mkopcodeh.tcl"], ) + GENERATED_SOURCES += ["opcodes.h"] genrule( name = "opcodes_c", srcs = ["opcodes.h"], outs = ["opcodes.c"], - tools = ["tool/mkopcodec.tcl"], cmd = "tclsh $(location tool/mkopcodec.tcl) $(location opcodes.h) > $(RULEDIR)/opcodes.c", + tools = ["tool/mkopcodec.tcl"], ) -GENERATED_SOURCES += ["opcodes.c"] +GENERATED_SOURCES += ["opcodes.c"] cc_binary( name = "mkkeywordhash", @@ -180,10 +198,11 @@ cc_binary( genrule( name = "keywordhash_h", - tools = [":mkkeywordhash"], outs = ["keywordhash.h"], cmd = "$(execpath :mkkeywordhash) > $(RULEDIR)/keywordhash.h", + tools = [":mkkeywordhash"], ) + GENERATED_SOURCES += ["keywordhash.h"] # ======================================================================== @@ -197,11 +216,6 @@ cc_binary( genrule( name = "sqlite3_h", - tools = [ - ":mksourceid", - "tool/mksqlite3h.tcl", - ], - outs = ["sqlite3.h"], srcs = [ # The first few dependencies come from the "sqlite3.h" target # in main.mk in the SQLite source distribution. @@ -216,6 +230,7 @@ genrule( "ext/session/sqlite3session.h", "ext/fts5/fts5.h", ], + outs = ["sqlite3.h"], # mksqlite3h.tcl expects to run in a directory with a very # particular structure, so we have to set that up for it. @@ -223,29 +238,34 @@ genrule( # We use $(RULEDIR)/build since RULEDIR is guaranteed to be # writable. cmd = ( - "mkdir -p $(RULEDIR)/build/src $(RULEDIR)/build/ext/rtree $(RULEDIR)/build/ext/session $(RULEDIR)/build/ext/fts5 " + "mkdir -p $(RULEDIR)/build/src $(RULEDIR)/build/ext/rtree $(RULEDIR)/build/ext/session $(RULEDIR)/build/ext/fts5 " + # TODO(cleanup): come up with a less-repetitive way to do this. - + "&& cp $(location tool/mksqlite3h.tcl) $(RULEDIR)/build " - + "&& cp $(location src/sqlite.h.in) $(RULEDIR)/build/src/sqlite.h.in " - + "&& cp $(location manifest) $(RULEDIR)/build/manifest " - + "&& cp $(location VERSION) $(RULEDIR)/build/VERSION " - + "&& cp $(location ext/rtree/sqlite3rtree.h) $(RULEDIR)/build/ext/rtree/sqlite3rtree.h " - + "&& cp $(location ext/session/sqlite3session.h) $(RULEDIR)/build/ext/session/sqlite3session.h " - + "&& cp $(location ext/fts5/fts5.h) $(RULEDIR)/build/ext/fts5/fts5.h " + "&& cp $(location tool/mksqlite3h.tcl) $(RULEDIR)/build " + + "&& cp $(location src/sqlite.h.in) $(RULEDIR)/build/src/sqlite.h.in " + + "&& cp $(location manifest) $(RULEDIR)/build/manifest " + + "&& cp $(location VERSION) $(RULEDIR)/build/VERSION " + + "&& cp $(location ext/rtree/sqlite3rtree.h) $(RULEDIR)/build/ext/rtree/sqlite3rtree.h " + + "&& cp $(location ext/session/sqlite3session.h) $(RULEDIR)/build/ext/session/sqlite3session.h " + + "&& cp $(location ext/fts5/fts5.h) $(RULEDIR)/build/ext/fts5/fts5.h " + # It also expects to invoke mksourceid. - + "&& cp $(execpath :mksourceid) $(RULEDIR)/build/mksourceid " + "&& cp $(execpath :mksourceid) $(RULEDIR)/build/mksourceid " + # Okay, go. - + "&& pushd $(RULEDIR)/build >/dev/null " - + "&& tclsh mksqlite3h.tcl . > ../sqlite3.h " - + "&& popd >/dev/null " + "&& pushd $(RULEDIR)/build >/dev/null " + + "&& tclsh mksqlite3h.tcl . 
> ../sqlite3.h " + + "&& popd >/dev/null " + # Done. Clean up after ourselves. - + "&& rm -r $(RULEDIR)/build" + "&& rm -r $(RULEDIR)/build" ), + tools = [ + "tool/mksqlite3h.tcl", + ":mksourceid", + ], ) + GENERATED_SOURCES += ["sqlite3.h"] # ======================================================================== @@ -254,14 +274,11 @@ GENERATED_SOURCES += ["sqlite3.h"] genrule( name = "amalgamation", - tools = [ - "tool/mksqlite3c.tcl", - ], + srcs = GIVEN_SOURCES + GENERATED_SOURCES, outs = [ "sqlite3.c", "sqlite3ext.h", ], - srcs = GIVEN_SOURCES + GENERATED_SOURCES, # mksqlite3c.tcl expects to run in a directory with a very # particular structure, so we have to set that up for it. @@ -270,38 +287,44 @@ genrule( # writable. ("tsrc" matches the directory name in the SQLite # Makefile.) cmd = ( - "mkdir -p $(RULEDIR)/build/tsrc $(RULEDIR)/build/tool " + "mkdir -p $(RULEDIR)/build/tsrc $(RULEDIR)/build/tool " + # Copy everything in. There's no subdirectories; everything # but mksqlite3c.tcl just goes in tsrc. - + "&& cp $(SRCS) $(RULEDIR)/build/tsrc " - + "&& cp $(location tool/mksqlite3c.tcl) $(RULEDIR)/build/tool " + "&& cp $(SRCS) $(RULEDIR)/build/tsrc " + + "&& cp $(location tool/mksqlite3c.tcl) $(RULEDIR)/build/tool " + # Build the thing. # # This step ("make sqlite3c") is where the SQLite Makefile # would also construct tclsqlite3.c, but we don't use that # file, so we don't bother building it. - + "&& pushd $(RULEDIR)/build >/dev/null " - + "&& tclsh tool/mksqlite3c.tcl " - + "&& popd >/dev/null " + "&& pushd $(RULEDIR)/build >/dev/null " + + "&& tclsh tool/mksqlite3c.tcl " + + "&& popd >/dev/null " + # Copy the outputs somewhere that Bazel will find them. - + "&& cp $(RULEDIR)/build/sqlite3.c $(RULEDIR)/build/tsrc/sqlite3ext.h $(RULEDIR)/ " + "&& cp $(RULEDIR)/build/sqlite3.c $(RULEDIR)/build/tsrc/sqlite3ext.h $(RULEDIR)/ " + # Done. Clean up after ourselves. - + "&& rm -r $(RULEDIR)/build" + "&& rm -r $(RULEDIR)/build" ), + tools = [ + "tool/mksqlite3c.tcl", + ], ) # ======================================================================== # Actually builds the library. cc_library( name = "sqlite3", - hdrs = ["sqlite3.h", "sqlite3ext.h"], srcs = ["sqlite3.c"], - visibility = ["//visibility:public"], - include_prefix = ".", + hdrs = [ + "sqlite3.h", + "sqlite3ext.h", + ], copts = ["-w"], # Ignore all warnings. This is not our code, we can't fix the warnings. defines = SQLITE_DEFINES, + include_prefix = ".", + visibility = ["//visibility:public"], ) diff --git a/build/capnp_embed.bzl b/build/capnp_embed.bzl index 71d0522672d..6aba244af84 100644 --- a/build/capnp_embed.bzl +++ b/build/capnp_embed.bzl @@ -3,42 +3,41 @@ load("@capnp-cpp//src/capnp:cc_capnp_library.bzl", "capnp_provider") def _capnp_embed_impl(ctx): - return [ - capnp_provider( - includes = [ctx.file.src.dirname], - inputs = [ctx.file.src], - src_prefix = "", - ) - ] + return [ + capnp_provider( + includes = [ctx.file.src.dirname], + inputs = [ctx.file.src], + src_prefix = "", + ), + ] _capnp_embed = rule( - attrs = { - "src": attr.label(allow_single_file = True), - "deps": attr.label_list(), - }, - implementation = _capnp_embed_impl + attrs = { + "src": attr.label(allow_single_file = True), + "deps": attr.label_list(), + }, + implementation = _capnp_embed_impl, ) def capnp_embed( - name, - src, - visibility = None, - target_compatible_with = None, - deps = [], -): - """ - Bazel rule to include `src` in a Cap'n Proto search path for embedding. 
+ name, + src, + visibility = None, + target_compatible_with = None, + deps = []): + """ + Bazel rule to include `src` in a Cap'n Proto search path for embedding. - This is useful for including embedding the output of a `genrule` in a Cap'n Proto schema. - The generated target should be included in `cc_capnp_library` `deps`. - """ - _capnp_embed( - name = name + "_gen", - src = src, - visibility = visibility, - target_compatible_with = target_compatible_with, - deps = deps - ) - native.cc_library( - name = name - ) + This is useful for including embedding the output of a `genrule` in a Cap'n Proto schema. + The generated target should be included in `cc_capnp_library` `deps`. + """ + _capnp_embed( + name = name + "_gen", + src = src, + visibility = visibility, + target_compatible_with = target_compatible_with, + deps = deps, + ) + native.cc_library( + name = name, + ) diff --git a/build/pyodide_bucket.bzl b/build/pyodide_bucket.bzl index 83e4079f568..065542e091d 100644 --- a/build/pyodide_bucket.bzl +++ b/build/pyodide_bucket.bzl @@ -9,167 +9,167 @@ PYODIDE_ALL_WHEELS_ZIP_SHA256 = "c17feb45fdcb4b41eab9c719e69c9e062a8fc88344fcb6b # IMPORTANT: when updating this file in git, check the diff to make sure none of the imports below are being removed unexpectedly PYODIDE_IMPORTS_TO_TEST = { - "aiohttp": [ - "aiohttp" - ], - "aiosignal": [ - "aiosignal" - ], - "annotated-types": [ - "annotated_types" - ], - "anyio": [ - "anyio" - ], - "async-timeout": [ - "async_timeout" - ], - "attrs": [ - "attr", - "attrs" - ], - "certifi": [ - "certifi" - ], - "charset-normalizer": [ - "charset_normalizer" - ], - "distro": [ - "distro" - ], - "fastapi": [ - "fastapi" - ], - "frozenlist": [ - "frozenlist" - ], - "h11": [ - "h11" - ], - "hashlib": [ - "_hashlib" - ], - "httpcore": [ - "httpcore" - ], - "httpx": [ - "httpx" - ], - "idna": [ - "idna" - ], - "jsonpatch": [ - "jsonpatch" - ], - "jsonpointer": [ - "jsonpointer" - ], - "langchain": [ - "langchain" - ], - "langchain-core": [ - "langchain_core", - "langchain_core.callbacks", - "langchain_core.language_models.llms", - "langchain_core.output_parsers", - "langchain_core.prompts" - ], - "langchain_openai": [ - "langchain_openai", - "langchain_openai.chat_models.base" - ], - "langsmith": [ - "langsmith", - "langsmith.client" - ], - "lzma": [ - "_lzma", - "lzma" - ], - "micropip": [ - "micropip" - ], - "multidict": [ - "multidict" - ], - "numpy": [ - "numpy" - ], - "openai": [ - "openai" - ], - "packaging": [ - "packaging" - ], - "pydantic": [ - "pydantic" - ], - "pydantic_core": [ - "pydantic_core" - ], - "pydecimal": [ - "_pydecimal" - ], - "pydoc_data": [ - "pydoc_data" - ], - "pyyaml": [ - "_yaml", - "yaml" - ], - "regex": [ - "regex" - ], - "requests": [ - "requests" - ], - "six": [ - "six" - ], - "sniffio": [ - "sniffio" - ], - "sqlite3": [ - "_sqlite3", - "sqlite3" - ], - "ssl": [ - "_ssl", - "ssl" - ], - "starlette": [ - "starlette", - "starlette.applications", - "starlette.authentication", - "starlette.background", - "starlette.concurrency", - "starlette.config", - "starlette.convertors", - "starlette.datastructures", - "starlette.endpoints", - "starlette.exceptions", - "starlette.formparsers", - "starlette.middleware", - "starlette.middleware.base", - "starlette.requests", - "starlette.responses", - "starlette.routing", - "starlette.schemas" - ], - "tenacity": [ - "tenacity" - ], - "tiktoken": [ - "tiktoken", - "tiktoken_ext" - ], - "typing-extensions": [ - "typing_extensions" - ], - "urllib3": [ - "urllib3" - ], - "yarl": [ - "yarl" - ] + "aiohttp": 
[ + "aiohttp", + ], + "aiosignal": [ + "aiosignal", + ], + "annotated-types": [ + "annotated_types", + ], + "anyio": [ + "anyio", + ], + "async-timeout": [ + "async_timeout", + ], + "attrs": [ + "attr", + "attrs", + ], + "certifi": [ + "certifi", + ], + "charset-normalizer": [ + "charset_normalizer", + ], + "distro": [ + "distro", + ], + "fastapi": [ + "fastapi", + ], + "frozenlist": [ + "frozenlist", + ], + "h11": [ + "h11", + ], + "hashlib": [ + "_hashlib", + ], + "httpcore": [ + "httpcore", + ], + "httpx": [ + "httpx", + ], + "idna": [ + "idna", + ], + "jsonpatch": [ + "jsonpatch", + ], + "jsonpointer": [ + "jsonpointer", + ], + "langchain": [ + "langchain", + ], + "langchain-core": [ + "langchain_core", + "langchain_core.callbacks", + "langchain_core.language_models.llms", + "langchain_core.output_parsers", + "langchain_core.prompts", + ], + "langchain_openai": [ + "langchain_openai", + "langchain_openai.chat_models.base", + ], + "langsmith": [ + "langsmith", + "langsmith.client", + ], + "lzma": [ + "_lzma", + "lzma", + ], + "micropip": [ + "micropip", + ], + "multidict": [ + "multidict", + ], + "numpy": [ + "numpy", + ], + "openai": [ + "openai", + ], + "packaging": [ + "packaging", + ], + "pydantic": [ + "pydantic", + ], + "pydantic_core": [ + "pydantic_core", + ], + "pydecimal": [ + "_pydecimal", + ], + "pydoc_data": [ + "pydoc_data", + ], + "pyyaml": [ + "_yaml", + "yaml", + ], + "regex": [ + "regex", + ], + "requests": [ + "requests", + ], + "six": [ + "six", + ], + "sniffio": [ + "sniffio", + ], + "sqlite3": [ + "_sqlite3", + "sqlite3", + ], + "ssl": [ + "_ssl", + "ssl", + ], + "starlette": [ + "starlette", + "starlette.applications", + "starlette.authentication", + "starlette.background", + "starlette.concurrency", + "starlette.config", + "starlette.convertors", + "starlette.datastructures", + "starlette.endpoints", + "starlette.exceptions", + "starlette.formparsers", + "starlette.middleware", + "starlette.middleware.base", + "starlette.requests", + "starlette.responses", + "starlette.routing", + "starlette.schemas", + ], + "tenacity": [ + "tenacity", + ], + "tiktoken": [ + "tiktoken", + "tiktoken_ext", + ], + "typing-extensions": [ + "typing_extensions", + ], + "urllib3": [ + "urllib3", + ], + "yarl": [ + "yarl", + ], } diff --git a/build/wd_cc_benchmark.bzl b/build/wd_cc_benchmark.bzl index 1a409fbd733..f0119f3b1f1 100644 --- a/build/wd_cc_benchmark.bzl +++ b/build/wd_cc_benchmark.bzl @@ -16,17 +16,17 @@ def wd_cc_benchmark( # bazel does not support shared linkage on macOS and it is broken on Windows, so only # enable this on Linux. 
linkstatic = select({ - "@platforms//os:linux": 0, - "//conditions:default": 1, + "@platforms//os:linux": 0, + "//conditions:default": 1, }), linkopts = linkopts + select({ - "@//:use_dead_strip": ["-Wl,-dead_strip", "-Wl,-no_exported_symbols"], - "//conditions:default": [""], + "@//:use_dead_strip": ["-Wl,-dead_strip", "-Wl,-no_exported_symbols"], + "//conditions:default": [""], }), visibility = visibility, deps = deps + [ - "@com_google_benchmark//:benchmark_main", - "//src/workerd/tests:bench-tools" + "@com_google_benchmark//:benchmark_main", + "//src/workerd/tests:bench-tools", ], # use the same malloc we use for server malloc = "//src/workerd/server:malloc", @@ -36,9 +36,9 @@ def wd_cc_benchmark( # generate benchmark report native.genrule( - name = name + "@benchmark.csv", - outs = [name + ".benchmark.csv"], - srcs = [name], - cmd = "./$(location {}) --benchmark_format=csv > \"$@\"".format(name), - tags = ["off-by-default", "benchmark_report"], + name = name + "@benchmark.csv", + outs = [name + ".benchmark.csv"], + srcs = [name], + cmd = "./$(location {}) --benchmark_format=csv > \"$@\"".format(name), + tags = ["off-by-default", "benchmark_report"], ) diff --git a/build/wd_js_bundle.bzl b/build/wd_js_bundle.bzl index 30f369a5073..a97f612c753 100644 --- a/build/wd_js_bundle.bzl +++ b/build/wd_js_bundle.bzl @@ -216,7 +216,6 @@ def wd_js_bundle_capnp( ) return data - def wd_js_bundle(name, import_name, *args, **kwargs): data = wd_js_bundle_capnp(name + ".capnp", import_name, *args, **kwargs) cc_capnp_library( diff --git a/build/wd_test.bzl b/build/wd_test.bzl index 8a08a32c209..a182acd9524 100644 --- a/build/wd_test.bzl +++ b/build/wd_test.bzl @@ -47,7 +47,6 @@ def wd_test( "$(location {})".format(src), ] + args - _wd_test( name = name, data = data, diff --git a/build/wd_ts_bundle.bzl b/build/wd_ts_bundle.bzl index cd8903a7faf..0f35cd2975a 100644 --- a/build/wd_ts_bundle.bzl +++ b/build/wd_ts_bundle.bzl @@ -1,7 +1,7 @@ load("@aspect_rules_ts//ts:defs.bzl", "ts_config", "ts_project") +load("@capnp-cpp//src/capnp:cc_capnp_library.bzl", "cc_capnp_library") load("@npm//:eslint/package_json.bzl", eslint_bin = "bin") load("@workerd//:build/wd_js_bundle.bzl", "wd_js_bundle_capnp") -load("@capnp-cpp//src/capnp:cc_capnp_library.bzl", "cc_capnp_library") def _to_js(file_name): if file_name.endswith(".ts"): @@ -100,7 +100,6 @@ def wd_ts_bundle_capnp( ) return data - def wd_ts_bundle(name, import_name, *args, **kwargs): data = wd_ts_bundle_capnp(name + ".capnp", import_name, *args, **kwargs) cc_capnp_library( diff --git a/build/wd_ts_test.bzl b/build/wd_ts_test.bzl index b6d96bc8234..c1cae449d77 100644 --- a/build/wd_ts_test.bzl +++ b/build/wd_ts_test.bzl @@ -1,6 +1,6 @@ load("@aspect_rules_js//js:defs.bzl", "js_test") -load("//:build/wd_ts_project.bzl", "wd_ts_project") load("//:build/typescript.bzl", "js_name", "module_name") +load("//:build/wd_ts_project.bzl", "wd_ts_project") def wd_ts_test(src, deps = [], **kwargs): """Bazel rule to compile and run a TypeScript test""" diff --git a/rust-deps/BUILD.bazel b/rust-deps/BUILD.bazel index 07ba20ff357..9df61dc83d3 100644 --- a/rust-deps/BUILD.bazel +++ b/rust-deps/BUILD.bazel @@ -10,6 +10,7 @@ selects.config_setting_group( "@platforms//cpu:x86_64", ], ) + selects.config_setting_group( name = "linux_arm64", match_all = [ @@ -17,6 +18,7 @@ selects.config_setting_group( "@platforms//cpu:aarch64", ], ) + selects.config_setting_group( name = "macos_x64", match_all = [ @@ -24,6 +26,7 @@ selects.config_setting_group( "@platforms//cpu:x86_64", ], ) + 
selects.config_setting_group( name = "macos_arm64", match_all = [ @@ -31,6 +34,7 @@ selects.config_setting_group( "@platforms//cpu:aarch64", ], ) + selects.config_setting_group( name = "win_x64", match_all = [ @@ -38,6 +42,7 @@ selects.config_setting_group( "@platforms//cpu:x86_64", ], ) + CARGO_BAZEL = select({ ":linux_x64": "@cargo_bazel_linux_x64//file:downloaded", ":linux_arm64": "@cargo_bazel_linux_arm64//file:downloaded", @@ -54,6 +59,8 @@ crates_vendor( cargo_bazel = CARGO_BAZEL, mode = "remote", packages = PACKAGES, + # Not needed, we have a well-defined set of supported platforms + render_config = render_config(generate_target_compatible_with = False), supported_platform_triples = [ "aarch64-apple-darwin", "x86_64-apple-darwin", @@ -61,8 +68,6 @@ crates_vendor( "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc", ], - # Not needed, we have a well-defined set of supported platforms - render_config = render_config(generate_target_compatible_with = False), ) # TODO(cleanup): Switching to rust_library() would reduce the overhead of this by not creating a @@ -70,12 +75,12 @@ crates_vendor( rust_static_library( name = "rust-deps", srcs = ["src/lib.rs"], - stamp = -1, # default to bazel --stamp flag # When stamping is enabled this will be replaced by the corresponding # value in ./bazel-out/volatile-status.txt rustc_env = { - "WORKERD_VERSION": "{WORKERD_VERSION}" + "WORKERD_VERSION": "{WORKERD_VERSION}", }, + stamp = -1, # default to bazel --stamp flag visibility = ["//visibility:public"], deps = [ # On Windows, CXX is broken under Bazel (https://github.com/dtolnay/cxx/pull/125). diff --git a/src/cloudflare/internal/test/ai/BUILD.bazel b/src/cloudflare/internal/test/ai/BUILD.bazel index 4f1d267c545..d8613a14a23 100644 --- a/src/cloudflare/internal/test/ai/BUILD.bazel +++ b/src/cloudflare/internal/test/ai/BUILD.bazel @@ -9,5 +9,8 @@ wd_test( wd_test( src = "python-ai-api-test.wd-test", args = ["--experimental"], - data = glob(["*.js", "*.py"]), + data = glob([ + "*.js", + "*.py", + ]), ) diff --git a/src/cloudflare/internal/test/d1/BUILD.bazel b/src/cloudflare/internal/test/d1/BUILD.bazel index 32dc957cefd..702e399248f 100644 --- a/src/cloudflare/internal/test/d1/BUILD.bazel +++ b/src/cloudflare/internal/test/d1/BUILD.bazel @@ -9,5 +9,8 @@ wd_test( wd_test( src = "python-d1-api-test.wd-test", args = ["--experimental"], - data = glob(["*.py", "*.js"]), + data = glob([ + "*.py", + "*.js", + ]), ) diff --git a/src/cloudflare/internal/test/vectorize/BUILD.bazel b/src/cloudflare/internal/test/vectorize/BUILD.bazel index d16bb9c4be1..d64efdcc987 100644 --- a/src/cloudflare/internal/test/vectorize/BUILD.bazel +++ b/src/cloudflare/internal/test/vectorize/BUILD.bazel @@ -7,5 +7,8 @@ wd_test( wd_test( src = "python-vectorize-api-test.wd-test", - data = glob(["*.py", "*.js"]), + data = glob([ + "*.py", + "*.js", + ]), ) diff --git a/src/pyodide/BUILD.bazel b/src/pyodide/BUILD.bazel index d68bec68543..71fc7b9ec16 100644 --- a/src/pyodide/BUILD.bazel +++ b/src/pyodide/BUILD.bazel @@ -1,8 +1,9 @@ load("@bazel_skylib//rules:copy_file.bzl", "copy_file") -load("@bazel_skylib//rules:write_file.bzl", "write_file") load("@bazel_skylib//rules:expand_template.bzl", "expand_template") +load("@bazel_skylib//rules:write_file.bzl", "write_file") load("@capnp-cpp//src/capnp:cc_capnp_library.bzl", "cc_capnp_library") load("//:build/capnp_embed.bzl", "capnp_embed") +load("//:build/pyodide_bucket.bzl", "PYODIDE_PACKAGE_BUCKET_URL") load("//:build/wd_ts_bundle.bzl", "wd_ts_bundle_capnp") copy_file( @@ -20,7 +21,7 @@ 
capnp_embed( copy_file( name = "python_entrypoint_file", src = "python-entrypoint.js", - out = "generated/python-entrypoint.js" + out = "generated/python-entrypoint.js", ) capnp_embed( @@ -53,9 +54,9 @@ cc_capnp_library( visibility = ["//visibility:public"], deps = [ ":pyodide_extra_file_embed", + ":pyodide_lock_file_embed", ":pyodide_packages_archive_embed", ":python_entrypoint_file_embed", - ":pyodide_lock_file_embed" ], ) @@ -119,16 +120,14 @@ REPLACEMENTS = [ ], [ "reportUndefinedSymbols()", - "reportUndefinedSymbolsNoOp()" + "reportUndefinedSymbolsNoOp()", ], [ "crypto.getRandomValues(", "getRandomValues(Module, ", - ] + ], ] -load("//:build/pyodide_bucket.bzl", "PYODIDE_PACKAGE_BUCKET_URL") - PYODIDE_BUCKET_MODULE = json.encode({ "PYODIDE_PACKAGE_BUCKET_URL": PYODIDE_PACKAGE_BUCKET_URL, }) @@ -148,29 +147,31 @@ expand_template( data = wd_ts_bundle_capnp( name = "pyodide.capnp", - modules = ["python-entrypoint-helper.ts"], + eslintrc_json = ".eslintrc.json", import_name = "pyodide", internal_data_modules = ["generated/python_stdlib.zip"] + glob([ "internal/*.py", "internal/patches/*.py", "internal/topLevelEntropy/*.py", ]), - internal_json_modules = ["generated/pyodide-lock.json", "generated/pyodide-bucket.json"], + internal_json_modules = [ + "generated/pyodide-lock.json", + "generated/pyodide-bucket.json", + ], internal_modules = [ "generated/pyodide.asm.js", - ] + glob([ - "internal/*.ts", - "internal/*.js", - "internal/topLevelEntropy/*.ts", - "internal/topLevelEntropy/*.js", - "types/*.ts", - "types/*/*.ts", - ], allow_empty = True), + ] + glob( + [ + "internal/*.ts", + "internal/*.js", + "internal/topLevelEntropy/*.ts", + "internal/topLevelEntropy/*.js", + "types/*.ts", + "types/*/*.ts", + ], + allow_empty = True, + ), internal_wasm_modules = ["generated/pyodide.asm.wasm"], - schema_id = "0xbcc8f57c63814005", - tsconfig_json = "tsconfig.json", - eslintrc_json = ".eslintrc.json", - lint = False, js_deps = [ "pyodide.asm.js@rule", "pyodide.asm.wasm@rule", @@ -178,25 +179,29 @@ data = wd_ts_bundle_capnp( "python_stdlib.zip@rule", "pyodide-bucket.json@rule", ], + lint = False, + modules = ["python-entrypoint-helper.ts"], + schema_id = "0xbcc8f57c63814005", + tsconfig_json = "tsconfig.json", ) cc_capnp_library( name = "pyodide", srcs = ["pyodide.capnp"], + data = data, + include_prefix = "pyodide", strip_include_prefix = "", visibility = ["//visibility:public"], - data = data, deps = ["@workerd//src/workerd/jsg:modules_capnp"], - include_prefix = "pyodide", ) - genrule( name = "pyodide.capnp.bin@rule", - tools = ["@capnp-cpp//src/capnp:capnp_tool"], - srcs = ["pyodide.capnp", "//src/workerd/jsg:modules.capnp"] + data, + srcs = [ + "pyodide.capnp", + "//src/workerd/jsg:modules.capnp", + ] + data, outs = ["pyodide.capnp.bin"], - visibility = ["//visibility:public"], cmd = " ".join([ # Annoying logic to deal with different paths in workerd vs downstream. 
# Either need "-I src" in workerd or -I external/workerd/src downstream @@ -208,5 +213,7 @@ genrule( "-I $$INCLUDE", "-o binary", "> $@", - ]) + ]), + tools = ["@capnp-cpp//src/capnp:capnp_tool"], + visibility = ["//visibility:public"], ) diff --git a/src/workerd/api/BUILD.bazel b/src/workerd/api/BUILD.bazel index d07a35cb498..a742ec1c324 100644 --- a/src/workerd/api/BUILD.bazel +++ b/src/workerd/api/BUILD.bazel @@ -71,10 +71,10 @@ wd_cc_library( ":html-rewriter", "//src/pyodide", "//src/pyodide:pyodide_extra_capnp", + "//src/workerd/api/node", "//src/workerd/io", "//src/workerd/jsg:rtti", "//src/workerd/server:workerd_capnp", - "//src/workerd/api/node", ], ) @@ -146,19 +146,21 @@ wd_cc_capnp_library( visibility = ["//visibility:public"], ) -[kj_test( - src = f, - deps = [ - "//src/workerd/io", - ], -) for f in [ - "actor-state-test.c++", - "basics-test.c++", - "crypto/aes-test.c++", - "crypto/impl-test.c++", - "streams/queue-test.c++", - "streams/standard-test.c++", - "util-test.c++", +[ + kj_test( + src = f, + deps = [ + "//src/workerd/io", + ], + ) + for f in [ + "actor-state-test.c++", + "basics-test.c++", + "crypto/aes-test.c++", + "crypto/impl-test.c++", + "streams/queue-test.c++", + "streams/standard-test.c++", + "util-test.c++", ] ] @@ -219,14 +221,17 @@ kj_test( wd_test( src = "tests/js-rpc-socket-test.wd-test", - args = ["--experimental", "--no-verbose"], + args = [ + "--experimental", + "--no-verbose", + ], data = ["tests/js-rpc-test.js"], ) [wd_test( src = f, - data = [f.removesuffix(".ts-wd-test") + ".ts"], args = ["--experimental"], + data = [f.removesuffix(".ts-wd-test") + ".ts"], ) for f in glob( ["**/*.ts-wd-test"], )] diff --git a/src/workerd/api/node/BUILD.bazel b/src/workerd/api/node/BUILD.bazel index ede2ac760f5..0be107d8faf 100644 --- a/src/workerd/api/node/BUILD.bazel +++ b/src/workerd/api/node/BUILD.bazel @@ -4,12 +4,15 @@ load("//:build/wd_test.bzl", "wd_test") wd_cc_library( name = "node", - srcs = glob(["**/*.c++"], exclude = ["**/*-test.c++"]), + srcs = glob( + ["**/*.c++"], + exclude = ["**/*-test.c++"], + ), hdrs = glob(["**/*.h"]), implementation_deps = [ "@capnp-cpp//src/kj/compat:kj-gzip", - "@simdutf", "@nbytes", + "@simdutf", ], visibility = ["//visibility:public"], deps = [ diff --git a/src/workerd/io/BUILD.bazel b/src/workerd/io/BUILD.bazel index e0136bba70a..a3ae7f42010 100644 --- a/src/workerd/io/BUILD.bazel +++ b/src/workerd/io/BUILD.bazel @@ -126,8 +126,8 @@ wd_cc_library( ":worker-interface_capnp", "//src/workerd/jsg:memory-tracker", "//src/workerd/util:own-util", - "@capnp-cpp//src/capnp:capnpc", "@capnp-cpp//src/capnp:capnp-rpc", + "@capnp-cpp//src/capnp:capnpc", "@capnp-cpp//src/kj:kj-async", ], ) @@ -184,8 +184,8 @@ wd_cc_library( visibility = ["//visibility:public"], deps = [ ":worker-interface_capnp", - "@capnp-cpp//src/capnp:capnpc", "@capnp-cpp//src/capnp:capnp-rpc", + "@capnp-cpp//src/capnp:capnpc", "@capnp-cpp//src/capnp/compat:http-over-capnp", ], ) diff --git a/src/workerd/server/BUILD.bazel b/src/workerd/server/BUILD.bazel index 2684c59e312..f711f1a5da4 100644 --- a/src/workerd/server/BUILD.bazel +++ b/src/workerd/server/BUILD.bazel @@ -120,12 +120,12 @@ wd_cc_library( "//src/workerd/api:html-rewriter", "//src/workerd/api:pyodide", "//src/workerd/api:rtti", + "//src/workerd/api/node", "//src/workerd/io", "//src/workerd/io:worker-entrypoint", "//src/workerd/jsg", "//src/workerd/util:perfetto", "@capnp-cpp//src/kj/compat:kj-tls", - "//src/workerd/api/node" ], ) @@ -164,10 +164,8 @@ kj_test( ], ) - copy_file( name = 
"pyodide.capnp.bin@rule", src = "//src/pyodide:pyodide.capnp.bin@rule", out = "pyodide.capnp.bin", ) - diff --git a/src/workerd/server/tests/BUILD.bazel b/src/workerd/server/tests/BUILD.bazel index de8f8075e51..94e6ab9b9d8 100644 --- a/src/workerd/server/tests/BUILD.bazel +++ b/src/workerd/server/tests/BUILD.bazel @@ -3,7 +3,7 @@ load("@aspect_rules_js//npm:defs.bzl", "npm_package") js_library( name = "server-harness_js_lib", - srcs = [ "server-harness.mjs" ], + srcs = ["server-harness.mjs"], ) npm_package( diff --git a/src/workerd/server/tests/compile-tests/BUILD.bazel b/src/workerd/server/tests/compile-tests/BUILD.bazel index 16d0f0a2114..0e6df7404cd 100644 --- a/src/workerd/server/tests/compile-tests/BUILD.bazel +++ b/src/workerd/server/tests/compile-tests/BUILD.bazel @@ -9,10 +9,9 @@ sh_test( ], data = [ "compile-helloworld-test.ok", - "//src/workerd/server:workerd", "//samples:helloworld/config.capnp", "//samples:helloworld/worker.js", + "//src/workerd/server:workerd", ], tags = ["no-qemu"], ) - diff --git a/src/workerd/server/tests/inspector/BUILD.bazel b/src/workerd/server/tests/inspector/BUILD.bazel index 7b36bc8a121..a6ad636e4af 100644 --- a/src/workerd/server/tests/inspector/BUILD.bazel +++ b/src/workerd/server/tests/inspector/BUILD.bazel @@ -2,18 +2,17 @@ load("@aspect_rules_js//js:defs.bzl", "js_test") js_test( name = "inspector-test", + data = [ + ":config.capnp", + ":index.mjs", + "//:node_modules/@workerd/test", + "//:node_modules/chrome-remote-interface", + "//src/workerd/server:workerd", + ], entry_point = "driver.mjs", env = { "WORKERD_BINARY": "$(rootpath //src/workerd/server:workerd)", "WORKERD_CONFIG": "$(rootpath :config.capnp)", }, - data = [ - "//:node_modules/chrome-remote-interface", - "//:node_modules/@workerd/test", - "//src/workerd/server:workerd", - ":config.capnp", - ":index.mjs", - ], tags = ["js-test"], ) - diff --git a/src/workerd/server/tests/python/BUILD.bazel b/src/workerd/server/tests/python/BUILD.bazel index 94bdaa39f57..318b6483243 100644 --- a/src/workerd/server/tests/python/BUILD.bazel +++ b/src/workerd/server/tests/python/BUILD.bazel @@ -1,9 +1,9 @@ +load("@bazel_skylib//rules:copy_file.bzl", "copy_file") +load("//:build/pyodide_bucket.bzl", "PYODIDE_IMPORTS_TO_TEST") load("//:build/wd_test.bzl", "wd_test") - +load("//src/workerd/server/tests/python:import_tests.bzl", "gen_import_tests") load("//src/workerd/server/tests/python:py_wd_test.bzl", "py_wd_test") -load("@bazel_skylib//rules:copy_file.bzl", "copy_file") - # pyodide_dev.capnp.bin represents a custom pyodide version "dev" that is generated # at build time using the latest contents of the src/pyodide directory. 
# This is used to run tests to ensure that they are always run against the latest build of @@ -11,7 +11,7 @@ load("@bazel_skylib//rules:copy_file.bzl", "copy_file") copy_file( name = "pyodide_dev.capnp.bin@rule", src = "//src/pyodide:pyodide.capnp.bin", - out = "pyodide-bundle-cache/pyodide_dev.capnp.bin" + out = "pyodide-bundle-cache/pyodide_dev.capnp.bin", ) py_wd_test( @@ -58,6 +58,4 @@ py_wd_test( ), ) -load("//src/workerd/server/tests/python:import_tests.bzl", "gen_import_tests") -load("//:build/pyodide_bucket.bzl", "PYODIDE_IMPORTS_TO_TEST") gen_import_tests(PYODIDE_IMPORTS_TO_TEST) diff --git a/src/workerd/server/tests/python/import_tests.bzl b/src/workerd/server/tests/python/import_tests.bzl index 02b13da96dd..81967d041e8 100644 --- a/src/workerd/server/tests/python/import_tests.bzl +++ b/src/workerd/server/tests/python/import_tests.bzl @@ -1,15 +1,14 @@ load("@bazel_skylib//rules:write_file.bzl", "write_file") - load("//src/workerd/server/tests/python:py_wd_test.bzl", "py_wd_test") def generate_import_py_file(imports): - res = "" - for imp in imports: - res += "import "+imp+"\n" + res = "" + for imp in imports: + res += "import " + imp + "\n" - res += "def test():\n" - res += " pass" - return res + res += "def test():\n" + res += " pass" + return res WD_FILE_TEMPLATE = """ using Workerd = import "/workerd/workerd.capnp"; @@ -34,29 +33,29 @@ const unitTests :Workerd.Config = ( );""" def generate_wd_test_file(requirement): - return WD_FILE_TEMPLATE.format(requirement, requirement) + return WD_FILE_TEMPLATE.format(requirement, requirement) # to_test is a dictionary from library name to list of imports def gen_import_tests(to_test): - for lib in to_test.keys(): - worker_py_fname = "import/{}/worker.py".format(lib) - wd_test_fname = "import/{}/import.wd-test".format(lib) - write_file( - name = worker_py_fname + "@rule", - out = worker_py_fname, - content = [generate_import_py_file(to_test[lib])], - tags = ["slow"], - ) - write_file( - name = wd_test_fname + "@rule", - out = wd_test_fname, - content = [generate_wd_test_file(lib)], - tags = ["slow"], - ) - - py_wd_test( - src = wd_test_fname, - args = ["--experimental", "--pyodide-package-disk-cache-dir", "../all_pyodide_wheels"], - data = [worker_py_fname, "@all_pyodide_wheels//:whls"], - tags = ["slow"], - ) + for lib in to_test.keys(): + worker_py_fname = "import/{}/worker.py".format(lib) + wd_test_fname = "import/{}/import.wd-test".format(lib) + write_file( + name = worker_py_fname + "@rule", + out = worker_py_fname, + content = [generate_import_py_file(to_test[lib])], + tags = ["slow"], + ) + write_file( + name = wd_test_fname + "@rule", + out = wd_test_fname, + content = [generate_wd_test_file(lib)], + tags = ["slow"], + ) + + py_wd_test( + src = wd_test_fname, + args = ["--experimental", "--pyodide-package-disk-cache-dir", "../all_pyodide_wheels"], + data = [worker_py_fname, "@all_pyodide_wheels//:whls"], + tags = ["slow"], + ) diff --git a/src/workerd/server/tests/python/py_wd_test.bzl b/src/workerd/server/tests/python/py_wd_test.bzl index a6fdd0b7545..0392a368b2a 100644 --- a/src/workerd/server/tests/python/py_wd_test.bzl +++ b/src/workerd/server/tests/python/py_wd_test.bzl @@ -1,12 +1,11 @@ load("//:build/wd_test.bzl", "wd_test") def py_wd_test( - src, - data = [], - name = None, - args = [], - **kwargs -): + src, + data = [], + name = None, + args = [], + **kwargs): data += ["pyodide_dev.capnp.bin@rule"] args += ["--pyodide-bundle-disk-cache-dir", "$(location pyodide_dev.capnp.bin@rule)/.."] diff --git 
a/src/workerd/util/BUILD.bazel b/src/workerd/util/BUILD.bazel index 39a1dac9b1c..33f0236190a 100644 --- a/src/workerd/util/BUILD.bazel +++ b/src/workerd/util/BUILD.bazel @@ -172,17 +172,19 @@ wd_cc_library( exports_files(["autogate.h"]) -[kj_test( - src = f, - deps = [ - ":util", - ], -) for f in [ - "batch-queue-test.c++", - "mimetype-test.c++", - "wait-list-test.c++", - "duration-exceeded-logger-test.c++", - "string-buffer-test.c++", +[ + kj_test( + src = f, + deps = [ + ":util", + ], + ) + for f in [ + "batch-queue-test.c++", + "mimetype-test.c++", + "wait-list-test.c++", + "duration-exceeded-logger-test.c++", + "string-buffer-test.c++", ] ] @@ -212,4 +214,4 @@ kj_test( deps = [ ":uuid", ], -) \ No newline at end of file +) diff --git a/types/BUILD.bazel b/types/BUILD.bazel index 2ea0d5feaee..951ac8527fb 100644 --- a/types/BUILD.bazel +++ b/types/BUILD.bazel @@ -5,12 +5,18 @@ load("//:build/wd_ts_type_test.bzl", "wd_ts_type_test") wd_ts_project( name = "types_lib", - srcs = glob(["src/**/*", "scripts/*.ts"], exclude = ["src/worker/**/*"]), + srcs = glob( + [ + "src/**/*", + "scripts/*.ts", + ], + exclude = ["src/worker/**/*"], + ), deps = [ "//:node_modules/@types", - "//:node_modules/esbuild", "//:node_modules/@workerd/jsg", "//:node_modules/capnp-ts", + "//:node_modules/esbuild", "//:node_modules/prettier", "//:node_modules/typescript", ], @@ -29,9 +35,8 @@ js_run_binary( srcs = [ "scripts/config.capnp", ":types_worker", - "//src/workerd/server:workerd", "//:node_modules/prettier", - + "//src/workerd/server:workerd", ], out_dirs = ["definitions"], silent_on_success = False, # Always enable logging for debugging @@ -51,9 +56,9 @@ js_binary( js_run_binary( name = "types_worker", srcs = [ - "//:node_modules/esbuild", "//:node_modules/@workerd/jsg", "//:node_modules/capnp-ts", + "//:node_modules/esbuild", "//:node_modules/typescript", "//src/workerd/tools:param_extractor", ] + glob( From 194861931590b7397e70675d3d9045c0d49bbdd6 Mon Sep 17 00:00:00 2001 From: Nicholas Paun Date: Wed, 21 Aug 2024 10:00:01 -0700 Subject: [PATCH 3/4] Update git-blame-ignore-revs for buildifier --- .git-blame-ignore-revs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 194d0328222..c9b792aaa13 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -6,3 +6,6 @@ # Apply ruff format to the project d6d0607a845e6f71084ce272a1c1e8c50e244bdd + +# Apply buildifier to the project +f457f19039b82536b35659c1f9cb898a198e6cd1 From 86b225a20c795a84ec592aafae73b9c25d3f977f Mon Sep 17 00:00:00 2001 From: Nicholas Paun Date: Wed, 21 Aug 2024 12:04:39 -0700 Subject: [PATCH 4/4] I am going to launch Github Actions into the sun --- .github/workflows/lint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 99774f85b8a..057911ef57a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -26,6 +26,7 @@ jobs: sudo apt-get install -y --no-install-recommends clang-format-18 # buildifier won't install properly if specifying a particular version go install github.com/bazelbuild/buildtools/buildifier@latest + echo "BUILDIFIER=$HOME/go/bin/buildifier" >> $GITHUB_ENV - name: Install pnpm uses: pnpm/action-setup@v4 # The pnpm version will be determined by the `packageManager` field in `.npmrc` @@ -40,4 +41,3 @@ jobs: python3 ./tools/cross/format.py --check env: CLANG_FORMAT: clang-format-18 - BUILDIFIER: /github/home/go/bin/buildifier
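
Note on running the new Bazel formatting locally (a sketch, not part of the patches above; the go bin path is an assumption based on Go's default install location, mirroring what the lint workflow exports):

    go install github.com/bazelbuild/buildtools/buildifier@latest
    export BUILDIFIER="$HOME/go/bin/buildifier"   # only needed if $HOME/go/bin is not already on PATH
    python3 ./tools/cross/format.py --check       # report-only mode, as CI runs it
    python3 ./tools/cross/format.py               # rewrite Bazel, C++, JS/TS, JSON, and Python files in place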