From 9c40d6776145c3273a6d01a33bab94c82451af25 Mon Sep 17 00:00:00 2001 From: Alessandro Patti Date: Sat, 23 Dec 2023 11:50:33 +0100 Subject: [PATCH] Add digest functions flag --- cache/grpcproxy/grpcproxy_test.go | 2 +- config/BUILD.bazel | 2 + config/config.go | 42 +++++++++++++++- config/config_test.go | 81 +++++++++++++++++++++++++++++++ main.go | 5 +- server/grpc.go | 33 +++++++++---- server/grpc_test.go | 2 +- server/http.go | 19 ++++++-- server/http_test.go | 16 +++--- utils/flags/BUILD.bazel | 1 + utils/flags/flags.go | 7 +++ 11 files changed, 184 insertions(+), 26 deletions(-) diff --git a/cache/grpcproxy/grpcproxy_test.go b/cache/grpcproxy/grpcproxy_test.go index 4216b8377..1e37b58d0 100644 --- a/cache/grpcproxy/grpcproxy_test.go +++ b/cache/grpcproxy/grpcproxy_test.go @@ -239,7 +239,7 @@ func newFixture(t *testing.T, proxy cache.Proxy, storageMode string) *fixture { } grpcServer := grpc.NewServer() go func() { - err := server.ServeGRPC(listener, grpcServer, false, false, true, diskCache, logger, logger) + err := server.ServeGRPC(listener, grpcServer, false, false, true, diskCache, logger, logger, hashing.DigestFunctions()) if err != nil { logger.Printf(err.Error()) } diff --git a/config/BUILD.bazel b/config/BUILD.bazel index 5e1409d42..4515ce4d7 100644 --- a/config/BUILD.bazel +++ b/config/BUILD.bazel @@ -17,8 +17,10 @@ go_library( "//cache/azblobproxy:go_default_library", "//cache/gcsproxy:go_default_library", "//cache/grpcproxy:go_default_library", + "//cache/hashing:go_default_library", "//cache/httpproxy:go_default_library", "//cache/s3proxy:go_default_library", + "//genproto/build/bazel/remote/execution/v2:go_default_library", "@com_github_azure_azure_sdk_for_go_sdk_azcore//:go_default_library", "@com_github_azure_azure_sdk_for_go_sdk_azidentity//:go_default_library", "@com_github_grpc_ecosystem_go_grpc_prometheus//:go_default_library", diff --git a/config/config.go b/config/config.go index 51655d00d..982219815 100644 --- a/config/config.go +++ 
b/config/config.go @@ -17,7 +17,9 @@ import ( "github.com/buchgr/bazel-remote/v2/cache" "github.com/buchgr/bazel-remote/v2/cache/azblobproxy" + "github.com/buchgr/bazel-remote/v2/cache/hashing" "github.com/buchgr/bazel-remote/v2/cache/s3proxy" + pb "github.com/buchgr/bazel-remote/v2/genproto/build/bazel/remote/execution/v2" "github.com/urfave/cli/v2" yaml "gopkg.in/yaml.v3" @@ -114,6 +116,7 @@ type Config struct { LogTimezone string `yaml:"log_timezone"` MaxBlobSize int64 `yaml:"max_blob_size"` MaxProxyBlobSize int64 `yaml:"max_proxy_blob_size"` + DigestFunctions []pb.DigestFunction_Value // Fields that are created by combinations of the flags above. ProxyBackend cache.Proxy @@ -125,6 +128,9 @@ type Config struct { type YamlConfig struct { Config `yaml:",inline"` + // Complex types that are converted later + DigestFunctionNames []string `yaml:"digest_functions"` + // Deprecated fields, retained for backwards compatibility when // parsing config files. @@ -169,7 +175,8 @@ func newFromArgs(dir string, maxSize int, storageMode string, zstdImplementation accessLogLevel string, logTimezone string, maxBlobSize int64, - maxProxyBlobSize int64) (*Config, error) { + maxProxyBlobSize int64, + digestFunctions []pb.DigestFunction_Value) (*Config, error) { c := Config{ HTTPAddress: httpAddress, @@ -205,6 +212,7 @@ func newFromArgs(dir string, maxSize int, storageMode string, zstdImplementation LogTimezone: logTimezone, MaxBlobSize: maxBlobSize, MaxProxyBlobSize: maxProxyBlobSize, + DigestFunctions: digestFunctions, } err := validateConfig(&c) @@ -234,6 +242,7 @@ func newFromYamlFile(path string) (*Config, error) { func newFromYaml(data []byte) (*Config, error) { yc := YamlConfig{ + DigestFunctionNames: []string{"sha256"}, Config: Config{ StorageMode: "zstd", ZstdImplementation: "go", @@ -270,6 +279,16 @@ func newFromYaml(data []byte) (*Config, error) { sort.Float64s(c.MetricsDurationBuckets) } + dfs := make([]pb.DigestFunction_Value, 0) + for _, dfn := range 
yc.DigestFunctionNames { + df := hashing.DigestFunction(dfn) + if df == pb.DigestFunction_UNKNOWN { + return nil, fmt.Errorf("unknown digest function %s", dfn) + } + dfs = append(dfs, hashing.DigestFunction(dfn)) + } + c.DigestFunctions = dfs + err = validateConfig(&c) if err != nil { return nil, err @@ -462,6 +481,15 @@ func validateConfig(c *Config) error { return errors.New("'log_timezone' must be set to either \"UTC\", \"local\" or \"none\"") } + if c.DigestFunctions == nil { + return errors.New("at least one digest function must be supported") + } + for _, df := range c.DigestFunctions { + if !hashing.Supported(df) { + return fmt.Errorf("unsupported hashing function %s", df) + } + } + return nil } @@ -590,6 +618,17 @@ func get(ctx *cli.Context) (*Config, error) { } } + dfs := make([]pb.DigestFunction_Value, 0) + if ctx.String("digest_functions") != "" { + for _, dfn := range strings.Split(ctx.String("digest_functions"), ",") { + df := hashing.DigestFunction(dfn) + if df == pb.DigestFunction_UNKNOWN { + return nil, fmt.Errorf("unknown digest function %s", dfn) + } + dfs = append(dfs, df) + } + } + return newFromArgs( ctx.String("dir"), ctx.Int("max_size"), @@ -623,5 +662,6 @@ func get(ctx *cli.Context) (*Config, error) { ctx.String("log_timezone"), ctx.Int64("max_blob_size"), ctx.Int64("max_proxy_blob_size"), + dfs, ) } diff --git a/config/config_test.go b/config/config_test.go index 429e87081..845f05882 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -9,6 +9,8 @@ import ( "testing" "time" + pb "github.com/buchgr/bazel-remote/v2/genproto/build/bazel/remote/execution/v2" + "github.com/google/go-cmp/cmp" ) @@ -60,6 +62,7 @@ log_timezone: local MetricsDurationBuckets: []float64{.5, 1, 2.5, 5, 10, 20, 40, 80, 160, 320}, AccessLogLevel: "none", LogTimezone: "local", + DigestFunctions: []pb.DigestFunction_Value{pb.DigestFunction_SHA256}, } if !reflect.DeepEqual(config, expectedConfig) { @@ -103,6 +106,7 @@ gcs_proxy: MetricsDurationBuckets: 
[]float64{.5, 1, 2.5, 5, 10, 20, 40, 80, 160, 320}, AccessLogLevel: "all", LogTimezone: "UTC", + DigestFunctions: []pb.DigestFunction_Value{pb.DigestFunction_SHA256}, } if !cmp.Equal(config, expectedConfig) { @@ -147,6 +151,7 @@ http_proxy: MetricsDurationBuckets: []float64{.5, 1, 2.5, 5, 10, 20, 40, 80, 160, 320}, AccessLogLevel: "all", LogTimezone: "UTC", + DigestFunctions: []pb.DigestFunction_Value{pb.DigestFunction_SHA256}, } if !cmp.Equal(config, expectedConfig) { @@ -224,6 +229,7 @@ s3_proxy: MetricsDurationBuckets: []float64{.5, 1, 2.5, 5, 10, 20, 40, 80, 160, 320}, AccessLogLevel: "all", LogTimezone: "UTC", + DigestFunctions: []pb.DigestFunction_Value{pb.DigestFunction_SHA256}, } if !cmp.Equal(config, expectedConfig) { @@ -258,6 +264,7 @@ profile_address: :7070 MetricsDurationBuckets: []float64{.5, 1, 2.5, 5, 10, 20, 40, 80, 160, 320}, AccessLogLevel: "all", LogTimezone: "UTC", + DigestFunctions: []pb.DigestFunction_Value{pb.DigestFunction_SHA256}, } if !cmp.Equal(config, expectedConfig) { @@ -306,6 +313,7 @@ endpoint_metrics_duration_buckets: [.005, .1, 5] MetricsDurationBuckets: []float64{0.005, 0.1, 5}, AccessLogLevel: "all", LogTimezone: "UTC", + DigestFunctions: []pb.DigestFunction_Value{pb.DigestFunction_SHA256}, } if !cmp.Equal(config, expectedConfig) { @@ -438,6 +446,7 @@ storage_mode: zstd MetricsDurationBuckets: []float64{.5, 1, 2.5, 5, 10, 20, 40, 80, 160, 320}, AccessLogLevel: "all", LogTimezone: "UTC", + DigestFunctions: []pb.DigestFunction_Value{pb.DigestFunction_SHA256}, } if !cmp.Equal(config, expectedConfig) { @@ -472,6 +481,7 @@ storage_mode: zstd MetricsDurationBuckets: []float64{.5, 1, 2.5, 5, 10, 20, 40, 80, 160, 320}, AccessLogLevel: "all", LogTimezone: "UTC", + DigestFunctions: []pb.DigestFunction_Value{pb.DigestFunction_SHA256}, } if !cmp.Equal(config, expectedConfig) { @@ -495,3 +505,74 @@ func TestSocketPathMissing(t *testing.T) { t.Fatal("Expected the error message to mention the missing 'http_address' key/flag") } } + +func 
TestDigestFunctions(t *testing.T) { + t.Run("Default", func(t *testing.T) { + yaml := `dir: /opt/cache-dir +max_size: 42 +` + config, err := newFromYaml([]byte(yaml)) + if err != nil { + t.Fatal(err) + } + if len(config.DigestFunctions) != 1 { + t.Fatal("Expected exactly one digest function") + } + if config.DigestFunctions[0] != pb.DigestFunction_SHA256 { + t.Fatal("Expected sha256 digest function") + } + err = validateConfig(config) + if err != nil { + t.Fatal(err) + } + }) + + t.Run("Success", func(t *testing.T) { + yaml := `dir: /opt/cache-dir +max_size: 42 +digest_functions: [sha256] +` + config, err := newFromYaml([]byte(yaml)) + if err != nil { + t.Fatal(err) + } + if len(config.DigestFunctions) != 1 { + t.Fatal("Expected exactly one digest function") + } + if config.DigestFunctions[0] != pb.DigestFunction_SHA256 { + t.Fatal("Expected sha256 digest function") + } + err = validateConfig(config) + if err != nil { + t.Fatal(err) + } + }) + + t.Run("UnknownFunction", func(t *testing.T) { + yaml := `dir: /opt/cache-dir +max_size: 42 +digest_functions: [sha256, foo] +` + _, err := newFromYaml([]byte(yaml)) + if err == nil { + t.Fatal("Expected error") + } + if !strings.Contains(err.Error(), "unknown") { + t.Fatalf("Unexpected error: %s", err.Error()) + } + }) + + t.Run("UnsupportedFunction", func(t *testing.T) { + yaml := `dir: /opt/cache-dir +max_size: 42 +digest_functions: [md5] +` + _, err := newFromYaml([]byte(yaml)) + if err == nil { + t.Fatal("Expected error") + } + if !strings.Contains(err.Error(), "unsupported") { + t.Fatalf("Unexpected error: %s", err.Error()) + } + }) +} diff --git a/main.go b/main.go index 9472701b8..579d9c34f 100644 --- a/main.go +++ b/main.go @@ -239,7 +239,7 @@ func startHttpServer(c *config.Config, httpServer **http.Server, checkClientCertForWrites := c.TLSCaFile != "" validateAC := !c.DisableHTTPACValidation h := server.NewHTTPCache(diskCache, c.AccessLogger, c.ErrorLogger, validateAC, - c.EnableACKeyInstanceMangling, 
checkClientCertForReads, checkClientCertForWrites, gitCommit) + c.EnableACKeyInstanceMangling, checkClientCertForReads, checkClientCertForWrites, gitCommit, c.DigestFunctions) cacheHandler := h.CacheHandler var basicAuthenticator auth.BasicAuth @@ -429,7 +429,8 @@ func startGrpcServer(c *config.Config, grpcServer **grpc.Server, validateAC, c.EnableACKeyInstanceMangling, enableRemoteAssetAPI, - diskCache, c.AccessLogger, c.ErrorLogger) + diskCache, c.AccessLogger, c.ErrorLogger, + c.DigestFunctions) } // A http.HandlerFunc wrapper which requires successful basic diff --git a/server/grpc.go b/server/grpc.go index c9650abf8..573921d16 100644 --- a/server/grpc.go +++ b/server/grpc.go @@ -2,6 +2,7 @@ package server import ( "context" + "fmt" "net" "net/http" @@ -30,11 +31,12 @@ import ( const grpcHealthServiceName = "/grpc.health.v1.Health/Check" type grpcServer struct { - cache disk.Cache - accessLogger cache.Logger - errorLogger cache.Logger - depsCheck bool - mangleACKeys bool + cache disk.Cache + accessLogger cache.Logger + errorLogger cache.Logger + depsCheck bool + mangleACKeys bool + digestFunctions map[pb.DigestFunction_Value]bool } var readOnlyMethods = map[string]struct{}{ @@ -55,26 +57,33 @@ func ListenAndServeGRPC( validateACDeps bool, mangleACKeys bool, enableRemoteAssetAPI bool, - c disk.Cache, a cache.Logger, e cache.Logger) error { + c disk.Cache, a cache.Logger, e cache.Logger, + digestFunctions []pb.DigestFunction_Value) error { listener, err := net.Listen(network, addr) if err != nil { return err } - return ServeGRPC(listener, srv, validateACDeps, mangleACKeys, enableRemoteAssetAPI, c, a, e) + return ServeGRPC(listener, srv, validateACDeps, mangleACKeys, enableRemoteAssetAPI, c, a, e, digestFunctions) } func ServeGRPC(l net.Listener, srv *grpc.Server, validateACDepsCheck bool, mangleACKeys bool, enableRemoteAssetAPI bool, - c disk.Cache, a cache.Logger, e cache.Logger) error { + c disk.Cache, a cache.Logger, e cache.Logger, + digestFunctions 
[]pb.DigestFunction_Value) error { + dfs := make(map[pb.DigestFunction_Value]bool) + for _, df := range digestFunctions { + dfs[df] = true + } s := &grpcServer{ cache: c, accessLogger: a, errorLogger: e, - depsCheck: validateACDepsCheck, - mangleACKeys: mangleACKeys, + depsCheck: validateACDepsCheck, + mangleACKeys: mangleACKeys, + digestFunctions: dfs, } pb.RegisterActionCacheServer(srv, s) pb.RegisterCapabilitiesServer(srv, s) @@ -129,10 +138,14 @@ func (s *grpcServer) GetCapabilities(ctx context.Context, func (s *grpcServer) getHasher(df pb.DigestFunction_Value) (hashing.Hasher, error) { var err error var hasher hashing.Hasher + switch df { case pb.DigestFunction_UNKNOWN: hasher, err = hashing.Get(hashing.LegacyFn) default: + if _, ok := s.digestFunctions[df]; !ok { + return nil, status.Error(codes.InvalidArgument, fmt.Sprintf("unsupported digest function %s", df)) + } hasher, err = hashing.Get(df) } if err != nil { diff --git a/server/grpc_test.go b/server/grpc_test.go index 212c4214b..a46319d54 100644 --- a/server/grpc_test.go +++ b/server/grpc_test.go @@ -105,7 +105,7 @@ func grpcTestSetupInternal(t *testing.T, mangleACKeys bool) (tc grpcTestFixture) validateAC, mangleACKeys, enableRemoteAssetAPI, - diskCache, accessLogger, errorLogger) + diskCache, accessLogger, errorLogger, hashing.DigestFunctions()) if err2 != nil { fmt.Println(err2) os.Exit(1) diff --git a/server/http.go b/server/http.go index 5531bc440..2b64d7321 100644 --- a/server/http.go +++ b/server/http.go @@ -47,6 +47,7 @@ type httpCache struct { gitCommit string checkClientCertForReads bool checkClientCertForWrites bool + dfs map[pb.DigestFunction_Value]bool } type statusPageData struct { @@ -64,12 +65,16 @@ type statusPageData struct { // accessLogger will print one line for each HTTP request to stdout. // errorLogger will print unexpected server errors. Inexistent files and malformed URLs will not // be reported. 
-func NewHTTPCache(cache disk.Cache, accessLogger cache.Logger, errorLogger cache.Logger, validateAC bool, mangleACKeys bool, checkClientCertForReads bool, checkClientCertForWrites bool, commit string) HTTPCache { +func NewHTTPCache(cache disk.Cache, accessLogger cache.Logger, errorLogger cache.Logger, validateAC bool, mangleACKeys bool, checkClientCertForReads bool, checkClientCertForWrites bool, commit string, digestFunctions []pb.DigestFunction_Value) HTTPCache { _, _, numItems, _ := cache.Stats() errorLogger.Printf("Loaded %d existing disk cache items.", numItems) + dfs := make(map[pb.DigestFunction_Value]bool) + for _, df := range digestFunctions { + dfs[df] = true + } hc := &httpCache{ cache: cache, accessLogger: accessLogger, @@ -78,6 +83,7 @@ func NewHTTPCache(cache disk.Cache, accessLogger cache.Logger, errorLogger cache mangleACKeys: mangleACKeys, checkClientCertForReads: checkClientCertForReads, checkClientCertForWrites: checkClientCertForWrites, + dfs: dfs, } if commit != "{STABLE_GIT_COMMIT}" { @@ -209,12 +215,19 @@ func (h *httpCache) CacheHandler(w http.ResponseWriter, r *http.Request) { var err error hasher := hashing.DefaultHasher if dfn := r.Header.Get("X-Digest-Function"); dfn != "" { - hasher, err = hashing.Get(hashing.DigestFunction(dfn)) - if err != nil { + df := hashing.DigestFunction(dfn) + if _, ok := h.dfs[df]; !ok { http.Error(w, fmt.Sprintf("Unsupported digest function %s", dfn), http.StatusInternalServerError) h.logResponse(http.StatusInternalServerError, r) return } + + hasher, err = hashing.Get(df) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + h.logResponse(http.StatusInternalServerError, r) + return + } } kind, hash, instance, err := parseRequestURL(r.URL.Path, h.validateAC, hasher) diff --git a/server/http_test.go b/server/http_test.go index cde3b5497..a7aae12c3 100644 --- a/server/http_test.go +++ b/server/http_test.go @@ -37,7 +37,7 @@ func TestDownloadFile(t *testing.T) { if err != nil { 
t.Fatal(err) } - h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "") + h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "", hashing.DigestFunctions()) rr := httptest.NewRecorder() handler := http.HandlerFunc(h.CacheHandler) @@ -105,7 +105,7 @@ func TestUploadFilesConcurrently(t *testing.T) { if err != nil { t.Fatal(err) } - h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "") + h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "", hashing.DigestFunctions()) handler := http.HandlerFunc(h.CacheHandler) var wg sync.WaitGroup @@ -169,7 +169,7 @@ func TestUploadSameFileConcurrently(t *testing.T) { if err != nil { t.Fatal(err) } - h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "") + h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "", hashing.DigestFunctions()) handler := http.HandlerFunc(h.CacheHandler) var wg sync.WaitGroup @@ -210,7 +210,7 @@ func TestUploadCorruptedFile(t *testing.T) { if err != nil { t.Fatal(err) } - h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "") + h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "", hashing.DigestFunctions()) rr := httptest.NewRecorder() handler := http.HandlerFunc(h.CacheHandler) handler.ServeHTTP(rr, r) @@ -254,7 +254,7 @@ func TestUploadEmptyActionResult(t *testing.T) { mangle := false checkClientCertForReads := false checkClientCertForWrites := false - h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), validate, mangle, checkClientCertForReads, checkClientCertForWrites, "") + h := NewHTTPCache(c, testutils.NewSilentLogger(), 
testutils.NewSilentLogger(), validate, mangle, checkClientCertForReads, checkClientCertForWrites, "", hashing.DigestFunctions()) rr := httptest.NewRecorder() handler := http.HandlerFunc(h.CacheHandler) handler.ServeHTTP(rr, r) @@ -316,7 +316,7 @@ func testEmptyBlobAvailable(t *testing.T, method string) { mangle := false checkClientCertForReads := false checkClientCertForWrites := false - h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), validate, mangle, checkClientCertForReads, checkClientCertForWrites, "") + h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), validate, mangle, checkClientCertForReads, checkClientCertForWrites, "", hashing.DigestFunctions()) rr := httptest.NewRecorder() handler := http.HandlerFunc(h.CacheHandler) handler.ServeHTTP(rr, r) @@ -339,7 +339,7 @@ func TestStatusPage(t *testing.T) { if err != nil { t.Fatal(err) } - h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "") + h := NewHTTPCache(c, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "", hashing.DigestFunctions()) rr := httptest.NewRecorder() handler := http.HandlerFunc(h.StatusPageHandler) handler.ServeHTTP(rr, r) @@ -483,7 +483,7 @@ func TestRemoteReturnsNotFound(t *testing.T) { t.Fatal(err) } - h := NewHTTPCache(emptyCache, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "") + h := NewHTTPCache(emptyCache, testutils.NewSilentLogger(), testutils.NewSilentLogger(), true, false, false, false, "", hashing.DigestFunctions()) // create a fake http.Request _, hash := testutils.RandomDataAndHash(1024, hashing.DefaultHasher) url, _ := url.Parse(fmt.Sprintf("http://localhost:8080/ac/%s", hash)) diff --git a/utils/flags/BUILD.bazel b/utils/flags/BUILD.bazel index 2b06046d1..afa2b429e 100644 --- a/utils/flags/BUILD.bazel +++ b/utils/flags/BUILD.bazel @@ -10,6 +10,7 @@ go_library( visibility = 
["//visibility:public"], deps = [ "//cache/azblobproxy:go_default_library", + "//cache/hashing:go_default_library", "//cache/s3proxy:go_default_library", "@com_github_urfave_cli_v2//:go_default_library", ], diff --git a/utils/flags/flags.go b/utils/flags/flags.go index 10e3b0d40..8db03c194 100644 --- a/utils/flags/flags.go +++ b/utils/flags/flags.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/buchgr/bazel-remote/v2/cache/azblobproxy" + "github.com/buchgr/bazel-remote/v2/cache/hashing" "github.com/buchgr/bazel-remote/v2/cache/s3proxy" "github.com/urfave/cli/v2" @@ -449,5 +450,11 @@ func GetCliFlags() []cli.Flag { DefaultText: "UTC, ie use UTC timezone", EnvVars: []string{"BAZEL_REMOTE_LOG_TIMEZONE"}, }, + &cli.StringFlag{ + Name: "digest_functions", + Usage: fmt.Sprintf("A comma-separated list of digest functions that should be supported. Possible values are %v", hashing.DigestFunctions()), + Value: "sha256", + EnvVars: []string{"BAZEL_REMOTE_DIGEST_FUNCTIONS"}, + }, } }