From 338583c18aeb68f9b0f4173495664ce6d318ff93 Mon Sep 17 00:00:00 2001
From: Ashutosh Narkar
Date: Tue, 9 Jun 2020 16:15:27 -0700
Subject: [PATCH] Add support for OPA bundle signatures

These changes add support for digital signatures for policy bundles, which can be used to verify their authenticity. Bundle signature verification involves the following steps:

* Verify the JWT signature
* Verify that the files listed in the JWT payload exist in the bundle
* Verify that the content of the files in the bundle matches the hashes recorded in the payload

This commit adds a new `sign` command to generate a digital signature for policy bundles. For more details, run "opa sign --help".

The signatures generated by the 'sign' command can be verified by the 'build' command. The 'build' command can also sign the bundle it generates. The 'run' command can verify a signed bundle or skip verification altogether. OPA 'sign', 'build' and 'run' can sign and verify bundles in bundle mode (--bundle) only. Verification can also be performed when bundle downloading is enabled.

Fixes: #1757

Signed-off-by: Ashutosh Narkar
---
 CHANGELOG.md | 13 + bundle/bundle.go | 355 ++++++++++++-- bundle/bundle_test.go | 443 +++++++++++++++++- bundle/file.go | 60 ++- bundle/hash.go | 141 ++++++ bundle/hash_test.go | 123 +++++ bundle/keys.go | 181 +++++++ bundle/keys_test.go | 343 ++++++++++++++ bundle/sign.go | 56 +++ bundle/sign_test.go | 182 +++++++ bundle/verify.go | 161 +++++++ bundle/verify_test.go | 245 ++++++++++ cmd/build.go | 122 ++++- cmd/build_test.go | 26 + cmd/check.go | 2 +- cmd/deps.go | 2 +- cmd/eval.go | 3 + cmd/flags.go | 32 ++ cmd/oracle.go | 2 +- cmd/run.go | 98 +++- cmd/run_test.go | 31 +- cmd/sign.go | 280 +++++++++++ cmd/sign_test.go | 185 ++++++++ compile/compile.go | 64 ++- compile/compile_test.go | 10 + config/config.go | 1 + docs/content/configuration.md | 54 ++- docs/content/management.md | 162 ++++++- download/download.go | 9 +- {topdown/internal => internal}/jwx/.gitignore | 0 {topdown/internal => internal}/jwx/LICENSE | 0 {topdown/internal => internal}/jwx/Makefile | 0 .../jwx/buffer/buffer.go | 0 .../jwx/buffer/buffer_test.go | 0 .../internal => internal}/jwx/jwa/elliptic.go | 0 .../internal => internal}/jwx/jwa/key_type.go | 0 .../jwx/jwa/parameters.go | 2 +- .../jwx/jwa/signature.go | 0 .../internal => internal}/jwx/jwk/ecdsa.go | 2 +- .../jwx/jwk/ecdsa_test.go | 6 +- .../internal => internal}/jwx/jwk/headers.go | 2 +- .../jwx/jwk/headers_test.go | 4 +- .../jwx/jwk/interface.go | 2 +- {topdown/internal => internal}/jwx/jwk/jwk.go | 2 +- .../internal => internal}/jwx/jwk/jwk_test.go | 2 +- .../internal => internal}/jwx/jwk/key_ops.go | 0 {topdown/internal => internal}/jwx/jwk/rsa.go | 2 +- .../internal => internal}/jwx/jwk/rsa_test.go | 4 +- .../jwx/jwk/symmetric.go | 2 +- .../jwx/jwk/symmetric_test.go | 4 +- .../internal => internal}/jwx/jws/headers.go | 2 +- .../jwx/jws/headers_test.go | 4 +- .../jwx/jws/interface.go | 0 {topdown/internal => internal}/jwx/jws/jws.go | 8 +- .../internal => internal}/jwx/jws/jws_test.go | 10 +- .../internal => internal}/jwx/jws/message.go | 0 .../jwx/jws/sign/ecdsa.go | 2 +- .../jwx/jws/sign/ecdsa_test.go | 2 +- .../jwx/jws/sign/hmac.go | 2 +- .../jwx/jws/sign/hmac_test.go | 2 +- .../jwx/jws/sign/interface.go | 2 +- .../internal => internal}/jwx/jws/sign/rsa.go | 2 +- internal/jwx/jws/sign/sign.go | 59 +++ .../jwx/jws/verify/ecdsa.go | 2 +- .../jwx/jws/verify/ecdsa_test.go | 2 +- .../jwx/jws/verify/hmac.go | 4 +- .../jwx/jws/verify/hmac_test.go | 2 +-
.../jwx/jws/verify/interface.go | 2 +- .../jwx/jws/verify/rsa.go | 2 +- .../jwx/jws/verify/rsa_test.go | 2 +- internal/jwx/jws/verify/verify.go | 57 +++ .../jwx/jws/verify/verify_test.go | 0 internal/runtime/init/init.go | 75 ++- internal/runtime/init/init_test.go | 76 ++- loader/loader.go | 85 +++- loader/loader_test.go | 103 ++++ plugins/bundle/config.go | 65 ++- plugins/bundle/config_test.go | 83 +++- plugins/bundle/plugin.go | 3 +- plugins/discovery/config.go | 80 +++- plugins/discovery/config_test.go | 21 +- plugins/discovery/discovery.go | 37 +- plugins/discovery/discovery_test.go | 151 +++++- plugins/plugins.go | 24 + rego/rego.go | 84 ++-- runtime/runtime.go | 12 +- tester/runner.go | 2 +- topdown/internal/jwx/jws/sign/sign.go | 21 - topdown/internal/jwx/jws/verify/verify.go | 22 - topdown/tokens.go | 4 +- topdown/tokens_test.go | 4 +- 91 files changed, 4236 insertions(+), 304 deletions(-) create mode 100644 bundle/hash.go create mode 100644 bundle/hash_test.go create mode 100644 bundle/keys.go create mode 100644 bundle/keys_test.go create mode 100644 bundle/sign.go create mode 100644 bundle/sign_test.go create mode 100644 bundle/verify.go create mode 100644 bundle/verify_test.go create mode 100644 cmd/sign.go create mode 100644 cmd/sign_test.go rename {topdown/internal => internal}/jwx/.gitignore (100%) rename {topdown/internal => internal}/jwx/LICENSE (100%) rename {topdown/internal => internal}/jwx/Makefile (100%) rename {topdown/internal => internal}/jwx/buffer/buffer.go (100%) rename {topdown/internal => internal}/jwx/buffer/buffer_test.go (100%) rename {topdown/internal => internal}/jwx/jwa/elliptic.go (100%) rename {topdown/internal => internal}/jwx/jwa/key_type.go (100%) rename {topdown/internal => internal}/jwx/jwa/parameters.go (93%) rename {topdown/internal => internal}/jwx/jwa/signature.go (100%) rename {topdown/internal => internal}/jwx/jwk/ecdsa.go (98%) rename {topdown/internal => internal}/jwx/jwk/ecdsa_test.go (97%) rename {topdown/internal => internal}/jwx/jwk/headers.go (98%) rename {topdown/internal => internal}/jwx/jwk/headers_test.go (97%) rename {topdown/internal => internal}/jwx/jwk/interface.go (96%) rename {topdown/internal => internal}/jwx/jwk/jwk.go (98%) rename {topdown/internal => internal}/jwx/jwk/jwk_test.go (98%) rename {topdown/internal => internal}/jwx/jwk/key_ops.go (100%) rename {topdown/internal => internal}/jwx/jwk/rsa.go (97%) rename {topdown/internal => internal}/jwx/jwk/rsa_test.go (98%) rename {topdown/internal => internal}/jwx/jwk/symmetric.go (93%) rename {topdown/internal => internal}/jwx/jwk/symmetric_test.go (96%) rename {topdown/internal => internal}/jwx/jws/headers.go (98%) rename {topdown/internal => internal}/jwx/jws/headers_test.go (96%) rename {topdown/internal => internal}/jwx/jws/interface.go (100%) rename {topdown/internal => internal}/jwx/jws/jws.go (96%) rename {topdown/internal => internal}/jwx/jws/jws_test.go (98%) rename {topdown/internal => internal}/jwx/jws/message.go (100%) rename {topdown/internal => internal}/jwx/jws/sign/ecdsa.go (96%) rename {topdown/internal => internal}/jwx/jws/sign/ecdsa_test.go (92%) rename {topdown/internal => internal}/jwx/jws/sign/hmac.go (95%) rename {topdown/internal => internal}/jwx/jws/sign/hmac_test.go (92%) rename {topdown/internal => internal}/jwx/jws/sign/interface.go (95%) rename {topdown/internal => internal}/jwx/jws/sign/rsa.go (97%) create mode 100644 internal/jwx/jws/sign/sign.go rename {topdown/internal => internal}/jwx/jws/verify/ecdsa.go (96%) rename {topdown/internal => 
internal}/jwx/jws/verify/ecdsa_test.go (92%) rename {topdown/internal => internal}/jwx/jws/verify/hmac.go (84%) rename {topdown/internal => internal}/jwx/jws/verify/hmac_test.go (91%) rename {topdown/internal => internal}/jwx/jws/verify/interface.go (94%) rename {topdown/internal => internal}/jwx/jws/verify/rsa.go (97%) rename {topdown/internal => internal}/jwx/jws/verify/rsa_test.go (92%) create mode 100644 internal/jwx/jws/verify/verify.go rename {topdown/internal => internal}/jwx/jws/verify/verify_test.go (100%) delete mode 100644 topdown/internal/jwx/jws/sign/sign.go delete mode 100644 topdown/internal/jwx/jws/verify/verify.go diff --git a/CHANGELOG.md b/CHANGELOG.md index 87a1da0db3..b154befa0d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,19 @@ project adheres to [Semantic Versioning](http://semver.org/). ## Unreleased +### Features + +#### Bundle Signing + +OPA now supports digital signatures for policy bundles. Specifically, a signed bundle is a normal OPA bundle that +includes a file named ".signatures.json" that dictates which files should be included in the bundle, what their SHA +hashes are, and of course is cryptographically secure. When OPA receives a new bundle, it checks that it has been +properly signed using a (public) key that OPA has been configured with out-of-band. Only if that verification succeeds +does OPA activate the new bundle; otherwise, OPA continues using its existing bundle and reports an activation +failure via the status API and error logging. For more information see https://openpolicyagent.org/docs/latest/management/#signing. +Thanks to @ashish246 who co-designed the feature and provided valuable input to the development process with his +proof-of-concept [#1757](https://github.com/open-policy-agent/opa/issues/1757). + ## 0.21.1 This release fixes [#2497](https://github.com/open-policy-agent/opa/issues/2497) where the comprehension indexing optimization produced incorrect results for nested comprehensions that close over variables in the outer scope. This issue only affects policies containing nested comprehensions that are recognized by the indexer (which is a relatively small percentage). diff --git a/bundle/bundle.go b/bundle/bundle.go index 9aa4d751c2..b835d47808 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -9,6 +9,7 @@ import ( "archive/tar" "bytes" "compress/gzip" + "encoding/hex" "encoding/json" "fmt" "io" @@ -31,21 +32,58 @@ import ( // Common file extensions and file names. const ( - RegoExt = ".rego" - WasmFile = "/policy.wasm" - manifestExt = ".manifest" - dataFile = "data.json" - yamlDataFile = "data.yaml" + RegoExt = ".rego" + WasmFile = "/policy.wasm" + ManifestExt = ".manifest" + SignaturesFile = "signatures.json" + dataFile = "data.json" + yamlDataFile = "data.yaml" + defaultHashingAlg = "SHA-256" + BundleLimitBytes = (1024 * 1024 * 1024) + 1 // limit bundle reads to 1GB to protect against gzip bombs ) -const bundleLimitBytes = (1024 * 1024 * 1024) + 1 // limit bundle reads to 1GB to protect against gzip bombs - // Bundle represents a loaded bundle. The bundle can contain data and policies. type Bundle struct { - Manifest Manifest - Data map[string]interface{} - Modules []ModuleFile - Wasm []byte + Signatures SignaturesConfig + Manifest Manifest + Data map[string]interface{} + Modules []ModuleFile + Wasm []byte +} + +// SignaturesConfig represents an array of JWTs that encapsulate the signatures for the bundle.
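As an aside (not part of the patch), the sign-then-verify round trip described in the commit message and changelog looks roughly like this at the Go level, using the API this change adds. It is a minimal sketch: the "secret"/"foo" key material mirrors the test fixtures below, and error handling is abbreviated.

package example

import (
	"bytes"

	"github.com/open-policy-agent/opa/bundle"
)

func signAndVerify(b bundle.Bundle) (bundle.Bundle, error) {
	// Sign: hash every file in the bundle and record the resulting JWT in
	// the bundle's .signatures.json file.
	sc := bundle.NewSigningConfig("secret", "HS256", "")
	if err := b.GenerateSignature(sc, "foo", false); err != nil {
		return bundle.Bundle{}, err
	}

	var buf bytes.Buffer
	if err := bundle.NewWriter(&buf).Write(b); err != nil {
		return bundle.Bundle{}, err
	}

	// Verify: reading the archive back checks the JWT signature, that every
	// file listed in the payload is present, and that the file digests match.
	vc := bundle.NewVerificationConfig(
		map[string]*bundle.KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "foo", "", nil)
	return bundle.NewReader(&buf).WithBundleVerificationConfig(vc).Read()
}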
+type SignaturesConfig struct { + Signatures []string `json:"signatures,omitempty"` +} + +// isEmpty returns if the SignaturesConfig is empty. +func (s SignaturesConfig) isEmpty() bool { + return reflect.DeepEqual(s, SignaturesConfig{}) +} + +// DecodedSignature represents the decoded JWT payload. +type DecodedSignature struct { + Files []FileInfo `json:"files"` + KeyID string `json:"keyid"` + Scope string `json:"scope"` + IssuedAt int64 `json:"iat"` + Issuer string `json:"iss"` +} + +// FileInfo contains the hashing algorithm used, resulting digest etc. +type FileInfo struct { + Name string `json:"name"` + Hash string `json:"hash"` + Algorithm string `json:"algorithm"` +} + +// NewFile returns a new FileInfo. +func NewFile(name, hash, alg string) FileInfo { + return FileInfo{ + Name: name, + Hash: hash, + Algorithm: alg, + } } // Manifest represents the manifest from a bundle. The manifest may contain @@ -193,6 +231,9 @@ type Reader struct { includeManifestInData bool metrics metrics.Metrics baseDir string + verificationConfig *VerificationConfig + skipVerify bool + files map[string]FileInfo // files in the bundle signature payload } // NewReader is deprecated. Use NewCustomReader instead. @@ -206,6 +247,7 @@ func NewCustomReader(loader DirectoryLoader) *Reader { nr := Reader{ loader: loader, metrics: metrics.New(), + files: make(map[string]FileInfo), } return &nr } @@ -230,29 +272,64 @@ func (r *Reader) WithBaseDir(dir string) *Reader { return r } +// WithBundleVerificationConfig sets the key configuration used to verify a signed bundle +func (r *Reader) WithBundleVerificationConfig(config *VerificationConfig) *Reader { + r.verificationConfig = config + return r +} + +// WithSkipBundleVerification skips verification of a signed bundle +func (r *Reader) WithSkipBundleVerification(skipVerify bool) *Reader { + r.skipVerify = skipVerify + return r +} + // Read returns a new Bundle loaded from the reader. 
func (r *Reader) Read() (Bundle, error) { var bundle Bundle + var descriptors []*Descriptor + var err error bundle.Data = map[string]interface{}{} - for { - f, err := r.loader.NextFile() - if err == io.EOF { - break - } - if err != nil { - return bundle, errors.Wrap(err, "bundle read failed") - } + bundle.Signatures, descriptors, err = listSignaturesAndDescriptors(r.loader, r.skipVerify) + if err != nil { + return bundle, err + } + + err = r.checkSignaturesAndDescriptors(bundle.Signatures) + if err != nil { + return bundle, err + } + for _, f := range descriptors { var buf bytes.Buffer - n, err := f.Read(&buf, bundleLimitBytes) + n, err := f.Read(&buf, BundleLimitBytes) f.Close() // always close, even on error + if err != nil && err != io.EOF { return bundle, err - } else if err == nil && n >= bundleLimitBytes { - return bundle, fmt.Errorf("bundle exceeded max size (%v bytes)", bundleLimitBytes-1) + } else if err == nil && n >= BundleLimitBytes { + return bundle, fmt.Errorf("bundle exceeded max size (%v bytes)", BundleLimitBytes-1) + } + + // verify the file content + if !bundle.Signatures.isEmpty() { + path := f.Path() + if r.baseDir != "" { + path = f.URL() + } + path = strings.TrimPrefix(path, "/") + + // check if the file is to be excluded from bundle verification + if r.isFileExcluded(path) { + delete(r.files, path) + } else { + if err = r.verifyBundleFile(path, buf); err != nil { + return bundle, err + } + } } // Normalize the paths to use `/` separators @@ -309,13 +386,22 @@ func (r *Reader) Read() (Bundle, error) { return bundle, err } - } else if strings.HasSuffix(path, manifestExt) { + } else if strings.HasSuffix(path, ManifestExt) { if err := util.NewJSONDecoder(&buf).Decode(&bundle.Manifest); err != nil { return bundle, errors.Wrap(err, "bundle load failed on manifest decode") } } } + // check if the bundle signatures specify any files that weren't found in the bundle + if len(r.files) != 0 { + extra := []string{} + for k := range r.files { + extra = append(extra, k) + } + return bundle, fmt.Errorf("file(s) %v specified in bundle signatures but not found in the target bundle", extra) + } + if err := bundle.Manifest.validateAndInjectDefaults(bundle); err != nil { return bundle, err } @@ -343,6 +429,48 @@ func (r *Reader) Read() (Bundle, error) { return bundle, nil } +func (r *Reader) isFileExcluded(path string) bool { + for _, e := range r.verificationConfig.Exclude { + match, _ := filepath.Match(e, path) + if match { + return true + } + } + return false +} + +func (r *Reader) checkSignaturesAndDescriptors(signatures SignaturesConfig) error { + if r.skipVerify { + return nil + } + + if signatures.isEmpty() && r.verificationConfig != nil { + return fmt.Errorf("bundle missing .signatures.json file") + } + + if !signatures.isEmpty() { + if r.verificationConfig == nil { + return fmt.Errorf("verification key not provided") + } + + // verify the JWT signatures included in the `.signatures.json` file + if err := r.verifyBundleSignature(signatures); err != nil { + return err + } + } + return nil +} + +func (r *Reader) verifyBundleSignature(sc SignaturesConfig) error { + var err error + r.files, err = VerifyBundleSignature(sc, r.verificationConfig) + return err +} + +func (r *Reader) verifyBundleFile(path string, data bytes.Buffer) error { + return VerifyBundleFile(path, data, r.files) +} + func (r *Reader) fullPath(path string) string { if r.baseDir != "" { path = filepath.Join(r.baseDir, path) @@ -363,6 +491,7 @@ type Writer struct { usePath bool disableFormat bool w io.Writer + 
signingConfig *SigningConfig } // NewWriter returns a bundle writer that writes to w. @@ -407,26 +536,7 @@ func (w *Writer) Write(bundle Bundle) error { path = module.Path } - doFormat := !w.disableFormat - bs := module.Raw - if bs == nil { - var err error - bs, err = format.Ast(module.Parsed) - if err != nil { - return err - } - doFormat = false // do not reformat - } - - if doFormat { - var err error - bs, err = format.Source(path, module.Raw) - if err != nil { - return err - } - } - - if err := archive.WriteFile(tw, path, bs); err != nil { + if err := archive.WriteFile(tw, path, module.Raw); err != nil { return err } } @@ -439,6 +549,10 @@ func (w *Writer) Write(bundle Bundle) error { return err } + if err := writeSignatures(tw, bundle); err != nil { + return err + } + if err := tw.Close(); err != nil { return err } @@ -462,7 +576,118 @@ func writeManifest(tw *tar.Writer, bundle Bundle) error { return err } - return archive.WriteFile(tw, manifestExt, buf.Bytes()) + return archive.WriteFile(tw, ManifestExt, buf.Bytes()) +} + +func writeSignatures(tw *tar.Writer, bundle Bundle) error { + + if bundle.Signatures.isEmpty() { + return nil + } + + bs, err := json.MarshalIndent(bundle.Signatures, "", " ") + if err != nil { + return err + } + + return archive.WriteFile(tw, fmt.Sprintf(".%v", SignaturesFile), bs) +} + +func hashBundleFiles(hash SignatureHasher, data map[string]interface{}, manifest Manifest, wasm []byte) ([]FileInfo, error) { + + files := []FileInfo{} + + bytes, err := hash.HashFile(data) + if err != nil { + return files, err + } + files = append(files, NewFile(strings.TrimPrefix("data.json", "/"), hex.EncodeToString(bytes), defaultHashingAlg)) + + if len(wasm) != 0 { + bytes, err := hash.HashFile(wasm) + if err != nil { + return files, err + } + files = append(files, NewFile(strings.TrimPrefix(WasmFile, "/"), hex.EncodeToString(bytes), defaultHashingAlg)) + } + + bytes, err = hash.HashFile(manifest) + if err != nil { + return files, err + } + files = append(files, NewFile(strings.TrimPrefix(ManifestExt, "/"), hex.EncodeToString(bytes), defaultHashingAlg)) + + return files, err +} + +// FormatModules formats Rego modules +func (b *Bundle) FormatModules(useModulePath bool) error { + var err error + + for i, module := range b.Modules { + if module.Raw == nil { + module.Raw, err = format.Ast(module.Parsed) + if err != nil { + return err + } + } else { + path := module.URL + if useModulePath { + path = module.Path + } + + module.Raw, err = format.Source(path, module.Raw) + if err != nil { + return err + } + } + b.Modules[i].Raw = module.Raw + } + return nil +} + +// GenerateSignature generates the signature for the given bundle. +func (b *Bundle) GenerateSignature(signingConfig *SigningConfig, keyID string, useModulePath bool) error { + + hash, err := NewSignatureHasher(HashingAlgorithm(defaultHashingAlg)) + if err != nil { + return err + } + + files := []FileInfo{} + + for _, module := range b.Modules { + bytes, err := hash.HashFile(module.Raw) + if err != nil { + return err + } + + path := module.URL + if useModulePath { + path = module.Path + } + files = append(files, NewFile(strings.TrimPrefix(path, "/"), hex.EncodeToString(bytes), defaultHashingAlg)) + } + + result, err := hashBundleFiles(hash, b.Data, b.Manifest, b.Wasm) + if err != nil { + return err + } + files = append(files, result...) 
+ + // generate signed token + token, err := GenerateSignedToken(files, signingConfig, keyID) + if err != nil { + return err + } + + if b.Signatures.isEmpty() { + b.Signatures = SignaturesConfig{} + } + + b.Signatures.Signatures = []string{string(token)} + + return nil } // ParsedModules returns a map of parsed modules with names that are @@ -484,6 +709,7 @@ func (b Bundle) Equal(other Bundle) bool { if !reflect.DeepEqual(b.Data, other.Data) { return false } + if len(b.Modules) != len(other.Modules) { return false } @@ -753,3 +979,44 @@ func modulePathWithPrefix(bundleName string, modulePath string) string { return filepath.Join(prefix, modulePath) } + +// IsStructuredDoc checks if the file name equals a structured file extension ex. ".json" +func IsStructuredDoc(name string) bool { + return filepath.Base(name) == dataFile || filepath.Base(name) == yamlDataFile || + filepath.Base(name) == SignaturesFile || filepath.Base(name) == ManifestExt +} + +func listSignaturesAndDescriptors(loader DirectoryLoader, skipVerify bool) (SignaturesConfig, []*Descriptor, error) { + descriptors := []*Descriptor{} + var signatures SignaturesConfig + + for { + f, err := loader.NextFile() + if err == io.EOF { + break + } + + if err != nil { + return signatures, nil, errors.Wrap(err, "bundle read failed") + } + + // check for the signatures file + if !skipVerify && strings.HasSuffix(f.Path(), SignaturesFile) { + var buf bytes.Buffer + n, err := f.Read(&buf, BundleLimitBytes) + f.Close() // always close, even on error + if err != nil && err != io.EOF { + return signatures, nil, err + } else if err == nil && n >= BundleLimitBytes { + return signatures, nil, fmt.Errorf("bundle exceeded max size (%v bytes)", BundleLimitBytes-1) + } + + if err := util.NewJSONDecoder(&buf).Decode(&signatures); err != nil { + return signatures, nil, errors.Wrap(err, "bundle load failed on signatures decode") + } + } else if !strings.HasSuffix(f.Path(), SignaturesFile) { + descriptors = append(descriptors, f) + } + } + return signatures, descriptors, nil +} diff --git a/bundle/bundle_test.go b/bundle/bundle_test.go index e4592887f0..0918e27a42 100644 --- a/bundle/bundle_test.go +++ b/bundle/bundle_test.go @@ -8,6 +8,8 @@ import ( "bytes" "compress/gzip" "encoding/json" + "fmt" + "path/filepath" "reflect" "strings" @@ -125,6 +127,273 @@ func TestReadWithManifestInData(t *testing.T) { } } +func TestReadWithSignaturesSkipVerify(t *testing.T) { + signedBadTokenHS256 := `eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmaWxlcyI6W3sibmFtZSI6Ii5tYW5pZmVzdCIsImhhc2giOiI2MDdhMmMzOGExNDQxZGI1OGQyY2I4Nzk4MmM0MmFhOTFhNDM0MmVmNDIyYTZiNTQyZWRkZWJlZWY2ZjA0MTJmIiwiYWxnb3JpdGhtIjoiU0hBLTI1NiJ9LHsibmFtZSI6ImEvYi9jL2RhdGEuanNvbiIsImhhc2giOiI0MmNmZTY3NjhiNTdiYjVmNzUwM2MxNjVjMjhkZDA3YWM1YjgxMzU1NGViYzg1MGYyY2MzNTg0M2U3MTM3YjFkIiwiYWxnb3JpdGhtIjoiU0hBLTI1NiJ9LHsibmFtZSI6Imh0dHAvcG9saWN5L3BvbGljeS5yZWdvIiwiaGFzaCI6ImE2MTVlZWFlZTIxZGU1MTc5ZGUwODBkZThjMzA1MmM4ZGE5MDExMzg0MDZiYTcxYzM4YzAzMjg0NWY3ZDU0ZjQiLCJhbGdvcml0aG0iOiJTSEEtMjU2In1dLCJpYXQiOjE1OTIyNDgwMjcsImlzcyI6IkpXVFNlcnZpY2UiLCJrZXlpZCI6ImZvbyIsInNjb3BlIjoid3JpdGUifQ.sQTuw9tBp6DvvQG-MXSxTzJA3hSnKYxjX5fnxiR22JA` + + files := [][2]string{ + {"/.manifest", `{"revision": "quickbrownfaux"}`}, + {"/.signatures.json", fmt.Sprintf(`{"signatures": ["%v"]}`, signedBadTokenHS256)}, + {"/a/b/c/data.json", "[1,2,3]"}, + {"/http/policy/policy.rego", `package example`}, + } + + vc := NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "", "write", nil) + + buf := archive.MustWriteTarGz(files) 
+ + loader := NewTarballLoaderWithBaseURL(buf, "/foo/bar") + reader := NewCustomReader(loader).WithBaseDir("/foo/bar").WithBundleVerificationConfig(vc).WithSkipBundleVerification(true) + _, err := reader.Read() + if err != nil { + t.Fatalf("Unexpected error %v", err) + } +} + +func TestReadWithSignatures(t *testing.T) { + + signedTokenHS256 := `eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmaWxlcyI6W3sibmFtZSI6Ii5tYW5pZmVzdCIsImhhc2giOiI1MDdhMmMzOGExNDQxZGI1OGQyY2I4Nzk4MmM0MmFhOTFhNDM0MmVmNDIyYTZiNTQyZWRkZWJlZWY2ZjA0MTJmIiwiYWxnb3JpdGhtIjoiU0hBLTI1NiJ9LHsibmFtZSI6ImEvYi9jL2RhdGEuanNvbiIsImhhc2giOiI0MmNmZTY3NjhiNTdiYjVmNzUwM2MxNjVjMjhkZDA3YWM1YjgxMzU1NGViYzg1MGYyY2MzNTg0M2U3MTM3YjFkIiwiYWxnb3JpdGhtIjoiU0hBLTI1NiJ9LHsibmFtZSI6Imh0dHAvcG9saWN5L3BvbGljeS5yZWdvIiwiaGFzaCI6ImE2MTVlZWFlZTIxZGU1MTc5ZGUwODBkZThjMzA1MmM4ZGE5MDExMzg0MDZiYTcxYzM4YzAzMjg0NWY3ZDU0ZjQiLCJhbGdvcml0aG0iOiJTSEEtMjU2In1dLCJpYXQiOjE1OTIyNDgwMjcsImlzcyI6IkpXVFNlcnZpY2UiLCJrZXlpZCI6ImZvbyIsInNjb3BlIjoid3JpdGUifQ.dQ-ojK0xW3RtnGwT29lVevZIXEMXqVKMazSKlAGIdII` + otherSignedTokenHS256 := `eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmaWxlcyI6W3sibmFtZSI6ImEvYi9jL2RhdGEuanNvbiIsImhhc2giOiJmOWNhYzA3MTQ3MDVkMjBkMWEyMDg4MDE4NWNkZWQ2ZTBmNmQwNDA2NjJkMmViYjA5NjFkM2Q5ZjMxN2Q4YWNiIn1dLCJpYXQiOjE1OTIyNDgwMjcsImlzcyI6IkpXVFNlcnZpY2UiLCJrZXlpZCI6ImZvbyIsInNjb3BlIjoid3JpdGUifQ.0CiL8qnOShUsMrmQCTPJUeB6dvMOhhgx1uNdhRP84lA` + + tests := map[string]struct { + files [][2]string + vc *VerificationConfig + wantErr bool + err error + }{ + "no_signature_verification_config": { + [][2]string{{"/.signatures.json", `{"signatures": []}`}}, + nil, + true, fmt.Errorf("verification key not provided"), + }, + "no_signatures_file": { + [][2]string{{"/.manifest", `{"revision": "quickbrownfaux"}`}}, + NewVerificationConfig(map[string]*KeyConfig{}, "", "", nil), + true, fmt.Errorf("bundle missing .signatures.json file"), + }, + "no_signatures": { + [][2]string{{"/.signatures.json", `{"signatures": []}`}}, + NewVerificationConfig(map[string]*KeyConfig{}, "", "", nil), + true, fmt.Errorf(".signatures.json: missing JWT (expected exactly one)"), + }, + "digest_mismatch": { + [][2]string{ + {"/.signatures.json", fmt.Sprintf(`{"signatures": ["%v"]}`, signedTokenHS256)}, + {"/a/b/c/data.json", "[1,2,3]"}, + {"/.manifest", `{"revision": "quickbrownfaux"}`}, + }, + NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "", "write", nil), + true, fmt.Errorf("a/b/c/data.json: digest mismatch (want: 42cfe6768b57bb5f7503c165c28dd07ac5b813554ebc850f2cc35843e7137b1d, got: a615eeaee21de5179de080de8c3052c8da901138406ba71c38c032845f7d54f4)"), + }, + "no_hashing_alg": { + [][2]string{ + {"/.signatures.json", fmt.Sprintf(`{"signatures": ["%v"]}`, otherSignedTokenHS256)}, + {"/a/b/c/data.json", "[1,2,3]"}, + }, + NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "", "write", nil), + true, fmt.Errorf("no hashing algorithm provided for file a/b/c/data.json"), + }, + "exclude_files": { + [][2]string{ + {"/.signatures.json", fmt.Sprintf(`{"signatures": ["%v"]}`, signedTokenHS256)}, + {"/.manifest", `{"revision": "quickbrownfaux"}`}, + {"/a/b/c/data.json", "[1,2,3]"}, + {"/http/policy/policy.rego", `package example`}, + }, + NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "", "write", []string{".*", "a/b/c/data.json", "http/policy/policy.rego"}), + false, nil, + }, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + buf := archive.MustWriteTarGz(tc.files) + reader 
:= NewReader(buf).WithBundleVerificationConfig(tc.vc) + _, err := reader.Read() + + if tc.wantErr { + if err == nil { + t.Fatal("Expected error but got nil") + } + + if tc.err != nil && tc.err.Error() != err.Error() { + t.Fatalf("Expected error message %v but got %v", tc.err.Error(), err.Error()) + } + } else { + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + } + }) + } +} + +func TestReadWithSignaturesWithBaseDir(t *testing.T) { + signedTokenHS256 := `eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmaWxlcyI6W3sibmFtZSI6ImZvby9iYXIvLm1hbmlmZXN0IiwiaGFzaCI6IjUwN2EyYzM4YTE0NDFkYjU4ZDJjYjg3OTgyYzQyYWE5MWE0MzQyZWY0MjJhNmI1NDJlZGRlYmVlZjZmMDQxMmYiLCJhbGdvcml0aG0iOiJTSEEtMjU2In0seyJuYW1lIjoiZm9vL2Jhci9hL2IvYy9kYXRhLmpzb24iLCJoYXNoIjoiYTYxNWVlYWVlMjFkZTUxNzlkZTA4MGRlOGMzMDUyYzhkYTkwMTEzODQwNmJhNzFjMzhjMDMyODQ1ZjdkNTRmNCIsImFsZ29yaXRobSI6IlNIQS0yNTYifSx7Im5hbWUiOiJmb28vYmFyL2h0dHAvcG9saWN5L3BvbGljeS5yZWdvIiwiaGFzaCI6ImY2NjQ0NjFlMzAzYjM3YzIwYzVlMGJlMjkwMDg4MTY3OGNkZjhlODYwYWE0MzNhNWExNGQ0OTRiYTNjNjY2NDkiLCJhbGdvcml0aG0iOiJTSEEtMjU2In1dLCJpYXQiOjE1OTIyNDgwMjcsImlzcyI6IkpXVFNlcnZpY2UiLCJrZXlpZCI6ImZvbyIsInNjb3BlIjoid3JpdGUifQ.OKnnl06TeW8PYB9xzLzZiFWQXu6i0Lns2xJjQ3da3X0` + + files := [][2]string{ + {"/.manifest", `{"revision": "quickbrownfaux"}`}, + {"/.signatures.json", fmt.Sprintf(`{"signatures": ["%v"]}`, signedTokenHS256)}, + {"/a/b/c/data.json", "[1,2,3]"}, + {"/http/policy/policy.rego", `package example`}, + } + + vc := NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "", "write", nil) + + buf := archive.MustWriteTarGz(files) + + loader := NewTarballLoaderWithBaseURL(buf, "/foo/bar") + reader := NewCustomReader(loader).WithBaseDir("/foo/bar").WithBundleVerificationConfig(vc) + _, err := reader.Read() + if err != nil { + t.Fatalf("Unexpected error %v", err) + } +} + +func TestReadWithSignaturesExtraFiles(t *testing.T) { + signedTokenHS256 := `eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmaWxlcyI6W3sibmFtZSI6Ii5tYW5pZmVzdCIsImhhc2giOiI1MDdhMmMzOGExNDQxZGI1OGQyY2I4Nzk4MmM0MmFhOTFhNDM0MmVmNDIyYTZiNTQyZWRkZWJlZWY2ZjA0MTJmIiwiYWxnb3JpdGhtIjoiU0hBLTI1NiJ9LHsibmFtZSI6ImEvYi9jL2RhdGEuanNvbiIsImhhc2giOiI0MmNmZTY3NjhiNTdiYjVmNzUwM2MxNjVjMjhkZDA3YWM1YjgxMzU1NGViYzg1MGYyY2MzNTg0M2U3MTM3YjFkIiwiYWxnb3JpdGhtIjoiU0hBLTI1NiJ9LHsibmFtZSI6Imh0dHAvcG9saWN5L3BvbGljeS5yZWdvIiwiaGFzaCI6ImE2MTVlZWFlZTIxZGU1MTc5ZGUwODBkZThjMzA1MmM4ZGE5MDExMzg0MDZiYTcxYzM4YzAzMjg0NWY3ZDU0ZjQiLCJhbGdvcml0aG0iOiJTSEEtMjU2In1dLCJpYXQiOjE1OTIyNDgwMjcsImlzcyI6IkpXVFNlcnZpY2UiLCJrZXlpZCI6ImZvbyIsInNjb3BlIjoid3JpdGUifQ.dQ-ojK0xW3RtnGwT29lVevZIXEMXqVKMazSKlAGIdII` + + files := [][2]string{ + {"/.manifest", `{"revision": "quickbrownfaux"}`}, + {"/.signatures.json", fmt.Sprintf(`{"signatures": ["%v"]}`, signedTokenHS256)}, + } + + vc := NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "", "write", nil) + + buf := archive.MustWriteTarGz(files) + reader := NewReader(buf).WithBundleVerificationConfig(vc) + _, err := reader.Read() + if err == nil { + t.Fatal("Expected error but got nil") + } + + expected := []string{ + "file(s) [a/b/c/data.json http/policy/policy.rego] specified in bundle signatures but not found in the target bundle", + "file(s) [http/policy/policy.rego a/b/c/data.json] specified in bundle signatures but not found in the target bundle", + } + + var found bool + if err.Error() == expected[0] || err.Error() == expected[1] { + found = true + } + + if !found { + t.Fatalf("Expected error message to be one of %v but got %v", expected, err.Error()) + } +} + 
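For reference, a consumer-side sketch of the reader options exercised by the tests above, assuming a signed tarball rooted at /foo/bar; the key ID, secret, scope and exclude pattern are placeholders. Calling WithSkipBundleVerification(true) instead would bypass signature checking entirely.

package example

import (
	"io"

	"github.com/open-policy-agent/opa/bundle"
)

func loadSignedTarball(tarball io.Reader) (bundle.Bundle, error) {
	keys := map[string]*bundle.KeyConfig{
		"global_key": {Key: "secret", Algorithm: "HS256"},
	}

	// Exclude patterns are matched against the paths recorded in the
	// signature payload using filepath.Match.
	vc := bundle.NewVerificationConfig(keys, "global_key", "write", []string{".manifest"})

	loader := bundle.NewTarballLoaderWithBaseURL(tarball, "/foo/bar")
	return bundle.NewCustomReader(loader).
		WithBaseDir("/foo/bar").
		WithBundleVerificationConfig(vc).
		Read()
}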
+func TestVerifyBundleFileHash(t *testing.T) { + // add files to the bundle and reader + // compare the hash the for target files + files := [][2]string{ + {"/.manifest", `{"revision": "quickbrownfaux"}`}, + {"/a/b/c/data.json", "[1,2,3]"}, + {"/a/b/d/data.json", "true"}, + {"/a/b/y/data.yaml", `foo: 1`}, + {"/example/example.rego", `package example`}, + {"/policy.wasm", `modules-compiled-as-wasm-binary`}, + {"/data.json", `{"x": {"y": true}, "a": {"b": {"z": true}}}}`}, + } + + buf := archive.MustWriteTarGz(files) + reader := NewReader(buf) + reader.files = map[string]FileInfo{} + + expDigests := make([]string, len(files)) + expDigests[0] = "a005c38a509dc2d5a7407b9494efb2ad" + expDigests[1] = "60f7b5dc86ded48785436192a08dbfd04894d7f1b417c4f8d3714679a7f78cb3c833f16a8559a1cf1f32968747dc1d95ef34826263dacf125ded8f5c374be4c0" + expDigests[2] = "b326b5062b2f0e69046810717534cb09" + expDigests[3] = "20f27a640a233e6524fe7d138898583cd43475724806feb26be7f214e1d10b29edf6a0d3cb08f82107a45686b61b8fdabab6406cf4e70efe134f42238dbd70ab" + expDigests[4] = "ceecc199d432a4eeae305914ea4816cb" + expDigests[5] = "4f73765168fd8b5c294b739436da312cc5e979faf09f67bf576d36ea79a4f79c70cbb3c33d06ff65f531a9f42abd0a8f4daacc554cb521837e876dc28f56ce89" + expDigests[6] = "36669864a622563256817033b1fc53db" + + // populate the files on the reader + // this simulates the files seen by the reader after + // decoding the signatures in the "signatures.json" file + for i, f := range files { + file := FileInfo{ + Name: f[0], + Hash: expDigests[i], + } + + if i%2 == 0 { + file.Algorithm = MD5.String() + } else { + file.Algorithm = SHA512.String() + } + + reader.files[f[0]] = file + } + + for _, f := range files { + buf := bytes.NewBufferString(f[1]) + err := reader.verifyBundleFile(f[0], *buf) + if err != nil { + t.Fatal(err) + } + } + + // check there are no files left on the reader + if len(reader.files) != 0 { + t.Fatalf("Expected no files on the reader but got %v", len(reader.files)) + } +} + +func TestIsFileExcluded(t *testing.T) { + cases := []struct { + note string + file string + pattern []string + exp bool + }{ + { + note: "exact", + file: "data.json", + pattern: []string{"data.json"}, + exp: true, + }, + { + note: "hidden", + file: ".manifest", + pattern: []string{".*"}, + exp: true, + }, + { + note: "no_match", + file: "data.json", + pattern: []string{".*"}, + exp: false, + }, + { + note: "dir_match", + file: "/a/b/data.json", + pattern: []string{"/a/b/*"}, + exp: true, + }, + { + note: "dir_no_match", + file: "/a/b/c/data.json", + pattern: []string{"/a/b/*"}, + exp: false, + }, + } + + for _, tc := range cases { + t.Run(tc.note, func(t *testing.T) { + + buf := archive.MustWriteTarGz([][2]string{}) + vc := NewVerificationConfig(map[string]*KeyConfig{}, "", "", tc.pattern) + reader := NewReader(buf).WithBundleVerificationConfig(vc) + actual := reader.isFileExcluded(tc.file) + + if actual != tc.exp { + t.Fatalf("Expected file exclude result for %v %v but got %v", tc.file, tc.exp, actual) + } + }) + } +} + func TestReadRootValidation(t *testing.T) { cases := []struct { note string @@ -215,7 +484,6 @@ func TestReadRootValidation(t *testing.T) { } }) } - } func TestRootPathsContain(t *testing.T) { @@ -407,13 +675,19 @@ func TestRoundtrip(t *testing.T) { }, } + if err := bundle.GenerateSignature(NewSigningConfig("secret", "HS256", ""), "foo", false); err != nil { + t.Fatal("Unexpected error:", err) + } + var buf bytes.Buffer if err := NewWriter(&buf).Write(bundle); err != nil { t.Fatal("Unexpected error:", err) } - bundle2, err 
:= NewReader(&buf).Read() + vc := NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "foo", "", nil) + + bundle2, err := NewReader(&buf).WithBundleVerificationConfig(vc).Read() if err != nil { t.Fatal("Unexpected error:", err) } @@ -422,6 +696,9 @@ func TestRoundtrip(t *testing.T) { t.Fatal("Exp:", bundle, "\n\nGot:", bundle2) } + if !reflect.DeepEqual(bundle2.Signatures, bundle.Signatures) { + t.Fatal("Expected signatures to be same") + } } func TestWriterUsePath(t *testing.T) { @@ -455,6 +732,168 @@ func TestWriterUsePath(t *testing.T) { } } +func TestGenerateSignature(t *testing.T) { + signatures := SignaturesConfig{Signatures: []string{"some_token"}} + + bundle := Bundle{ + Data: map[string]interface{}{ + "foo": map[string]interface{}{ + "bar": []interface{}{json.Number("1"), json.Number("2"), json.Number("3")}, + "baz": true, + "qux": "hello", + }, + }, + Modules: []ModuleFile{ + { + URL: "/foo/corge/corge.rego", + Path: "/foo/corge/corge.rego", + Parsed: ast.MustParseModule(`package foo.corge`), + Raw: []byte("package foo.corge\n"), + }, + }, + Wasm: []byte("modules-compiled-as-wasm-binary"), + Manifest: Manifest{ + Revision: "quickbrownfaux", + }, + Signatures: signatures, + } + + sc := NewSigningConfig("secret", "HS256", "") + + err := bundle.GenerateSignature(sc, "", false) + if err != nil { + t.Fatal("Unexpected error:", err) + } + + if reflect.DeepEqual(signatures, bundle.Signatures) { + t.Fatal("Expected signatures to be different") + } + + current := bundle.Signatures + err = bundle.GenerateSignature(sc, "", false) + if err != nil { + t.Fatal("Unexpected error:", err) + } + + if !reflect.DeepEqual(current, bundle.Signatures) { + t.Fatal("Expected signatures to be same") + } +} + +func TestFormatModulesRaw(t *testing.T) { + + bundle1 := Bundle{ + Modules: []ModuleFile{ + { + URL: "/foo/corge/corge.rego", + Path: "/foo/corge/corge.rego", + Parsed: ast.MustParseModule(`package foo.corge`), + Raw: []byte("package foo.corge\n"), + }, + }, + } + + bundle2 := Bundle{ + Modules: []ModuleFile{ + { + URL: "/foo/corge/corge.rego", + Path: "/foo/corge/corge.rego", + Parsed: ast.MustParseModule(`package foo.corge`), + Raw: []byte("package foo.corge"), + }, + }, + } + + tests := map[string]struct { + bundle Bundle + exp bool + }{ + "equal": {bundle: bundle1, exp: true}, + "not_equal": {bundle: bundle2, exp: false}, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + orig := tc.bundle.Modules[0].Raw + err := tc.bundle.FormatModules(false) + if err != nil { + t.Fatal("Unexpected error:", err) + } + + actual := bytes.Equal(orig, tc.bundle.Modules[0].Raw) + if actual != tc.exp { + t.Fatalf("Expected result %v but got %v", tc.exp, actual) + } + }) + } +} + +func TestFormatModulesParsed(t *testing.T) { + + bundle := Bundle{ + Modules: []ModuleFile{ + { + URL: "/foo/corge/corge.rego", + Path: "/foo/corge/corge.rego", + Parsed: ast.MustParseModule(`package foo.corge`), + Raw: nil, + }, + }, + } + + tests := map[string]struct { + bundle Bundle + }{ + "parsed": {bundle: bundle}, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + err := tc.bundle.FormatModules(false) + if err != nil { + t.Fatal("Unexpected error:", err) + } + + exp := []byte("package foo.corge\n") + if !bytes.Equal(tc.bundle.Modules[0].Raw, exp) { + t.Fatalf("Expected raw policy %v but got %v", exp, tc.bundle.Modules[0].Raw) + } + }) + } +} + +func TestHashBundleFiles(t *testing.T) { + h, _ := NewSignatureHasher(SHA256) + + tests := 
map[string]struct { + data map[string]interface{} + manifest Manifest + wasm []byte + exp int + }{ + "no_content": {map[string]interface{}{}, Manifest{}, []byte{}, 2}, + "data": {map[string]interface{}{"foo": "bar"}, Manifest{}, []byte{}, 2}, + "data_and_manifest": {map[string]interface{}{"foo": "bar"}, Manifest{Revision: "quickbrownfaux"}, []byte{}, 2}, + "data_and_manifest_and_wasm": {map[string]interface{}{"foo": "bar"}, Manifest{Revision: "quickbrownfaux"}, []byte("modules-compiled-as-wasm-binary"), 3}, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + f, err := hashBundleFiles(h, tc.data, tc.manifest, tc.wasm) + if err != nil { + t.Fatal("Unexpected error:", err) + } + + if len(f) != tc.exp { + t.Fatalf("Expected %v file(s) to be added to the signature but got %v", tc.exp, len(f)) + } + }) + } +} + func TestWriterUseURL(t *testing.T) { bundle := Bundle{ diff --git a/bundle/file.go b/bundle/file.go index 02d748bbeb..db8beb1198 100644 --- a/bundle/file.go +++ b/bundle/file.go @@ -2,6 +2,7 @@ package bundle import ( "archive/tar" + "bytes" "compress/gzip" "io" "os" @@ -136,6 +137,10 @@ func (d *dirLoader) NextFile() (*Descriptor, error) { // Trim off the root directory and return path as if chrooted cleanedPath := strings.TrimPrefix(fileName, d.root) + if d.root == "." && filepath.Base(fileName) == ManifestExt { + cleanedPath = fileName + } + if !strings.HasPrefix(cleanedPath, "/") { cleanedPath = "/" + cleanedPath } @@ -148,6 +153,13 @@ type tarballLoader struct { baseURL string r io.Reader tr *tar.Reader + files []file + idx int +} + +type file struct { + name string + reader io.Reader } // NewTarballLoader is deprecated. Use NewTarballLoaderWithBaseURL instead. @@ -181,19 +193,43 @@ func (t *tarballLoader) NextFile() (*Descriptor, error) { t.tr = tar.NewReader(gr) } - for { - header, err := t.tr.Next() - // Eventually we will get an io.EOF error when finished - // iterating through the archive - if err != nil { - return nil, err - } + if t.files == nil { + t.files = []file{} + + for { + header, err := t.tr.Next() + if err == io.EOF { + break + } + + if err != nil { + return nil, err + } + + // Keep iterating on the archive until we find a normal file + if header.Typeflag == tar.TypeReg { + f := file{name: header.Name} - // Keep iterating on the archive until we find a normal file - if header.Typeflag == tar.TypeReg { - // no need to close this descriptor after reading - f := newDescriptor(path.Join(t.baseURL, header.Name), header.Name, t.tr) - return f, nil + var buf bytes.Buffer + if _, err := io.Copy(&buf, t.tr); err != nil { + return nil, errors.Wrapf(err, "failed to copy file %s", header.Name) + } + + f.reader = &buf + + t.files = append(t.files, f) + } } } + + // If done reading files then just return io.EOF + // errors for each NextFile() call + if t.idx >= len(t.files) { + return nil, io.EOF + } + + f := t.files[t.idx] + t.idx++ + + return newDescriptor(path.Join(t.baseURL, f.name), f.name, f.reader), nil } diff --git a/bundle/hash.go b/bundle/hash.go new file mode 100644 index 0000000000..b7f582cbca --- /dev/null +++ b/bundle/hash.go @@ -0,0 +1,141 @@ +// Copyright 2020 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. 
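The tarball loader above now buffers file contents in memory so that descriptors stay readable after the whole archive has been scanned; this is what lets the reader locate .signatures.json first and verify the remaining files afterwards. A rough, illustrative loop over a DirectoryLoader (not taken from the patch):

package example

import (
	"bytes"
	"io"

	"github.com/open-policy-agent/opa/bundle"
)

func listPaths(loader bundle.DirectoryLoader) ([]string, error) {
	var paths []string
	for {
		f, err := loader.NextFile()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}

		// Read the descriptor's content, enforcing the same size cap the
		// bundle reader uses to protect against gzip bombs.
		var buf bytes.Buffer
		_, err = f.Read(&buf, bundle.BundleLimitBytes)
		f.Close() // always close, even on error
		if err != nil && err != io.EOF {
			return nil, err
		}
		paths = append(paths, f.Path())
	}
	return paths, nil
}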
+ +package bundle + +import ( + "bytes" + "crypto/md5" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "encoding/json" + "fmt" + "hash" + "io" + "sort" + "strings" +) + +// HashingAlgorithm represents a subset of hashing algorithms implemented in Go +type HashingAlgorithm string + +// Supported values for HashingAlgorithm +const ( + MD5 HashingAlgorithm = "MD5" + SHA1 HashingAlgorithm = "SHA-1" + SHA224 HashingAlgorithm = "SHA-224" + SHA256 HashingAlgorithm = "SHA-256" + SHA384 HashingAlgorithm = "SHA-384" + SHA512 HashingAlgorithm = "SHA-512" + SHA512224 HashingAlgorithm = "SHA-512-224" + SHA512256 HashingAlgorithm = "SHA-512-256" +) + +// String returns the string representation of a HashingAlgorithm +func (alg HashingAlgorithm) String() string { + return string(alg) +} + +// SignatureHasher computes a signature digest for a file with (structured or unstructured) data and policy +type SignatureHasher interface { + HashFile(v interface{}) ([]byte, error) +} + +type hasher struct { + h func() hash.Hash // hash function factory +} + +// NewSignatureHasher returns a signature hasher suitable for a particular hashing algorithm +func NewSignatureHasher(alg HashingAlgorithm) (SignatureHasher, error) { + h := &hasher{} + + switch alg { + case MD5: + h.h = md5.New + case SHA1: + h.h = sha1.New + case SHA224: + h.h = sha256.New224 + case SHA256: + h.h = sha256.New + case SHA384: + h.h = sha512.New384 + case SHA512: + h.h = sha512.New + case SHA512224: + h.h = sha512.New512_224 + case SHA512256: + h.h = sha512.New512_256 + default: + return nil, fmt.Errorf("unsupported hashing algorithm: %s", alg) + } + + return h, nil +} + +// HashFile hashes the file content, JSON or binary, both in golang native format. +func (h *hasher) HashFile(v interface{}) ([]byte, error) { + hf := h.h() + walk(v, hf) + return hf.Sum(nil), nil +} + +// walk hashes the file content, JSON or binary, both in golang native format. +// +// Computation for unstructured documents is a hash of the document. +// +// Computation for the types of structured JSON document is as follows: +// +// object: Hash {, then each key (in alphabetical order) and digest of the value, then comma (between items) and finally }. +// +// array: Hash [, then digest of the value, then comma (between items) and finally ]. +func walk(v interface{}, h io.Writer) { + + switch x := v.(type) { + case map[string]interface{}: + h.Write([]byte("{")) + + var keys []string + for k := range x { + keys = append(keys, k) + } + sort.Strings(keys) + + for i, key := range keys { + if i > 0 { + h.Write([]byte(",")) + } + + h.Write(encodePrimitive(key)) + h.Write([]byte(":")) + walk(x[key], h) + } + + h.Write([]byte("}")) + case []interface{}: + h.Write([]byte("[")) + + for i, e := range x { + if i > 0 { + h.Write([]byte(",")) + } + walk(e, h) + } + + h.Write([]byte("]")) + case []byte: + h.Write(x) + default: + h.Write(encodePrimitive(x)) + } +} + +func encodePrimitive(v interface{}) []byte { + var buf bytes.Buffer + encoder := json.NewEncoder(&buf) + encoder.SetEscapeHTML(false) + encoder.Encode(v) + return []byte(strings.Trim(string(buf.Bytes()), "\n")) +} diff --git a/bundle/hash_test.go b/bundle/hash_test.go new file mode 100644 index 0000000000..9b8502f626 --- /dev/null +++ b/bundle/hash_test.go @@ -0,0 +1,123 @@ +// Copyright 2020 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. 
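The hashing above canonicalizes structured documents before digesting them: object keys are visited in sorted order, primitives are JSON-encoded without HTML escaping, and raw byte slices are hashed as-is. A small usage sketch, not part of the patch:

package example

import (
	"encoding/hex"

	"github.com/open-policy-agent/opa/bundle"
)

func digestDoc() (string, error) {
	h, err := bundle.NewSignatureHasher(bundle.SHA256)
	if err != nil {
		return "", err
	}

	// The digest depends only on the document's content, not on Go map
	// iteration order, because walk() sorts object keys before hashing.
	doc := map[string]interface{}{"b": []interface{}{"x", "y"}, "a": true}
	bs, err := h.HashFile(doc)
	if err != nil {
		return "", err
	}
	return hex.EncodeToString(bs), nil
}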
+ +package bundle + +import ( + "bytes" + "encoding/json" + "testing" +) + +func TestHashFile(t *testing.T) { + + mapInput := map[string]interface{}{ + "key1": []interface{}{ + "element1", + "element2", + }, + "key2": map[string]interface{}{ + "a": 0, + "b": 1, + "c": json.Number("123.45678911111111111111111111111111111111111111111111111"), + }, + } + + arrayInput := []interface{}{ + []string{"foo", "bar"}, + mapInput, + `package example`, + []string{"$", "α", "©", "™"}, + } + + tests := map[string]struct { + input interface{} + algorithm HashingAlgorithm + }{ + "map": {mapInput, SHA256}, + "array": {arrayInput, MD5}, + "string": {"abc", SHA256}, + "string_with_html_chars": {"", SHA256}, + "null": {`null`, SHA512}, + "bool": {false, SHA256}, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + h, _ := NewSignatureHasher(tc.algorithm) + + // compute hash from the raw bytes + a := encodePrimitive(tc.input) + hash := h.(*hasher).h() + hash.Write(a) + d1 := hash.Sum(nil) + + // compute hash on the input + d2, err := h.(*hasher).HashFile(tc.input) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + if !bytes.Equal(d1, d2) { + t.Fatalf("Digests are not equal. Expected: %x but got: %x", d1, d2) + } + }) + } +} + +func TestHashFileBytes(t *testing.T) { + + mapInput := map[string]interface{}{ + "key1": []interface{}{ + "element1", + "element2", + }, + "key2": map[string]interface{}{ + "a": 0, + "b": 1, + "c": json.Number("123.45678911111111111111111111111111111111111111111111111"), + }, + } + + arrayInput := []interface{}{ + []string{"foo", "bar"}, + mapInput, + `package example`, + []string{"$", "α", "©", "™"}, + } + + arrayBytes, _ := json.Marshal(arrayInput) + mapBytes, _ := json.Marshal(mapInput) + + tests := map[string]struct { + input []byte + algorithm HashingAlgorithm + }{ + "map_byte_array": {mapBytes, SHA256}, + "array_byte_array": {arrayBytes, MD5}, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + h, _ := NewSignatureHasher(tc.algorithm) + + // compute hash from the raw bytes + hash := h.(*hasher).h() + hash.Write(tc.input) + d1 := hash.Sum(nil) + + // compute hash on the input + d2, err := h.(*hasher).HashFile(tc.input) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + if !bytes.Equal(d1, d2) { + t.Fatalf("Digests are not equal. Expected: %x but got: %x", d1, d2) + } + }) + } +} diff --git a/bundle/keys.go b/bundle/keys.go new file mode 100644 index 0000000000..35b7cae538 --- /dev/null +++ b/bundle/keys.go @@ -0,0 +1,181 @@ +// Copyright 2020 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. 
+ +// Package bundle provide helpers that assist in creating the verification and signing key configuration +package bundle + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "os" + "reflect" + + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jws/sign" + + "github.com/open-policy-agent/opa/util" +) + +const ( + defaultTokenSigningAlg = "RS256" +) + +// VerificationConfig represents the key configuration used to verify a signed bundle +type VerificationConfig struct { + PublicKeys map[string]*KeyConfig + KeyID string `json:"keyid"` + Scope string `json:"scope"` + Exclude []string `json:"exclude_files"` +} + +// NewVerificationConfig return a new VerificationConfig +func NewVerificationConfig(keys map[string]*KeyConfig, id, scope string, exclude []string) *VerificationConfig { + return &VerificationConfig{ + PublicKeys: keys, + KeyID: id, + Scope: scope, + Exclude: exclude, + } +} + +// ValidateAndInjectDefaults validates the config and inserts default values +func (vc *VerificationConfig) ValidateAndInjectDefaults(keys map[string]*KeyConfig) error { + vc.PublicKeys = keys + + if vc.KeyID != "" { + found := false + for key := range keys { + if key == vc.KeyID { + found = true + break + } + } + + if !found { + return fmt.Errorf("key id %s not found", vc.KeyID) + } + } + return nil +} + +// GetPublicKey returns the public key corresponding to the given key id +func (vc *VerificationConfig) GetPublicKey(id string) (*KeyConfig, error) { + var kc *KeyConfig + var ok bool + + if kc, ok = vc.PublicKeys[id]; !ok { + return nil, fmt.Errorf("verification key corresponding to ID %v not found", id) + } + return kc, nil +} + +// KeyConfig holds the actual public keys used to verify a signed bundle +type KeyConfig struct { + Key string `json:"key"` + Algorithm string `json:"algorithm"` + Scope string `json:"scope"` +} + +// NewKeyConfig return a new KeyConfig +func NewKeyConfig(key, alg, scope string) *KeyConfig { + return &KeyConfig{ + Key: key, + Algorithm: alg, + Scope: scope, + } +} + +// ParseKeysConfig returns a map containing the public key and the signing algorithm +func ParseKeysConfig(raw json.RawMessage) (map[string]*KeyConfig, error) { + keys := map[string]*KeyConfig{} + var obj map[string]json.RawMessage + + if err := util.Unmarshal(raw, &obj); err == nil { + for k := range obj { + var keyConfig KeyConfig + if err = util.Unmarshal(obj[k], &keyConfig); err != nil { + return nil, err + } + + if err = keyConfig.validateAndInjectDefaults(k); err != nil { + return nil, err + } + + keys[k] = &keyConfig + } + } else { + return nil, err + } + return keys, nil +} + +func (k *KeyConfig) validateAndInjectDefaults(id string) error { + if k.Key == "" { + return fmt.Errorf("invalid keys configuration: verification key empty for key ID %v", id) + } + + if k.Algorithm == "" { + k.Algorithm = defaultTokenSigningAlg + } + + return nil +} + +// Equal returns true if this key config is equal to the other. 
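ParseKeysConfig turns a raw JSON keys block into KeyConfig values, defaulting the algorithm to RS256 when it is omitted. A short sketch of feeding the parsed keys into a VerificationConfig; the key ID and secret are placeholders:

package example

import (
	"github.com/open-policy-agent/opa/bundle"
)

func keysFromConfig() (*bundle.VerificationConfig, error) {
	raw := []byte(`{"global_key": {"algorithm": "HS256", "key": "mysecret"}}`)

	keys, err := bundle.ParseKeysConfig(raw)
	if err != nil {
		return nil, err
	}

	// No scope restriction and no excluded files in this sketch.
	return bundle.NewVerificationConfig(keys, "global_key", "", nil), nil
}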
+func (k *KeyConfig) Equal(other *KeyConfig) bool { + return reflect.DeepEqual(k, other) +} + +// SigningConfig represents the key configuration used to generate a signed bundle +type SigningConfig struct { + Key string + Algorithm string + ClaimsPath string +} + +// NewSigningConfig return a new SigningConfig +func NewSigningConfig(key, alg, claimsPath string) *SigningConfig { + if alg == "" { + alg = defaultTokenSigningAlg + } + + return &SigningConfig{ + Key: key, + Algorithm: alg, + ClaimsPath: claimsPath, + } +} + +// GetPrivateKey returns the private key or secret from the signing config +func (s *SigningConfig) GetPrivateKey() (interface{}, error) { + var priv string + if _, err := os.Stat(s.Key); err == nil { + bs, err := ioutil.ReadFile(s.Key) + if err != nil { + return nil, err + } + priv = string(bs) + } else if os.IsNotExist(err) { + priv = s.Key + } else { + return nil, err + } + return sign.GetSigningKey(priv, jwa.SignatureAlgorithm(s.Algorithm)) +} + +// GetClaims returns the claims by reading the file specified in the signing config +func (s *SigningConfig) GetClaims() (map[string]interface{}, error) { + var claims map[string]interface{} + + bs, err := ioutil.ReadFile(s.ClaimsPath) + if err != nil { + return claims, err + } + + if err := util.UnmarshalJSON(bs, &claims); err != nil { + return claims, err + } + return claims, nil +} diff --git a/bundle/keys_test.go b/bundle/keys_test.go new file mode 100644 index 0000000000..96dda818de --- /dev/null +++ b/bundle/keys_test.go @@ -0,0 +1,343 @@ +// Copyright 2020 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. + +package bundle + +import ( + "crypto/rsa" + "fmt" + "path/filepath" + "reflect" + "testing" + + "github.com/open-policy-agent/opa/util/test" +) + +func TestValidateAndInjectDefaultsVerificationConfig(t *testing.T) { + + tests := map[string]struct { + publicKeys map[string]*KeyConfig + vc *VerificationConfig + wantErr bool + err error + }{ + "valid_config_no_key": { + map[string]*KeyConfig{}, + NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "", "", nil), + false, nil, + }, + "valid_config_with_key": { + map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, + NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "foo", "", nil), + false, nil, + }, + "valid_config_with_key_not_found": { + map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, + NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "bar", "", nil), + true, fmt.Errorf("key id bar not found"), + }, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + err := tc.vc.ValidateAndInjectDefaults(tc.publicKeys) + if tc.wantErr { + if err == nil { + t.Fatal("Expected error but got nil") + } + + if tc.err != nil && tc.err.Error() != err.Error() { + t.Fatalf("Expected error message %v but got %v", tc.err.Error(), err.Error()) + } + } else { + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + } + + if !reflect.DeepEqual(tc.vc.PublicKeys, tc.publicKeys) { + t.Fatalf("Expected public keys %v but got %v", tc.publicKeys, tc.vc.PublicKeys) + } + }) + } +} + +func TestGetPublicKey(t *testing.T) { + tests := map[string]struct { + input string + vc *VerificationConfig + kc *KeyConfig + wantErr bool + err error + }{ + "key_found": { + "foo", + NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", 
Algorithm: "HS256"}}, "", "", nil), + &KeyConfig{Key: "secret", Algorithm: "HS256"}, + false, nil, + }, + "key_not_found": { + "foo", + NewVerificationConfig(map[string]*KeyConfig{}, "", "", nil), + nil, + true, fmt.Errorf("verification key corresponding to ID foo not found"), + }, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + kc, err := tc.vc.GetPublicKey(tc.input) + if tc.wantErr { + if err == nil { + t.Fatal("Expected error but got nil") + } + + if tc.err != nil && tc.err.Error() != err.Error() { + t.Fatalf("Expected error message %v but got %v", tc.err.Error(), err.Error()) + } + } else { + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + } + + if !reflect.DeepEqual(kc, tc.kc) { + t.Fatalf("Expected key config %v but got %v", tc.kc, kc) + } + }) + } +} + +func TestParseKeysConfig(t *testing.T) { + + key := `-----BEGIN PUBLIC KEY----- MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA7nJwME0QNM6g0Ou9Sylj lcIY4cnBcs8oWVHe74bJ7JTgYmDOk2CA14RE3wJNkUKERP/cRdesKDA/BToJXJUr oYvhjXxUYn+i3wK5vOGRY9WUtTF9paIIpIV4USUOwDh3ufhA9K3tyh+ZVsqn80em 0Lj2ME0EgScuk6u0/UYjjNvcmnQl+uDmghG8xBZh7TZW2+aceMwlb4LJIP36VRhg jKQGIxg2rW8ROXgJaFbNRCbiOUUqlq9SUZuhHo8TNOARXXxp9R4Fq7Cl7ZbwWtNP wAtM1y+Z+iyu/i91m0YLlU2XBOGLu9IA8IZjPlbCnk/SygpV9NNwTY9DSQ0QfXcP TGlsbFwzRzTlhH25wEl3j+2Ub9w/NX7Yo+j/Ei9eGZ8cq0bcvEwDeIo98HeNZWrL UUArayRYvh8zutOlzqehw8waFk9AxpfEp9oWekSz8gZw9OL773EhnglYxxjkPHNz k66CufLuTEf6uE9NLE5HnlQMbiqBFirIyAWGKyU3v2tphKvcogxmzzWA51p0GY0l GZvlLNt2NrJv2oGecyl3BLqHnBi+rGAosa/8XgfQT8RIk7YR/tDPDmPfaqSIc0po +NcHYEH82Yv+gfKSK++1fyssGCsSRJs8PFMuPGgv62fFrE/EHSsHJaNWojSYce/T rxm2RaHhw/8O4oKcfrbaRf8CAwEAAQ== -----END PUBLIC KEY-----` + + config := fmt.Sprintf(`{"foo": {"algorithm": "HS256", "key": "FdFYFzERwC2uCBB46pZQi4GG85LujR8obt-KWRBICVQ"}, + "bar": {"key": %v} + }`, key) + + tests := map[string]struct { + input string + result map[string]*KeyConfig + wantErr bool + err error + }{ + "valid_config_one_key": { + `{"foo": {"algorithm": "HS256", "key": "FdFYFzERwC2uCBB46pZQi4GG85LujR8obt-KWRBICVQ"}}`, + map[string]*KeyConfig{"foo": {Key: "FdFYFzERwC2uCBB46pZQi4GG85LujR8obt-KWRBICVQ", Algorithm: "HS256"}}, + false, nil, + }, + "valid_config_two_key": { + config, + map[string]*KeyConfig{ + "foo": {Key: "FdFYFzERwC2uCBB46pZQi4GG85LujR8obt-KWRBICVQ", Algorithm: "HS256"}, + "bar": {Key: key, Algorithm: "RS256"}, + }, + false, nil, + }, + "invalid_config_no_key": { + `{"foo": {"algorithm": "HS256"}}`, + nil, + true, fmt.Errorf("invalid keys configuration: verification key empty for key ID foo"), + }, + "valid_config_default_alg": { + `{"foo": {"key": "FdFYFzERwC2uCBB46pZQi4GG85LujR8obt-KWRBICVQ"}}`, + map[string]*KeyConfig{"foo": {Key: "FdFYFzERwC2uCBB46pZQi4GG85LujR8obt-KWRBICVQ", Algorithm: "RS256"}}, + false, nil, + }, + "invalid_raw_key_config": { + `{"bar": [1,2,3]}`, + nil, + true, fmt.Errorf("json: cannot unmarshal array into Go value of type bundle.KeyConfig"), + }, + "invalid_raw_config": { + `[1,2,3]`, + nil, + true, fmt.Errorf("json: cannot unmarshal array into Go value of type map[string]json.RawMessage"), + }, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + kc, err := ParseKeysConfig([]byte(tc.input)) + if tc.wantErr { + if err == nil { + t.Fatal("Expected error but got nil") + } + + if tc.err != nil && tc.err.Error() != err.Error() { + t.Fatalf("Expected error message %v but got %v", tc.err.Error(), err.Error()) + } + } else { + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + } + + if !reflect.DeepEqual(kc, tc.result) { + 
t.Fatalf("Expected key config %v but got %v", tc.result, kc) + } + }) + } +} + +func TestGetPrivateKey(t *testing.T) { + privateKey := `-----BEGIN RSA PRIVATE KEY----- +MIIJKgIBAAKCAgEA7nJwME0QNM6g0Ou9SyljlcIY4cnBcs8oWVHe74bJ7JTgYmDO +k2CA14RE3wJNkUKERP/cRdesKDA/BToJXJUroYvhjXxUYn+i3wK5vOGRY9WUtTF9 +paIIpIV4USUOwDh3ufhA9K3tyh+ZVsqn80em0Lj2ME0EgScuk6u0/UYjjNvcmnQl ++uDmghG8xBZh7TZW2+aceMwlb4LJIP36VRhgjKQGIxg2rW8ROXgJaFbNRCbiOUUq +lq9SUZuhHo8TNOARXXxp9R4Fq7Cl7ZbwWtNPwAtM1y+Z+iyu/i91m0YLlU2XBOGL +u9IA8IZjPlbCnk/SygpV9NNwTY9DSQ0QfXcPTGlsbFwzRzTlhH25wEl3j+2Ub9w/ +NX7Yo+j/Ei9eGZ8cq0bcvEwDeIo98HeNZWrLUUArayRYvh8zutOlzqehw8waFk9A +xpfEp9oWekSz8gZw9OL773EhnglYxxjkPHNzk66CufLuTEf6uE9NLE5HnlQMbiqB +FirIyAWGKyU3v2tphKvcogxmzzWA51p0GY0lGZvlLNt2NrJv2oGecyl3BLqHnBi+ +rGAosa/8XgfQT8RIk7YR/tDPDmPfaqSIc0po+NcHYEH82Yv+gfKSK++1fyssGCsS +RJs8PFMuPGgv62fFrE/EHSsHJaNWojSYce/Trxm2RaHhw/8O4oKcfrbaRf8CAwEA +AQKCAgAP38h+PrMkgNkN75PDjDbYAnr7lR3u0cHC6INp+NQ6jtK9WeqGvzb0ohaf +rhyR3hbGLS5x6+DHMCcR5wI2iqvD7ncOn0dS42JpbFoHLBEsz0w+H9RYkYf3w/b1 +l/z6aQf3doKEh4u8GAxyTb2OoaeGX7nsD0SMgJpGNHkxH1lAiGaQVcktgYl3AU1K +1J6iVyrDKwAhvp2DZfaT3rSqs5vB4S2TaopBU5KW+9nMe3Lg5aHL5EHolDVrv2uj +iCzkKUKesaiwK9Z+zpzNS24m7chyZY4xCTc8A3uG6ovu0WP2BZtXNNjDoUB0ws2a +mdYNCg1ja/q6+NSSJUZ6d4cwgxuefsSK9MDWVe/JdhWoj+BRZzyPJ8TLsmXFvq48 +8RgR6DigT8/CO6yRANl+hvLBa50N3goNLvg9yWBzm0sU+YSe01jniNHmffHRPIgy +Hu06L2JfVNRjIbCSH2dmt7BjZP0oZNsNFHe3xCDlgi0wRFx32bD0z+EEXoP51dhL +7fmgAio+pKVDkpMpWTTHB0M3f7p+121cBu16pJVWrFwCuyVrtUCSpA7OIbFaS89I +Fp/3tQ9HuAfLvKhupvgqiNCzRwxD00lMfJKA8tiG6nQM1Kq9xHDq7Qqge8PCgW/C +R3+TpDK5MCDOkWyDGWpcNHnYbWW0J6K8bkpCPHFqasxZO8qYiQKCAQEA+JZ9+KfZ +bz3jDk3vBdAvxeodJ9G3N+3/BWtgnrrwwaVSmAID8AJms5Bc63MqIbzMSmCS3aoO +7Gvu9oaFR2sLQYIj8zUb6K8nZX3cU4NPd7zpPBIYZr6AHie1Z/pn9krnjiXc49FQ +KYYiwIu4gAdpDEO9eSNovdR82mIAtWyxgRp2ORa0K1G/RLYQDSZ+J7a6vZgqYNRq +AD3JuVt1IBRXGZF5tm60iAmjwBn+6iV6BToYb8R5WvswvGKbNLaD0vULPAjs4gLi +2/IurNxMgMyAwgYIhX0xtso8ssGbZe3eA1OHesFHkaUDOOa5KKDE2hWQPQynqvHh +VuxTxCnK3IT+iwKCAQEA9Y6KJ3ks8wDpM9vZY4WA6/6sz4ndYNLQSDS4eBxD5bVG +s+V0gWRd+YB93sJfhmc1RhCYusaUWPbL0e77L2tzIWea8xvXwW5iVaYh9EeDA1Ei +whi5SagSPmvLvrIkcOwJ1Hg1vtt9VLDsc4xTRHAxKe36Zgl2xIk/2s6hagKt71cM +Kpurle9WrmxcMvGPQO97NolcHjBYzxCbCfAUNOnf37o7sdmM6KOpPadZqiHtGPKf +sZbwC+itZk9dua2rLvUFZS512MoN2Alnz05LWoQqDe1b/FSeGw7DTAipvIBqdug8 +BHrIy16zWQrVz5Z0+ihZV1veVGzGKHpnESKb57iY3QKCAQEAwmRk0/rmBKCfmwME +pEYd5aXi8M2Fej4pi+JhJx9GwBd5FBeXXqtyBn8guppPWxyZoJwOnTqr+uOYdb3S +IXwqzCpp1Hk2funhY/NdRQ1NKnRW6zu3SzkzVOF2cX4WqDoBA17GcnyvNBmJuYpJ +WAzzb7zVQRKYiMHOdLPom/cIg83en1wKvklpyeCZgr8ULhgtxa9ljFzvG4s14TYM +zG47gmoJhMjjcfIf1Ew/1HhECCxbCaPZxnThsp9lgX4sbd5jz6mnHEJnhtnG+DQ5 +uwqwsYkoRsMVCjzx5FOUIsw1LeK28h6Mye8BKxD5wDSgW247YhIwV3RY47Fg++g2 +k+WIawKCAQEAkUa8W7AoNLhkP8cg7O1OIdDxgnOpIqB2k1GFlaH7VYqTAtmMvQSZ +SISJc2IBy+2BqislgNL9b0jLuy8tMpfabHf0R0JAunLJAK0iR3iLfUniS3z/GiGy +cXWq++4++wPaqPZZrcoDczidG5t4o/PQUmM2Emok9w/QVG6NNr/REdmpHAgvUqxf +1x/KyGT7gMpuVgycEExALnk/kHiWK9v2FFIFASqZYAV7mjtJJAugT3MzoYiQCiul +cvMfmzuxHD3f7EW5eQHJgPfHj/FdSXcJvmWgVz/krlNknbY+XYSH+ENbRrcx1of3 +iYWMi50TJfD7MmDqv33/GnGYSp30KPqgjQKCAQEA3Hyf8czYt7eiffpfg6Z3XFFW +Pl3bDs3k1LRrA12Iyffzr+b0Z/4DRP9QtZDtf1E3X8LtRYAoW/eW/sXkEUeOx+se +QQuByKOofo+HOoOgpMfl5cCsEtGCEhIRuainDJFBF1n//5qeo1sKnXEkjq6B5Zmh +IRRh9s+w6b2kK4u0JvIp+t4F9+XG3jCggw95C0tORmOTQmM3hOXgDJSQegrUXJQP +zTj3rbKGqKWYIxFHsQCY5+3bHZVQyXTwS+N+n1zetBd5Jhhf/lT6CWyuNyfh2M1Z +EXrJfkELSzO66/ZSjyyWEczXHLyr+Q719BsaGsxie117zSNF6B6UXiitjCr/qQ== +-----END RSA PRIVATE KEY-----` + + files := map[string]string{ + "private.pem": privateKey, + } + + test.WithTempFS(files, func(rootDir string) { + + 
sc := NewSigningConfig(filepath.Join(rootDir, "private.pem"), "", "") + + result, err := sc.GetPrivateKey() + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + _, ok := result.(*rsa.PrivateKey) + if !ok { + t.Fatalf("Expected key type *rsa.PrivateKey but got %T", result) + } + + // key file does not exist, check that error generated with RS56 as the signing algorithm + sc = NewSigningConfig("private.pem", "", "") + _, err = sc.GetPrivateKey() + if err == nil { + t.Fatal("Expected error but got nil") + } + + errMsg := "failed to parse PEM block containing the key" + if err.Error() != errMsg { + t.Fatalf("Expected error message %v but got %v", errMsg, err.Error()) + } + + // secret provided on command-line + sc = NewSigningConfig("mysecret", "HS256", "") + result, err = sc.GetPrivateKey() + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + hmackey, ok := result.([]byte) + if !ok { + t.Fatalf("Expected key type []byte but got %T", result) + } + + if string(hmackey) != "mysecret" { + t.Fatalf("Expected HMAC key %v but got %v", "mysecret", string(hmackey)) + } + }) +} + +func TestGetClaimsErrors(t *testing.T) { + files := map[string]string{ + "claims.json": `["foo", "read"]`, + } + + test.WithTempFS(files, func(rootDir string) { + //json unmarshal error + sc := NewSigningConfig("secret", "HS256", filepath.Join(rootDir, "claims.json")) + _, err := sc.GetClaims() + if err == nil { + t.Fatal("Expected error but got nil") + } + + // claims.json does not exist + sc = NewSigningConfig("secret", "HS256", "claims.json") + _, err = sc.GetClaims() + if err == nil { + t.Fatal("Expected error but got nil") + } + + errMsg := "open claims.json: no such file or directory" + if err.Error() != errMsg { + t.Fatalf("Expected error message %v but got %v", errMsg, err.Error()) + } + }) +} + +func TestKeyConfigEqual(t *testing.T) { + tests := map[string]struct { + a *KeyConfig + b *KeyConfig + exp bool + }{ + "equal": { + NewKeyConfig("foo", "RS256", "read"), + NewKeyConfig("foo", "RS256", "read"), + true, + }, + "not_equal": { + NewKeyConfig("foo", "RS256", "read"), + NewKeyConfig("foo", "RS256", "write"), + false, + }, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + actual := tc.a.Equal(tc.b) + + if actual != tc.exp { + t.Fatalf("Expected config equal result %v but got %v", tc.exp, actual) + } + }) + } +} diff --git a/bundle/sign.go b/bundle/sign.go new file mode 100644 index 0000000000..65efbf132f --- /dev/null +++ b/bundle/sign.go @@ -0,0 +1,56 @@ +// Copyright 2020 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. + +// Package bundle provide helpers that assist in the creating a signed bundle +package bundle + +import ( + "encoding/json" + + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jws" +) + +// GenerateSignedToken generates a signed token given the list of files to be +// included in the payload and the bundle signing config. 
The keyID if non-empty, +// represents the value for the "keyid" claim in the token +func GenerateSignedToken(files []FileInfo, sc *SigningConfig, keyID string) (string, error) { + payload, err := generatePayload(files, sc, keyID) + if err != nil { + return "", err + } + + privKey, err := sc.GetPrivateKey() + if err != nil { + return "", err + } + + // generate signed token + token, err := jws.SignWithOption(payload, jwa.SignatureAlgorithm(sc.Algorithm), privKey) + if err != nil { + return "", err + } + return string(token), nil +} + +func generatePayload(files []FileInfo, sc *SigningConfig, keyID string) ([]byte, error) { + payload := make(map[string]interface{}) + payload["files"] = files + + if sc.ClaimsPath != "" { + claims, err := sc.GetClaims() + if err != nil { + return nil, err + } + + for claim, value := range claims { + payload[claim] = value + } + } else { + if keyID != "" { + payload["keyid"] = keyID + } + } + return json.Marshal(payload) +} diff --git a/bundle/sign_test.go b/bundle/sign_test.go new file mode 100644 index 0000000000..7b407ab5d8 --- /dev/null +++ b/bundle/sign_test.go @@ -0,0 +1,182 @@ +// Copyright 2020 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. + +package bundle + +import ( + "encoding/json" + "io/ioutil" + "path/filepath" + "testing" + + "github.com/open-policy-agent/opa/util" + + "github.com/open-policy-agent/opa/util/test" + + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jws" +) + +func TestGenerateSignedToken(t *testing.T) { + + files := [][2]string{ + {"/.manifest", `{"revision": "quickbrownfaux"}`}, + {"/a/b/c/data.json", "[1,2,3]"}, + {"/a/b/d/data.json", "true"}, + {"/a/b/y/data.yaml", `foo: 1`}, + {"/example/example.rego", `package example`}, + {"/policy.wasm", `modules-compiled-as-wasm-binary`}, + {"/data.json", `{"x": {"y": true}, "a": {"b": {"z": true}}}}`}, + } + + input := []FileInfo{} + + expDigests := make([]string, len(files)) + expDigests[0] = "a005c38a509dc2d5a7407b9494efb2ad" + expDigests[1] = "60f7b5dc86ded48785436192a08dbfd04894d7f1b417c4f8d3714679a7f78cb3c833f16a8559a1cf1f32968747dc1d95ef34826263dacf125ded8f5c374be4c0" + expDigests[2] = "b326b5062b2f0e69046810717534cb09" + expDigests[3] = "20f27a640a233e6524fe7d138898583cd43475724806feb26be7f214e1d10b29edf6a0d3cb08f82107a45686b61b8fdabab6406cf4e70efe134f42238dbd70ab" + expDigests[4] = "655578028abb7b9006e93aff9dda8620" + expDigests[5] = "6347e9be8e3051dc054fbbd3db72fb3f7ae7051c4ef6353e29895aa495452179e10e434fb4a60316e06916464bcc5d4ecabbb2797e04c0213943cf8e69f4c0ae" + expDigests[6] = "36669864a622563256817033b1fc53db" + + for i, f := range files { + file := FileInfo{ + Name: f[0], + Hash: expDigests[i], + } + + if i%2 == 0 { + file.Algorithm = MD5.String() + } else { + file.Algorithm = SHA512.String() + } + + input = append(input, file) + } + + sc := NewSigningConfig("secret", "HS256", "") + token, err := GenerateSignedToken(input, sc, "") + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + // verify the signed token + _, err = jws.Verify([]byte(token), jwa.SignatureAlgorithm("HS256"), []byte("secret")) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } +} + +func TestGenerateSignedTokenWithClaims(t *testing.T) { + + files := [][2]string{ + {"/.manifest", `{"revision": "quickbrownfaux"}`}, + {"/a/b/d/data.json", "true"}, + {"/example/example.rego", `package example`}, + {"/data.json", `{"x": {"y": true}, "a": 
{"b": {"z": true}}}}`}, + } + + input := []FileInfo{} + + expDigests := make([]string, len(files)) + expDigests[0] = "a005c38a509dc2d5a7407b9494efb2ad" + expDigests[1] = "b326b5062b2f0e69046810717534cb09" + expDigests[2] = "655578028abb7b9006e93aff9dda8620" + expDigests[3] = "36669864a622563256817033b1fc53db" + + for i, f := range files { + file := FileInfo{ + Name: f[0], + Hash: expDigests[i], + Algorithm: MD5.String(), + } + input = append(input, file) + } + + test.WithTempFS(map[string]string{}, func(rootDir string) { + claims := make(map[string]interface{}) + claims["keyid"] = "foo" + claims["scope"] = "read" + + claimBytes, err := json.Marshal(claims) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + // create claims file + claimsFile := filepath.Join(rootDir, "claims.json") + if err := ioutil.WriteFile(claimsFile, claimBytes, 0644); err != nil { + t.Fatalf("Unexpected error: %s", err) + } + + sc := NewSigningConfig("secret", "HS256", filepath.Join(rootDir, "claims.json")) + token, err := GenerateSignedToken(input, sc, "") + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + // verify the signed token + _, err = jws.Verify([]byte(token), jwa.SignatureAlgorithm("HS256"), []byte("secret")) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + }) +} + +func TestGeneratePayloadWithKeyID(t *testing.T) { + + files := [][2]string{ + {"/.manifest", `{"revision": "quickbrownfaux"}`}, + } + + input := []FileInfo{} + + file := FileInfo{ + Name: files[0][0], + Hash: "a005c38a509dc2d5a7407b9494efb2ad", + Algorithm: MD5.String(), + } + input = append(input, file) + + sc := NewSigningConfig("secret", "HS256", "") + keyID := "default" + + // non-empty key id + bytes, err := generatePayload(input, sc, keyID) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + payload := make(map[string]interface{}) + err = util.UnmarshalJSON(bytes, &payload) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + if _, ok := payload["keyid"]; !ok { + t.Fatal("Expected claim \"keyid\" in token") + } + + if payload["keyid"] != keyID { + t.Fatalf("Expected key id %v but got %v", keyID, payload["keyid"]) + } + + // empty key id + bytes, err = generatePayload(input, sc, "") + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + payload = make(map[string]interface{}) + err = util.UnmarshalJSON(bytes, &payload) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + if _, ok := payload["keyid"]; ok { + t.Fatal("Unexpected claim \"keyid\" in token") + } +} diff --git a/bundle/verify.go b/bundle/verify.go new file mode 100644 index 0000000000..069494bb6a --- /dev/null +++ b/bundle/verify.go @@ -0,0 +1,161 @@ +// Copyright 2020 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. + +// Package bundle provide helpers that assist in the bundle signature verification process +package bundle + +import ( + "bytes" + "encoding/base64" + "encoding/hex" + "fmt" + + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jws" + "github.com/open-policy-agent/opa/internal/jwx/jws/verify" + "github.com/open-policy-agent/opa/util" +) + +// VerifyBundleSignature verifies the bundle signature using the given public keys or secret. 
+// If a signature is verified, it keeps track of the files specified in the JWT payload +func VerifyBundleSignature(sc SignaturesConfig, bvc *VerificationConfig) (map[string]FileInfo, error) { + files := map[string]FileInfo{} + + if len(sc.Signatures) == 0 { + return files, fmt.Errorf(".signatures.json: missing JWT (expected exactly one)") + } + + if len(sc.Signatures) > 1 { + return files, fmt.Errorf(".signatures.json: multiple JWTs not supported (expected exactly one)") + } + + for _, token := range sc.Signatures { + + // decode JWT to check if the payload specifies the key to use for JWT signature verification + parts, err := jws.SplitCompact(token) + if err != nil { + return files, err + } + + payload, err := base64.RawURLEncoding.DecodeString(parts[1]) + if err != nil { + return files, err + } + + var buf bytes.Buffer + buf.Write(payload) + + var jpl DecodedSignature + if err := util.UnmarshalJSON(buf.Bytes(), &jpl); err != nil { + return files, err + } + + // verify the JWT signature + err = verifyJWTSignature(token, jpl, bvc) + if err != nil { + return files, err + } + + // build the map of file names to their info + for _, file := range jpl.Files { + files[file.Name] = file + } + } + return files, nil +} + +func verifyJWTSignature(token string, payload DecodedSignature, bvc *VerificationConfig) error { + // check for the id of the key to use for JWT signature verification + // first in the OPA config. If not found, then check the JWT payload + keyID := bvc.KeyID + if keyID == "" { + keyID = payload.KeyID + } + + if keyID == "" { + return fmt.Errorf("verification key ID is empty") + } + + // now that we have the keyID, fetch the actual key + keyConfig, err := bvc.GetPublicKey(keyID) + if err != nil { + return err + } + + // verify JWT signature + alg := jwa.SignatureAlgorithm(keyConfig.Algorithm) + key, err := verify.GetSigningKey(keyConfig.Key, alg) + if err != nil { + return err + } + + _, err = jws.Verify([]byte(token), alg, key) + if err != nil { + return err + } + + // verify the scope + scope := bvc.Scope + if scope == "" { + scope = keyConfig.Scope + } + + if payload.Scope != scope { + return fmt.Errorf("scope mismatch") + } + return nil +} + +// VerifyBundleFile verifies the hash of a file in the bundle matches to that provided in the bundle's signature +func VerifyBundleFile(path string, data bytes.Buffer, files map[string]FileInfo) error { + var file FileInfo + var ok bool + + if file, ok = files[path]; !ok { + return fmt.Errorf("file %v not included in bundle signature", path) + } + + if file.Algorithm == "" { + return fmt.Errorf("no hashing algorithm provided for file %v", path) + } + + hash, err := NewSignatureHasher(HashingAlgorithm(file.Algorithm)) + if err != nil { + return err + } + + // hash the file content + // For unstructured files, hash the byte stream of the file + // For structured files, read the byte stream and parse into a JSON structure; + // then recursively order the fields of all objects alphabetically and then apply + // the hash function to result to compute the hash. This ensures that the digital signature is + // independent of whitespace and other non-semantic JSON features. 
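+ // For example (illustrative): the documents {"b": 1, "a": {"y": 2, "x": 1}} and
+ // { "a": {"x": 1, "y": 2}, "b": 1 } produce the same digest, because both parse
+ // to the same object once keys are ordered alphabetically before hashing.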
+ var value interface{} + if IsStructuredDoc(path) { + err := util.Unmarshal(data.Bytes(), &value) + if err != nil { + return err + } + } else { + value = data.Bytes() + } + + bs, err := hash.HashFile(value) + if err != nil { + return err + } + + // compare file hash with same file in the JWT payloads + fb, err := hex.DecodeString(file.Hash) + if err != nil { + return err + } + + if !bytes.Equal(fb, bs) { + return fmt.Errorf("%v: digest mismatch (want: %x, got: %x)", path, fb, bs) + } + + delete(files, path) + return nil +} diff --git a/bundle/verify_test.go b/bundle/verify_test.go new file mode 100644 index 0000000000..407109a522 --- /dev/null +++ b/bundle/verify_test.go @@ -0,0 +1,245 @@ +// Copyright 2020 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. + +package bundle + +import ( + "bytes" + "fmt" + "testing" +) + +func TestVerifyBundleSignature(t *testing.T) { + badToken := `eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.4nrInalqppAc9EjsNUD9Y35amVpDGoRk4bkxzdY8fhs` + signedTokenHS256 := `eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmaWxlcyI6W3sibmFtZSI6ImRiL3VhbTIvZW50aXRsZW1lbnRzL2RhdGEuanNvbiIsImhhc2giOiJjMjEzMTU0NGM3MTZhMjVhNWUzMWY1MDQzMDBmNTI0MGU4MjM1Y2FkYjlhNTdmMGJkMWI2ZjRiZDc0YjI2NjEyIiwiYWxnb3JpdGhtIjoic2hhMjU2In0seyJuYW1lIjoiZGIvdWFtMi9wb2xpY3kvb3BhLXBvbGljeS5yZWdvIiwiaGFzaCI6IjQyY2ZlNjc2OGI1N2JiNWY3NTAzYzE2NWMyOGRkMDdhYzViODEzNTU0ZWJjODUwZjJjYzM1ODQzZTcxMzdiMWQifV0sImlhdCI6MTU5MjI0ODAyNywiaXNzIjoiSldUU2VydmljZSIsImtleWlkIjoiZm9vIiwic2NvcGUiOiJ3cml0ZSJ9.4nrInalqppAc9EjsNUD9Y35amVpDGoRk4bkxzdY8fhs` + otherSignedTokenHS256 := `eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmaWxlcyI6W3sibmFtZSI6ImRiL3VhbTMvcm9sZXMvZGF0YS5qc29uIiwiaGFzaCI6ImMyMTMxNTQ0YzcxNmEyNWE1ZTMxZjUwNDMwMGY1MjQwZTgyMzVjYWRiOWE1N2YwYmQxYjZmNGJkNzRiMjY2MTIiLCJhbGdvcml0aG0iOiJtZDUifSx7Im5hbWUiOiJkYi91YW0zL3BvbGljeS9wb2xpY3kucmVnbyIsImhhc2giOiI0MmNmZTY3NjhiNTdiYjVmNzUwM2MxNjVjMjhkZDA3YWM1YjgxMzU1NGViYzg1MGYyY2MzNTg0M2U3MTM3YjFkIn0seyJuYW1lIjoiZGIvdWFtNC9wb2xpY3kvcmVnby5yZWdvIiwiaGFzaCI6IjQyY2ZlNjc2OGI1N2JiNWY3NTAzYzE2NWMyOGRkMDdhYzViODEzNTU0ZWJjODUwZjJjYzM1ODQzZTcxMzdiMWQiLCJhbGdvcml0aG0iOiJzaGEzODQifV0sImlhdCI6MTU5MjI0ODAyNywiaXNzIjoiSldUU2VydmljZSIsImtleWlkIjoiYmFyIiwic2NvcGUiOiJ3cml0ZSJ9.d_NiBXF3zqNPZCEubQC1FC1IYwmwkYwjv00B5UyJ9Dk` + badTokenPayload := `eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmaWxlcyI6eyJuYW1lIjoiZGIvdWFtMi9wb2xpY3kvb3BhLXBvbGljeS5yZWdvIiwiaGFzaCI6IjQyY2ZlNjc2OGI1N2JiNWY3NTAzYzE2NWMyOGRkMDdhYzViODEzNTU0ZWJjODUwZjJjYzM1ODQzZTcxMzdiMWQifSwiaWF0IjoxNTkyMjQ4MDI3LCJpc3MiOiJKV1RTZXJ2aWNlIiwia2V5aWQiOiJmb28iLCJzY29wZSI6IndyaXRlIn0.J3KJFOycHPy4Wkw_LzzIKvTMqCsV8L8DdQW5Q-vieKg` + + tests := map[string]struct { + input SignaturesConfig + readerVerifyConfig *VerificationConfig + wantErr bool + err error + }{ + "no_signatures": {SignaturesConfig{}, nil, true, fmt.Errorf(".signatures.json: missing JWT (expected exactly one)")}, + "multiple_signatures": {SignaturesConfig{Signatures: []string{signedTokenHS256, otherSignedTokenHS256}}, nil, true, fmt.Errorf(".signatures.json: multiple JWTs not supported (expected exactly one)")}, + "invalid_token": {SignaturesConfig{Signatures: []string{badToken}}, nil, true, fmt.Errorf("Failed to split compact serialization")}, + "bad_token_payload": {SignaturesConfig{Signatures: []string{badTokenPayload}}, nil, true, fmt.Errorf("json: cannot unmarshal object into Go struct field DecodedSignature.files of type []bundle.FileInfo")}, + "valid_token_and_scope": { + SignaturesConfig{Signatures: 
[]string{signedTokenHS256}}, + NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "", "write", nil), + false, nil, + }, + "valid_token_and_scope_mismatch": { + SignaturesConfig{Signatures: []string{signedTokenHS256}}, + NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "", "bad_scope", nil), + true, fmt.Errorf("scope mismatch"), + }, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + _, err := VerifyBundleSignature(tc.input, tc.readerVerifyConfig) + + if tc.wantErr { + if err == nil { + t.Fatal("Expected error but got nil") + } + + if tc.err != nil && tc.err.Error() != err.Error() { + t.Fatalf("Expected error message %v but got %v", tc.err.Error(), err.Error()) + } + } else { + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + } + }) + } + + // verify the number files on the reader collected from the JWT + sc := SignaturesConfig{Signatures: []string{signedTokenHS256}} + verificationConfig := NewVerificationConfig(map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "", "write", nil) + files, err := VerifyBundleSignature(sc, verificationConfig) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + expectedNumFiles := 2 + if len(files) != expectedNumFiles { + t.Fatalf("Expected %v files in the JWT payloads but got %v", expectedNumFiles, len(files)) + } +} + +func TestVerifyJWTSignature(t *testing.T) { + signedTokenHS256 := `eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmaWxlcyI6W3sibmFtZSI6ImRiL3VhbTIvZW50aXRsZW1lbnRzL2RhdGEuanNvbiIsImhhc2giOiJjMjEzMTU0NGM3MTZhMjVhNWUzMWY1MDQzMDBmNTI0MGU4MjM1Y2FkYjlhNTdmMGJkMWI2ZjRiZDc0YjI2NjEyIiwiYWxnb3JpdGhtIjoic2hhMjU2In0seyJuYW1lIjoiZGIvdWFtMi9wb2xpY3kvb3BhLXBvbGljeS5yZWdvIiwiaGFzaCI6IjQyY2ZlNjc2OGI1N2JiNWY3NTAzYzE2NWMyOGRkMDdhYzViODEzNTU0ZWJjODUwZjJjYzM1ODQzZTcxMzdiMWQifV0sImlhdCI6MTU5MjI0ODAyNywiaXNzIjoiSldUU2VydmljZSIsImtleWlkIjoiZm9vIiwic2NvcGUiOiJ3cml0ZSJ9.4nrInalqppAc9EjsNUD9Y35amVpDGoRk4bkxzdY8fhs` + + signedTokenRS256 := `eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJmaWxlcyI6W3sibmFtZSI6ImRiL3VhbTIvZW50aXRsZW1lbnRzL2RhdGEuanNvbiIsImhhc2giOiJjMjEzMTU0NGM3MTZhMjVhNWUzMWY1MDQzMDBmNTI0MGU4MjM1Y2FkYjlhNTdmMGJkMWI2ZjRiZDc0YjI2NjEyIiwiYWxnb3JpdGhtIjoic2hhMjU2In0seyJuYW1lIjoiZGIvdWFtMi9wb2xpY3kvb3BhLXBvbGljeS5yZWdvIiwiaGFzaCI6IjQyY2ZlNjc2OGI1N2JiNWY3NTAzYzE2NWMyOGRkMDdhYzViODEzNTU0ZWJjODUwZjJjYzM1ODQzZTcxMzdiMWQifV0sImlhdCI6MTU5MjI1MTQwNiwiaXNzIjoiSldUU2VydmljZSIsImtleWlkIjoiZm9vIiwic2NvcGUiOiJ3cml0ZSJ9.Xp1RTaiy9FXeELDWfdMYSsnRnmeBzRtoi4ewT5zr__4IXR0fgjpOOJgUoeFalTqumZQr3UO9PgSFM1Xfp1ivP8OQhMzCZEg8gI3yXQVfmu2Pb0Bo70t04AsqMjPG0XWIwYBgj20HwCrvjrzV3O9PfMWSLS03kL8iXzZDNu8BnN5BEuG6X2gpd-KcqIy_OMJLJSUaXvD8a7nvhKm0WbRFGImS2ioUK_9zmz2C6T5oedxPrlardsr6TjXDU4QzMSEYXzv0UnXCPxE0oxAktY47AylOpvg0E1AfYtFFiTpMIhtEU00rVOeFKiDicdG-ZxxicXZTYayd3O5kcu5LusUm7naeWGXc0mTNyFLUehqn3rQxgHUOgFmS_IruRVLLflAxHOoa-KWjkHeZYx5mQVAQJZqkR2kf1o31tcmXo8zqEYSywUy40e4xU9ZEJepQ21oS0NkJLq1hSSD-0lSo9rGqsLboxJ_ZHmC109YrGNyxj4-AoIB_6_9UPOu43o2ylDmyxtiti10FjaO5LhLgr9noI9yTiF-0N5nlAQqiIU6v5ImGb0kAaHrk8Jhin52WHMn3gbyC1Ss9bQgEBl71ZrSqG5mxlws86iHCJ6dLTk_7A9KecH24S_Pt8hufWQW9GpzoslditpQFH2fEKnGZqUipUE3qQI063xBAyeoX2YkiVIc` + + publicKeyValid := `-----BEGIN PUBLIC KEY----- +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA7nJwME0QNM6g0Ou9Sylj +lcIY4cnBcs8oWVHe74bJ7JTgYmDOk2CA14RE3wJNkUKERP/cRdesKDA/BToJXJUr +oYvhjXxUYn+i3wK5vOGRY9WUtTF9paIIpIV4USUOwDh3ufhA9K3tyh+ZVsqn80em +0Lj2ME0EgScuk6u0/UYjjNvcmnQl+uDmghG8xBZh7TZW2+aceMwlb4LJIP36VRhg 
+jKQGIxg2rW8ROXgJaFbNRCbiOUUqlq9SUZuhHo8TNOARXXxp9R4Fq7Cl7ZbwWtNP +wAtM1y+Z+iyu/i91m0YLlU2XBOGLu9IA8IZjPlbCnk/SygpV9NNwTY9DSQ0QfXcP +TGlsbFwzRzTlhH25wEl3j+2Ub9w/NX7Yo+j/Ei9eGZ8cq0bcvEwDeIo98HeNZWrL +UUArayRYvh8zutOlzqehw8waFk9AxpfEp9oWekSz8gZw9OL773EhnglYxxjkPHNz +k66CufLuTEf6uE9NLE5HnlQMbiqBFirIyAWGKyU3v2tphKvcogxmzzWA51p0GY0l +GZvlLNt2NrJv2oGecyl3BLqHnBi+rGAosa/8XgfQT8RIk7YR/tDPDmPfaqSIc0po ++NcHYEH82Yv+gfKSK++1fyssGCsSRJs8PFMuPGgv62fFrE/EHSsHJaNWojSYce/T +rxm2RaHhw/8O4oKcfrbaRf8CAwEAAQ== +-----END PUBLIC KEY-----` + + publicKeyInvalid := `-----BEGIN PUBLIC KEY----- +MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDMYfnvWtC8Id5bPKae5yXSxQTt ++Zpul6AnnZWfI2TtIarvjHBFUtXRo96y7hoL4VWOPKGCsRqMFDkrbeUjRrx8iL91 +4/srnyf6sh9c8Zk04xEOpK1ypvBz+Ks4uZObtjnnitf0NBGdjMKxveTq+VE7BWUI +yQjtQ8mbDOsiLLvh7wIDAQAB +-----END PUBLIC KEY-----` + + publicKeyBad := `-----BEGIN PUBLIC KEY----- +MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDMYfnvWtC8Id5bPKae5yXSxQTt ++Zpul6AnnZWfI2TtIarvjHBFUtXRo96y7hoL4VWOPKGCsRqMFDkrbeUjRrx8iL91 +4/srnyf6sh9c8Zk04xEOpK1ypvBz+Ks4uZObtjnnitf0NBGdjMKxveTq+VE7BWUI +yQjtQ8mbDOsiLLvh7wIDAQAB== +-----END PUBLIC KEY-----` + + tests := map[string]struct { + token string + payload DecodedSignature + keys map[string]*KeyConfig + keyID string + wantErr bool + err error + }{ + "no_public_key_id": {"", DecodedSignature{}, map[string]*KeyConfig{}, "", true, fmt.Errorf("verification key ID is empty")}, + "actual_public_key_missing": {"", DecodedSignature{KeyID: "foo"}, map[string]*KeyConfig{}, "", true, fmt.Errorf("verification key corresponding to ID foo not found")}, + "bad_public_key_algorithm": { + "", + DecodedSignature{KeyID: "foo"}, + map[string]*KeyConfig{"foo": {Key: "somekey", Algorithm: "RS007"}}, "", + true, fmt.Errorf("unsupported signature algorithm: RS007"), + }, + "public_key_with_valid_HS256_sign": { + signedTokenHS256, + DecodedSignature{KeyID: "bar"}, + map[string]*KeyConfig{"foo": {Key: "secret", Algorithm: "HS256"}}, "foo", // check keyId in OPA config takes precedence + false, nil, + }, + "public_key_with_invalid_HS256_sign": { + signedTokenHS256, + DecodedSignature{KeyID: "foo"}, + map[string]*KeyConfig{"foo": {Key: "bad_secret", Algorithm: "HS256"}}, "", + true, fmt.Errorf("Failed to verify message: failed to match hmac signature"), + }, + "public_key_with_valid_RS256_sign": { + signedTokenRS256, + DecodedSignature{KeyID: "foo"}, + map[string]*KeyConfig{"foo": {Key: publicKeyValid, Algorithm: "RS256"}}, "", + false, nil, + }, + "public_key_with_invalid_RS256_sign": { + signedTokenRS256, + DecodedSignature{KeyID: "foo"}, + map[string]*KeyConfig{"foo": {Key: publicKeyInvalid, Algorithm: "RS256"}}, "", + true, fmt.Errorf("Failed to verify message: crypto/rsa: verification error"), + }, + "public_key_with_bad_cert_RS256": { + signedTokenRS256, + DecodedSignature{}, + map[string]*KeyConfig{"foo": {Key: publicKeyBad, Algorithm: "RS256"}}, "foo", + true, fmt.Errorf("failed to parse PEM block containing the key"), + }, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + err := verifyJWTSignature(tc.token, tc.payload, NewVerificationConfig(tc.keys, tc.keyID, "", nil)) + + if tc.wantErr { + if err == nil { + t.Fatal("Expected error but got nil") + } + + if tc.err != nil && tc.err.Error() != err.Error() { + t.Fatalf("Expected error message %v but got %v", tc.err.Error(), err.Error()) + } + } else { + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + } + }) + } + + // public key id provided in OPA config, actual public key signed using RS256. 
Valid signature + keys := map[string]*KeyConfig{} + keys["foo"] = &KeyConfig{ + Key: publicKeyValid, + Algorithm: "RS256", + } + + err := verifyJWTSignature(signedTokenRS256, DecodedSignature{}, NewVerificationConfig(keys, "foo", "", nil)) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } +} + +func TestVerifyBundleFile(t *testing.T) { + + tests := map[string]struct { + files [][2]string + readerFiles map[string]FileInfo + wantErr bool + err error + }{ + "file_not_found": { + [][2]string{{"/.manifest", `{"revision": "quickbrownfaux"}`}}, + map[string]FileInfo{}, + true, fmt.Errorf("file /.manifest not included in bundle signature"), + }, + "bad_hashing_algorithm": { + [][2]string{{"/.manifest", `{"revision": "quickbrownfaux"}`}}, + map[string]FileInfo{"/.manifest": { + Name: "/.manifest", + Hash: "e7dc95e14ad6cd75d044c13d52ee3ab1", + Algorithm: "MD6", + }}, + true, fmt.Errorf("unsupported hashing algorithm: MD6"), + }, + "bad_digest": { + [][2]string{{"/.manifest", `{"revision": "quickbrownfaux"}`}}, + map[string]FileInfo{"/.manifest": { + Name: "/.manifest", + Hash: "874984d68515ba2439c04dddf5b21574", + Algorithm: MD5.String(), + }}, + true, fmt.Errorf("/.manifest: digest mismatch (want: 874984d68515ba2439c04dddf5b21574, got: a005c38a509dc2d5a7407b9494efb2ad)"), + }, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + data := bytes.NewBufferString(tc.files[0][1]) + err := VerifyBundleFile(tc.files[0][0], *data, tc.readerFiles) + + if tc.wantErr { + if err == nil { + t.Fatal("Expected error but got nil") + } + + if tc.err != nil && tc.err.Error() != err.Error() { + t.Fatalf("Expected error message %v but got %v", tc.err.Error(), err.Error()) + } + } else { + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + } + }) + } +} diff --git a/cmd/build.go b/cmd/build.go index c27b4e943e..cef7678c87 100644 --- a/cmd/build.go +++ b/cmd/build.go @@ -12,21 +12,32 @@ import ( "os" "strings" + "github.com/open-policy-agent/opa/bundle" + "github.com/spf13/cobra" "github.com/open-policy-agent/opa/compile" "github.com/open-policy-agent/opa/util" ) +const defaultPublicKeyID = "default" + type buildParams struct { - target *util.EnumFlag - bundleMode bool - optimizationLevel int - entrypoints repeatedStringFlag - outputFile string - revision string - ignore []string - debug bool + target *util.EnumFlag + bundleMode bool + optimizationLevel int + entrypoints repeatedStringFlag + outputFile string + revision string + ignore []string + debug bool + algorithm string + key string + scope string + pubKey string + pubKeyID string + claimsFile string + excludeVerifyFiles []string } func newBuildParams() buildParams { @@ -96,7 +107,52 @@ The 'build' command supports targets (specified by -t): The -e flag tells the 'build' command which documents will be queried by the software asking for policy decisions, so that it can focus optimization efforts and ensure -that document is not eliminated by the optimizer.`, +that document is not eliminated by the optimizer. + +The 'build' command can be used to verify the signature of a signed bundle and +also to generate a signature for the output bundle the command creates. + +If the directory path(s) provided to the 'build' command contain a ".signatures.json" file, +it will attempt to verify the signatures included in that file. The bundle files +or directory path(s) to verify must be specified using --bundle. 
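+
+A signing key can also be provided on its own. For example, to sign the output
+bundle without verifying an existing signature:
+
+    $ opa build --signing-key /path/to/private_key.pem --bundle foo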
+ +For more information on the bundle verification process see +https://www.openpolicyagent.org/docs/latest/management/#signature-verification. + +Example: + + $ opa build --verification-key /path/to/public_key.pem --signing-key /path/to/private_key.pem --bundle foo + +Where foo has the following structure: + + foo/ + | + +-- bar/ + | | + | +-- data.json + | + +-- policy.rego + | + +-- .manifest + | + +-- .signatures.json + + +The 'build' command will verify the signatures using the public key provided by the --verification-key flag. +The default signing algorithm is RS256 and the --signing-alg flag can be used to specify +a different one. The --verification-key-id and --scope flags can be used to specify the name for the key +provided using the --verification-key flag and scope to use for bundle signature verification respectively. + +If the verification succeeds, the 'build' command will write out an updated ".signatures.json" file +to the output bundle. It will use the key specified by the --signing-key flag to sign +the token in the ".signatures.json" file. + +To include additional claims in the payload use the --claims-file flag to provide a JSON file +containing optional claims. + +For more information on the format of the ".signatures.json" file +see https://www.openpolicyagent.org/docs/latest/management/#bundle-signature. +`, PreRunE: func(Cmd *cobra.Command, args []string) error { if len(args) == 0 { return fmt.Errorf("expected at least one path") @@ -117,8 +173,21 @@ that document is not eliminated by the optimizer.`, buildCommand.Flags().VarP(&buildParams.entrypoints, "entrypoint", "e", "set slash separated entrypoint path") buildCommand.Flags().StringVarP(&buildParams.revision, "revision", "r", "", "set output bundle revision") buildCommand.Flags().StringVarP(&buildParams.outputFile, "output", "o", "bundle.tar.gz", "set the output filename") + addBundleModeFlag(buildCommand.Flags(), &buildParams.bundleMode, false) addIgnoreFlag(buildCommand.Flags(), &buildParams.ignore) + + // bundle verification config + addVerificationKeyFlag(buildCommand.Flags(), &buildParams.pubKey) + addVerificationKeyIDFlag(buildCommand.Flags(), &buildParams.pubKeyID, defaultPublicKeyID) + addSigningAlgFlag(buildCommand.Flags(), &buildParams.algorithm, defaultTokenSigningAlg) + addBundleVerificationScopeFlag(buildCommand.Flags(), &buildParams.scope) + addBundleVerificationExcludeFilesFlag(buildCommand.Flags(), &buildParams.excludeVerifyFiles) + + // bundle signing config + addSigningKeyFlag(buildCommand.Flags(), &buildParams.key) + addClaimsFileFlag(buildCommand.Flags(), &buildParams.claimsFile) + RootCommand.AddCommand(buildCommand) } @@ -126,6 +195,16 @@ func dobuild(params buildParams, args []string) error { buf := bytes.NewBuffer(nil) + // generate the bundle verification and signing config + bvc := buildVerificationConfig(params.pubKey, params.pubKeyID, params.algorithm, params.scope, params.excludeVerifyFiles) + bsc := buildSigningConfig(params.key, params.algorithm, params.claimsFile) + + if bvc != nil || bsc != nil { + if !params.bundleMode { + return fmt.Errorf("enable bundle mode (ie. --bundle) to verify or sign bundle files or directories") + } + } + compiler := compile.New(). WithTarget(params.target.String()). WithAsBundle(params.bundleMode). @@ -134,7 +213,13 @@ func dobuild(params buildParams, args []string) error { WithEntrypoints(params.entrypoints.v...). WithPaths(args...). WithFilter(buildCommandLoaderFilter(params.bundleMode, params.ignore)). 
- WithRevision(params.revision) + WithRevision(params.revision). + WithBundleVerificationConfig(bvc). + WithBundleSigningConfig(bsc) + + if params.claimsFile == "" { + compiler = compiler.WithBundleVerificationKeyID(params.pubKeyID) + } err := compiler.Build(context.Background()) @@ -170,6 +255,23 @@ func buildCommandLoaderFilter(bundleMode bool, ignore []string) func(string, os. } } +func buildVerificationConfig(pubKey, pubKeyID, alg, scope string, excludeFiles []string) *bundle.VerificationConfig { + if pubKey == "" { + return nil + } + + keyConfig := bundle.NewKeyConfig(pubKey, alg, scope) + return bundle.NewVerificationConfig(map[string]*bundle.KeyConfig{pubKeyID: keyConfig}, pubKeyID, scope, excludeFiles) +} + +func buildSigningConfig(key, alg, claimsFile string) *bundle.SigningConfig { + if key == "" { + return nil + } + + return bundle.NewSigningConfig(key, alg, claimsFile) +} + func printdebug(w io.Writer, debug []compile.Debug) { for i := range debug { fmt.Fprintln(w, debug[i]) diff --git a/cmd/build_test.go b/cmd/build_test.go index 51cf68220b..6d8ac67ba7 100644 --- a/cmd/build_test.go +++ b/cmd/build_test.go @@ -91,3 +91,29 @@ func TestBuildErrorDoesNotWriteFile(t *testing.T) { } }) } + +func TestBuildErrorVerifyNonBundle(t *testing.T) { + + files := map[string]string{ + "test.rego": ` + package test + p { p } + `, + } + + test.WithTempFS(files, func(root string) { + params := newBuildParams() + params.outputFile = path.Join(root, "bundle.tar.gz") + params.pubKey = "secret" + + err := dobuild(params, []string{root}) + if err == nil { + t.Fatal("expected error but got nil") + } + + exp := "enable bundle mode (ie. --bundle) to verify or sign bundle files or directories" + if err.Error() != exp { + t.Fatalf("expected error message %v but got %v", exp, err.Error()) + } + }) +} diff --git a/cmd/check.go b/cmd/check.go index 3e3ca86ea5..122763951a 100644 --- a/cmd/check.go +++ b/cmd/check.go @@ -59,7 +59,7 @@ func checkModules(args []string) int { if checkParams.bundleMode { for _, path := range args { - b, err := loader.NewFileLoader().AsBundle(path) + b, err := loader.NewFileLoader().WithSkipBundleVerification(true).AsBundle(path) if err != nil { outputErrors(err) return 1 diff --git a/cmd/deps.go b/cmd/deps.go index 03a972d81c..5a1447fc61 100644 --- a/cmd/deps.go +++ b/cmd/deps.go @@ -90,7 +90,7 @@ func deps(args []string, params depsCommandParams) error { if len(params.bundlePaths.v) > 0 { for _, path := range params.bundlePaths.v { - b, err := loader.NewFileLoader().AsBundle(path) + b, err := loader.NewFileLoader().WithSkipBundleVerification(true).AsBundle(path) if err != nil { return err } diff --git a/cmd/eval.go b/cmd/eval.go index f2ae75e416..2aad518526 100644 --- a/cmd/eval.go +++ b/cmd/eval.go @@ -387,6 +387,9 @@ func setupEval(args []string, params evalCommandParams) (*evalContext, error) { } } + // skip bundle verification + regoArgs = append(regoArgs, rego.SkipBundleVerification(true)) + inputBytes, err := readInputBytes(params) if err != nil { return nil, err diff --git a/cmd/flags.go b/cmd/flags.go index 6dad6566db..37ccb639d0 100644 --- a/cmd/flags.go +++ b/cmd/flags.go @@ -85,6 +85,38 @@ func addIgnoreFlag(fs *pflag.FlagSet, ignoreNames *[]string) { fs.StringSliceVarP(ignoreNames, "ignore", "", []string{}, "set file and directory names to ignore during loading (e.g., '.*' excludes hidden files)") } +func addSigningAlgFlag(fs *pflag.FlagSet, alg *string, value string) { + fs.StringVarP(alg, "signing-alg", "", value, "name of the signing algorithm") +} + +func 
addClaimsFileFlag(fs *pflag.FlagSet, file *string) { + fs.StringVarP(file, "claims-file", "", "", "set path of JSON file containing optional claims (see: https://openpolicyagent.org/docs/latest/management/#bundle-signature-format)") +} + +func addSigningKeyFlag(fs *pflag.FlagSet, key *string) { + fs.StringVarP(key, "signing-key", "", "", "set the secret (HMAC) or path of the PEM file containing the private key (RSA and ECDSA)") +} + +func addVerificationKeyFlag(fs *pflag.FlagSet, key *string) { + fs.StringVarP(key, "verification-key", "", "", "set the secret (HMAC) or path of the PEM file containing the public key (RSA and ECDSA)") +} + +func addVerificationKeyIDFlag(fs *pflag.FlagSet, keyID *string, value string) { + fs.StringVarP(keyID, "verification-key-id", "", value, "name assigned to the verification key used for bundle verification") +} + +func addBundleVerificationScopeFlag(fs *pflag.FlagSet, scope *string) { + fs.StringVarP(scope, "scope", "", "", "scope to use for bundle signature verification") +} + +func addBundleVerificationSkipFlag(fs *pflag.FlagSet, skip *bool, value bool) { + fs.BoolVarP(skip, "skip-verify", "", value, "disables bundle signature verification") +} + +func addBundleVerificationExcludeFilesFlag(fs *pflag.FlagSet, excludeNames *[]string) { + fs.StringSliceVarP(excludeNames, "exclude-files-verify", "", []string{}, "set file names to exclude during bundle verification") +} + const ( explainModeOff = "off" explainModeFull = "full" diff --git a/cmd/oracle.go b/cmd/oracle.go index 7f67148306..ed507567a3 100644 --- a/cmd/oracle.go +++ b/cmd/oracle.go @@ -107,7 +107,7 @@ func dofindDefinition(params findDefinitionParams, stdin io.Reader, stdout io.Wr if len(params.bundlePaths.v) > 1 { return errors.New("not implemented: multiple bundle paths") } - b, err = loader.NewFileLoader().AsBundle(params.bundlePaths.v[0]) + b, err = loader.NewFileLoader().WithSkipBundleVerification(true).AsBundle(params.bundlePaths.v[0]) if err != nil { return err } diff --git a/cmd/run.go b/cmd/run.go index bf0230de50..6f9c8288a2 100644 --- a/cmd/run.go +++ b/cmd/run.go @@ -26,17 +26,23 @@ const ( ) type runCmdParams struct { - rt runtime.Params - tlsCertFile string - tlsPrivateKeyFile string - tlsCACertFile string - ignore []string - serverMode bool - skipVersionCheck bool - authentication *util.EnumFlag - authorization *util.EnumFlag - logLevel *util.EnumFlag - logFormat *util.EnumFlag + rt runtime.Params + tlsCertFile string + tlsPrivateKeyFile string + tlsCACertFile string + ignore []string + serverMode bool + skipVersionCheck bool + authentication *util.EnumFlag + authorization *util.EnumFlag + logLevel *util.EnumFlag + logFormat *util.EnumFlag + algorithm string + scope string + pubKey string + pubKeyID string + skipBundleVerify bool + excludeVerifyFiles []string } func newRunParams() runCmdParams { @@ -109,10 +115,49 @@ Use the "help input" command in the interactive shell to see more options. File paths can be specified as URLs to resolve ambiguity in paths containing colons: $ opa run file:///c:/path/to/data.json + +The 'run' command can also verify the signature of a signed bundle. +A signed bundle is a normal OPA bundle that includes a file +named ".signatures.json". For more information on signed bundles +see https://www.openpolicyagent.org/docs/latest/management/#signing. + +The key to verify the signature of signed bundle can be provided +using the --verification-key flag. For example, for RSA family of algorithms, +the command expects a PEM file containing the public key. 
+For HMAC family of algorithms (eg. HS256), the secret can be provided +using the --verification-key flag. + +The --verification-key-id flag can be used to optionally specify a name for the +key provided using the --verification-key flag. + +The --signing-alg flag can be used to specify the signing algorithm. +The 'run' command uses RS256 (by default) as the signing algorithm. + +The --scope flag can be used to specify the scope to use for +bundle signature verification. + +Example: + + $ opa run --verification-key secret --signing-alg HS256 --bundle bundle.tar.gz + +The 'run' command will read the bundle "bundle.tar.gz", check the +".signatures.json" file and perform verification using the provided key. +An error will be generated if "bundle.tar.gz" does not contain a ".signatures.json" file. +For more information on the bundle verification process see +https://www.openpolicyagent.org/docs/latest/management/#signature-verification. + +The 'run' command can ONLY be used with the --bundle flag to verify signatures +for existing bundle files or directories following the bundle structure. + +To skip bundle verification, use the --skip-verify flag. `, Run: func(cmd *cobra.Command, args []string) { ctx := context.Background() - rt := initRuntime(ctx, cmdParams, args) + rt, err := initRuntime(ctx, cmdParams, args) + if err != nil { + fmt.Println("error:", err) + os.Exit(1) + } startRuntime(ctx, rt, cmdParams.serverMode) }, } @@ -142,6 +187,14 @@ File paths can be specified as URLs to resolve ambiguity in paths containing col runCommand.Flags().BoolVar(&cmdParams.skipVersionCheck, "skip-version-check", false, "disables anonymous version reporting (see: https://openpolicyagent.org/docs/latest/privacy)") addIgnoreFlag(runCommand.Flags(), &cmdParams.ignore) + // bundle verification config + addVerificationKeyFlag(runCommand.Flags(), &cmdParams.pubKey) + addVerificationKeyIDFlag(runCommand.Flags(), &cmdParams.pubKeyID, defaultPublicKeyID) + addSigningAlgFlag(runCommand.Flags(), &cmdParams.algorithm, defaultTokenSigningAlg) + addBundleVerificationScopeFlag(runCommand.Flags(), &cmdParams.scope) + addBundleVerificationSkipFlag(runCommand.Flags(), &cmdParams.skipBundleVerify, false) + addBundleVerificationExcludeFilesFlag(runCommand.Flags(), &cmdParams.excludeVerifyFiles) + usageTemplate := `Usage: {{.UseLine}} [files] @@ -154,7 +207,7 @@ Flags: RootCommand.AddCommand(runCommand) } -func initRuntime(ctx context.Context, params runCmdParams, args []string) *runtime.Runtime { +func initRuntime(ctx context.Context, params runCmdParams, args []string) (*runtime.Runtime, error) { authenticationSchemes := map[string]server.AuthenticationScheme{ "token": server.AuthenticationToken, "tls": server.AuthenticationTLS, @@ -168,15 +221,13 @@ func initRuntime(ctx context.Context, params runCmdParams, args []string) *runti cert, err := loadCertificate(params.tlsCertFile, params.tlsPrivateKeyFile) if err != nil { - fmt.Println("error:", err) - os.Exit(1) + return nil, err } if params.tlsCACertFile != "" { pool, err := loadCertPool(params.tlsCACertFile) if err != nil { - fmt.Println("error:", err) - os.Exit(1) + return nil, err } params.rt.CertPool = pool } @@ -195,13 +246,20 @@ func initRuntime(ctx context.Context, params runCmdParams, args []string) *runti params.rt.EnableVersionCheck = !params.skipVersionCheck + params.rt.SkipBundleVerification = params.skipBundleVerify + + params.rt.BundleVerificationConfig = buildVerificationConfig(params.pubKey, params.pubKeyID, params.algorithm, params.scope, 
params.excludeVerifyFiles) + + if params.rt.BundleVerificationConfig != nil && !params.rt.BundleMode { + return nil, fmt.Errorf("enable bundle mode (ie. --bundle) to verify bundle files or directories") + } + rt, err := runtime.NewRuntime(ctx, params.rt) if err != nil { - fmt.Fprintln(os.Stderr, "error:", err) - os.Exit(1) + return nil, err } - return rt + return rt, nil } func startRuntime(ctx context.Context, rt *runtime.Runtime, serverMode bool) { diff --git a/cmd/run_test.go b/cmd/run_test.go index 8150c46003..65eab289ab 100644 --- a/cmd/run_test.go +++ b/cmd/run_test.go @@ -16,7 +16,10 @@ func TestRunServerBase(t *testing.T) { params := newTestRunParams() ctx, cancel := context.WithCancel(context.Background()) - rt := initRuntime(ctx, params, nil) + rt, err := initRuntime(ctx, params, nil) + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } testRuntime := e2e.WrapRuntime(ctx, cancel, rt) @@ -29,7 +32,7 @@ func TestRunServerBase(t *testing.T) { done <- true }() - err := testRuntime.WaitForServer() + err = testRuntime.WaitForServer() if err != nil { t.Fatalf("Unexpected error: %s", err) } @@ -45,7 +48,10 @@ func TestRunServerWithDiagnosticAddr(t *testing.T) { params.rt.DiagnosticAddrs = &[]string{":0"} ctx, cancel := context.WithCancel(context.Background()) - rt := initRuntime(ctx, params, nil) + rt, err := initRuntime(ctx, params, nil) + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } testRuntime := e2e.WrapRuntime(ctx, cancel, rt) @@ -58,7 +64,7 @@ func TestRunServerWithDiagnosticAddr(t *testing.T) { done <- true }() - err := testRuntime.WaitForServer() + err = testRuntime.WaitForServer() if err != nil { t.Fatalf("Unexpected error: %s", err) } @@ -77,6 +83,23 @@ func TestRunServerWithDiagnosticAddr(t *testing.T) { <-done } +func TestInitRuntimeVerifyNonBundle(t *testing.T) { + + params := newTestRunParams() + params.pubKey = "secret" + params.serverMode = false + + _, err := initRuntime(context.Background(), params, nil) + if err == nil { + t.Fatal("Expected error but got nil") + } + + exp := "enable bundle mode (ie. --bundle) to verify bundle files or directories" + if err.Error() != exp { + t.Fatalf("expected error message %v but got %v", exp, err.Error()) + } +} + func newTestRunParams() runCmdParams { params := newRunParams() params.rt.GracefulShutdownPeriod = 1 diff --git a/cmd/sign.go b/cmd/sign.go new file mode 100644 index 0000000000..2c293d932f --- /dev/null +++ b/cmd/sign.go @@ -0,0 +1,280 @@ +// Copyright 2018 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. + +package cmd + +import ( + "bytes" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "strings" + + initload "github.com/open-policy-agent/opa/internal/runtime/init" + + "github.com/open-policy-agent/opa/bundle" + + "io/ioutil" + "os" + "path/filepath" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/open-policy-agent/opa/util" +) + +type signCmdParams struct { + algorithm string + key string + claimsFile string + outputFilePath string + bundleMode bool +} + +const ( + defaultTokenSigningAlg = "RS256" + defaultHashingAlg = "SHA-256" + signaturesFile = ".signatures.json" +) + +func newSignCmdParams() signCmdParams { + return signCmdParams{} +} + +func init() { + cmdParams := newSignCmdParams() + + var signCommand = &cobra.Command{ + Use: "sign [ [...]]", + Short: "Generate an OPA bundle signature", + Long: `Generate an OPA bundle signature. 
+ +The 'sign' command generates a digital signature for policy bundles. It generates a +".signatures.json" file that dictates which files should be included in the bundle, +what their SHA hashes are, and is cryptographically secure. + +The signatures file is a JSON file with an array containing a single JSON Web Token (JWT) +that encapsulates the signature for the bundle. + +The --signing-alg flag can be used to specify the algorithm to sign the token. The 'sign' +command uses RS256 (by default) as the signing algorithm. +See https://www.openpolicyagent.org/docs/latest/configuration/#keys +for a list of supported signing algorithms. + +The key to be used for signing the JWT MUST be provided using the --signing-key flag. +For example, for RSA family of algorithms, the command expects a PEM file containing +the private key. +For HMAC family of algorithms (eg. HS256), the secret can be provided using +the --signing-key flag. + +OPA 'sign' can ONLY be used with the --bundle flag to load paths that refer to +existing bundle files or directories following the bundle structure. + + $ opa sign --signing-key /path/to/private_key.pem --bundle foo + +Where foo has the following structure: + + foo/ + | + +-- bar/ + | | + | +-- data.json + | + +-- policy.rego + | + +-- .manifest + +This will create a ".signatures.json" file in the current directory. +The --output-file-path flag can be used to specify a different location for +the ".signatures.json" file. + +The content of the ".signatures.json" file is shown below: + + { + "signatures": [ + "eyJhbGciOiJSUzI1NiJ9.eyJmaWxlcyI6W3sibmFtZSI6Ii5tYW5pZmVzdCIsImhhc2giOiIxODc0NWRlNzJjMDFlODBjZDlmNTIwZjQxOGMwMDlhYzRkMmMzZDAyYjE3YTUwZTJkMDQyMTU4YmMzNTJhMzJkIiwiYWxnb3JpdGhtIjoiU0hBLTI1NiJ9LHsibmFtZSI6ImJhci9kYXRhLmpzb24iLCJoYXNoIjoiOTNhMjM5NzFhOTE0ZTVlYWNiZjBhOGQyNTE1NGNkYTMwOWMzYzFjNzJmYmI5OTE0ZDQ3YzYwZjNjYjY4MTU4OCIsImFsZ29yaXRobSI6IlNIQS0yNTYifSx7Im5hbWUiOiJwb2xpY3kucmVnbyIsImhhc2giOiJkMGYyNDJhYWUzNGRiNTRlZjU2NmJlYTRkNDVmY2YxOTcwMGM1ZDhmODdhOWRiOTMyZGZhZDZkMWYwZjI5MWFjIiwiYWxnb3JpdGhtIjoiU0hBLTI1NiJ9XX0.lNsmRqrmT1JI4Z_zpY6IzHRZQAU306PyOjZ6osquixPuTtdSBxgbsdKDcp7Civw3B77BgygVsvx4k3fYr8XCDKChm0uYKScrpFr9_yS6g5mVTQws3KZncZXCQHdupRFoqMS8vXAVgJr52C83AinYWABwH2RYq_B0ZPf_GDzaMgzpep9RlDNecGs57_4zlyxmP2ESU8kjfX8jAA6rYFKeGXJHMD-j4SassoYIzYRv9YkHx8F8Y2ae5Kd5M24Ql0kkvqc_4eO_T9s4nbQ4q5qGHGE-91ND1KVn2avcUyVVPc0-XCR7EH8HnHgCl0v1c7gX1RL7ET7NJbPzfmzQAzk0ZW0dEHI4KZnXSpqy8m-3zAc8kIARm2QwoNEWpy3MWiooPeZVSa9d5iw1aLrbyumfjBP0vCQEPes-Aa6PrARwd5jR9SacO5By0-4emzskvJYRZqbfJ9tXSXDMcAFOAm6kqRPJaj8AO4CyajTC_Lt32_0OLeXqYgNpt3HDqLqGjrb-8fVeQc-hKh0aES8XehQqXj4jMwfsTyj5alsXZm08LwzcFlfQZ7s1kUtmr0_BBNJYcdZUdlu6Qio3LFSRYXNuu6edAO1VH5GKqZISvE1uvDZb2E0Z-rtH-oPp1iSpfvsX47jKJ42LVpI6OahEBri44dzHOIwwm3CIuV8gFzOwR0k" + ] + } + +And the decoded JWT payload has the following form: + + { + "files": [ + { + "name": ".manifest", + "hash": "18745de72c01e80cd9f520f418c009ac4d2c3d02b17a50e2d042158bc352a32d", + "algorithm": "SHA-256" + }, + { + "name": "policy.rego", + "hash": "d0f242aae34db54ef566bea4d45fcf19700c5d8f87a9db932dfad6d1f0f291ac", + "algorithm": "SHA-256" + }, + { + "name": "bar/data.json", + "hash": "93a23971a914e5eacbf0a8d25154cda309c3c1c72fbb9914d47c60f3cb681588", + "algorithm": "SHA-256" + } + ] + } + +The "files" field is generated from the files under the directory path(s) +provided to the 'sign' command. During bundle signature verification, OPA will check +each file name (ex. "foo/bar/data.json") in the "files" field +exists in the actual bundle. The file content is hashed using SHA256. 
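+
+For example, to sign a bundle with an HMAC secret instead of an RSA private key:
+
+    $ opa sign --signing-key mysecret --signing-alg HS256 --bundle foo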
+ +To include additional claims in the payload use the --claims-file flag to provide +a JSON file containing optional claims. + +For more information on the format of the ".signatures.json" file see +https://www.openpolicyagent.org/docs/latest/management/#bundle-signature. +`, + PreRunE: func(Cmd *cobra.Command, args []string) error { + return validateSignParams(args, cmdParams) + }, + + Run: func(cmd *cobra.Command, args []string) { + if err := doSign(args, cmdParams); err != nil { + fmt.Println("error:", err) + os.Exit(1) + } + }, + } + + addBundleModeFlag(signCommand.Flags(), &cmdParams.bundleMode, false) + + // bundle signing config + addSigningKeyFlag(signCommand.Flags(), &cmdParams.key) + addClaimsFileFlag(signCommand.Flags(), &cmdParams.claimsFile) + addSigningAlgFlag(signCommand.Flags(), &cmdParams.algorithm, defaultTokenSigningAlg) + + signCommand.Flags().StringVarP(&cmdParams.outputFilePath, "output-file-path", "o", ".", "set the location for the .signatures.json file") + + RootCommand.AddCommand(signCommand) +} + +func doSign(args []string, params signCmdParams) error { + load, err := initload.WalkPaths(args, nil, params.bundleMode) + if err != nil { + return err + } + + hash, err := bundle.NewSignatureHasher(bundle.HashingAlgorithm(defaultHashingAlg)) + if err != nil { + return err + } + + files, err := readBundleFiles(load.BundlesLoader, hash) + if err != nil { + return err + } + + // generate the signed token + token, err := bundle.GenerateSignedToken(files, buildSigningConfig(params.key, params.algorithm, params.claimsFile), "") + if err != nil { + return err + } + + return writeTokenToFile(token, params.outputFilePath) +} + +func readBundleFiles(loaders []initload.BundleLoader, h bundle.SignatureHasher) ([]bundle.FileInfo, error) { + files := []bundle.FileInfo{} + + for _, bl := range loaders { + for { + f, err := bl.DirectoryLoader.NextFile() + if err == io.EOF { + break + } + + if err != nil { + return files, errors.Wrap(err, "bundle read failed") + } + + // skip existing signatures file + if strings.HasSuffix(f.Path(), bundle.SignaturesFile) { + continue + } + + var buf bytes.Buffer + n, err := f.Read(&buf, bundle.BundleLimitBytes) + f.Close() + + if err != nil && err != io.EOF { + return files, err + } else if err == nil && n >= bundle.BundleLimitBytes { + return files, fmt.Errorf("bundle exceeded max size (%v bytes)", bundle.BundleLimitBytes-1) + } + + path := f.Path() + if bl.IsDir { + path = f.URL() + } + + // hash the file content + fi, err := hashFileContent(h, buf.Bytes(), path) + if err != nil { + return files, err + } + files = append(files, fi) + } + } + return files, nil +} + +func hashFileContent(h bundle.SignatureHasher, data []byte, path string) (bundle.FileInfo, error) { + + var fileInfo bundle.FileInfo + var value interface{} + + if bundle.IsStructuredDoc(path) { + err := util.Unmarshal(data, &value) + if err != nil { + return fileInfo, err + } + } else { + value = data + } + + bytes, err := h.HashFile(value) + if err != nil { + return fileInfo, err + } + + return bundle.NewFile(strings.TrimPrefix(path, "/"), hex.EncodeToString(bytes), defaultHashingAlg), nil +} + +func writeTokenToFile(token, fileLoc string) error { + content := make(map[string]interface{}) + content["signatures"] = []string{token} + + bs, err := json.MarshalIndent(content, "", " ") + if err != nil { + return err + } + + path := signaturesFile + if fileLoc != "" { + path = filepath.Join(fileLoc, path) + } + return ioutil.WriteFile(path, bs, 0644) +} + +func validateSignParams(args 
[]string, params signCmdParams) error { + if len(args) == 0 { + return fmt.Errorf("specify atleast one path containing policy and/or data files") + } + + if params.key == "" { + return fmt.Errorf("specify the secret (HMAC) or path of the PEM file containing the private key (RSA and ECDSA)") + } + + if !params.bundleMode { + return fmt.Errorf("enable bundle mode (ie. --bundle) to sign bundle files or directories") + } + return nil +} diff --git a/cmd/sign_test.go b/cmd/sign_test.go new file mode 100644 index 0000000000..d6fb101a70 --- /dev/null +++ b/cmd/sign_test.go @@ -0,0 +1,185 @@ +// Copyright 2018 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. + +package cmd + +import ( + "bytes" + "encoding/json" + "fmt" + + "github.com/open-policy-agent/opa/internal/file/archive" + + "github.com/open-policy-agent/opa/bundle" + + "io/ioutil" + "os" + "path/filepath" + "testing" + + "github.com/open-policy-agent/opa/util/test" +) + +func TestWriteTokenToFile(t *testing.T) { + + token := `eyJhbGciOiJSUzI1NiJ9.eyJmaWxlcyI6W3sibmFtZSI6ImJ1bmRsZS8ubWFuaWZlc3QiLCJoYXNoIjoiZWUwZWRiZGZkMjgzNTBjNDk2ZjA4ODI3Y2E1Y2VhYjgwMzA2NzI0YjYyZGY1ZjY0MDRlNzBjYjc2NjYxNWQ5ZCIsImFsZ29yaXRobSI6IlNIQTI1NiJ9LHsibmFtZSI6ImJ1bmRsZS9odHRwL2V4YW1wbGUvYXV0aHovYXV0aHoucmVnbyIsImhhc2giOiI2MDJiZTcwMWIyYmE4ZTc3YTljNTNmOWIzM2QwZTkwM2MzNGMwMGMzMDkzM2Y2NDZiYmU3NGI3YzE2NGY2OGM2IiwiYWxnb3JpdGhtIjoiU0hBMjU2In0seyJuYW1lIjoiYnVuZGxlL3JvbGVzL2JpbmRpbmcvZGF0YS5qc29uIiwiaGFzaCI6ImIxODg1NTViZjczMGVlNDdkZjBiY2Y4MzVlYTNmNTQ1MjlmMzc4N2Y0ODQxZjFhZGE2MDM5M2RhYWZhZmJkYzciLCJhbGdvcml0aG0iOiJTSEEyNTYifV0sImtleWlkIjoiZm9vIiwic2NvcGUiOiJyZWFkIn0.YojuPnGWutdlDL7lwFGBXqPfDtxOG2BuZmShN5zm-G9zfMprI1AMqKDoPoNv4tuCGIBNXwoNsYHYiK538CHfJEfY1v4iDX3JFEWQlwx_CfJWDonwqT9SY9tHUW7PUUrI_WgJXZ5zei8RAMYMymKSb9hpSAtfGg_PU0kZr52WzjbPUj4SRiB19Swi61r0CFXYjbfx3GDJdjrGTNBSWrUCMrdhHYLEWqJPfSQ-fYfRrgQVhq3BJLwJJe66dgBEGnHEgA7XMuxkNIOv7mj3Y_EChbv2tjrD9NJPekDcYH1zCEc4BycHjNCcsGiQXDE6sFtoNZiCXLB2D0sLqUnBx4TCw27wTPfcOuL2KauLPahZitnH5mYvQD8NI76Pm4NSyJfevwdWjSsrT7vf0DCLS-dU6r9dJ79xM_hJU7136CT8ARcmSrk-EvCqfkrH2c4WwZyAzdyyyFumMZh4CYc2vcC7ap0NANHJT193fTud1i23mx1PBslwXdsIqXvBGlTbR7nb9o661m-B_mxbHMkG4nIeoGpZoaBJw8RVaA6-4D55gtk8aaMyLJIlIIlV2_AKOLk3nPG3ACHiLSndasLDOIRIYkCluIEaM2FLEEPEtJfKNR6e1K-EK2TvNKMDAEUtJW71ggOuGQ3b5otYOoVVENJLwm-PsO7qb2Tq6PyAquI3ExU` + expected := make(map[string]interface{}) + expected["signatures"] = []string{token} + + files := map[string]string{} + + test.WithTempFS(files, func(rootDir string) { + err := writeTokenToFile(token, rootDir) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + bs, err := ioutil.ReadFile(filepath.Join(rootDir, ".signatures.json")) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + expectedBytes, err := json.MarshalIndent(expected, "", " ") + if err != nil { + t.Fatal(err) + } + + if !bytes.Equal(expectedBytes, bs) { + t.Fatal("Unexpected content in \".signatures.json\" file") + } + }) +} + +func TestDoSign(t *testing.T) { + files := map[string]string{ + "foo/bar/data.json": `{"y": 2}`, + "/example/example.rego": `package example`, + "/.signatures.json": `{"signatures": []}`, + } + test.WithTempFS(files, func(rootDir string) { + params := signCmdParams{ + algorithm: "HS256", + key: "mysecret", + outputFilePath: rootDir, + bundleMode: true, + } + + err := doSign([]string{rootDir}, params) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + }) +} + +func TestBundleSignVerification(t *testing.T) { + + // files to be 
included in the bundle + files := map[string]string{ + "/.manifest": `{"revision": "quickbrownfaux"}`, + "/a/b/c/data.json": "[1,2,3]", + "/a/b/d/data.json": "true", + "/a/b/y/data.yaml": `foo: 1`, + "/example/example.rego": `package example`, + "/policy.wasm": `modules-compiled-as-wasm-binary`, + "/data.json": `{"x": {"y": true}, "a": {"b": {"z": true}}}}`, + } + + test.WithTempFS(files, func(rootDir string) { + params := signCmdParams{ + algorithm: "HS256", + key: "mysecret", + outputFilePath: rootDir, + bundleMode: true, + } + + err := doSign([]string{rootDir}, params) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + // create gzipped tarball + var filesInBundle [][2]string + err = filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error { + if !info.IsDir() { + bs, err := ioutil.ReadFile(path) + if err != nil { + return err + } + filesInBundle = append(filesInBundle, [2]string{path, string(bs)}) + } + return nil + }) + if err != nil { + t.Fatal(err) + } + + buf := archive.MustWriteTarGz(filesInBundle) + + // bundle verification config + kc := bundle.KeyConfig{ + Key: "mysecret", + Algorithm: "HS256", + } + + bvc := bundle.NewVerificationConfig(map[string]*bundle.KeyConfig{"foo": &kc}, "foo", "", nil) + reader := bundle.NewReader(buf).WithBundleVerificationConfig(bvc).WithBaseDir(rootDir) + + _, err = reader.Read() + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + }) +} + +func TestValidateSignParams(t *testing.T) { + + tests := map[string]struct { + args []string + params signCmdParams + wantErr bool + err error + }{ + "no_args": { + []string{}, + newSignCmdParams(), + true, fmt.Errorf("specify atleast one path containing policy and/or data files"), + }, + "no_signing_key": { + []string{"foo"}, + newSignCmdParams(), + true, fmt.Errorf("specify the secret (HMAC) or path of the PEM file containing the private key (RSA and ECDSA)"), + }, + "non_bundle_mode": { + []string{"foo"}, + signCmdParams{key: "foo"}, + true, fmt.Errorf("enable bundle mode (ie. --bundle) to sign bundle files or directories"), + }, + "no_error": { + []string{"foo"}, + signCmdParams{key: "foo", bundleMode: true}, + false, nil, + }, + } + + for name, tc := range tests { + t.Run(name, func(t *testing.T) { + + err := validateSignParams(tc.args, tc.params) + + if tc.wantErr { + if err == nil { + t.Fatal("Expected error but got nil") + } + + if tc.err != nil && tc.err.Error() != err.Error() { + t.Fatalf("Expected error message %v but got %v", tc.err.Error(), err.Error()) + } + } else { + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + } + }) + } +} diff --git a/compile/compile.go b/compile/compile.go index 380e72de06..ff322f18ab 100644 --- a/compile/compile.go +++ b/compile/compile.go @@ -7,13 +7,14 @@ package compile import ( "context" - "errors" "fmt" "io" "io/ioutil" "regexp" "sort" + "github.com/pkg/errors" + "github.com/open-policy-agent/opa/ast" "github.com/open-policy-agent/opa/bundle" "github.com/open-policy-agent/opa/internal/ref" @@ -43,18 +44,21 @@ var validTargets = map[string]struct{}{ // Compiler implements bundle compilation and linking. type Compiler struct { - bundle *bundle.Bundle // the bundle that the compiler operates on - revision *string // the revision to set on the output bundle - asBundle bool // whether to assume bundle layout on file loading or not - filter loader.Filter // filter to apply to file loader - paths []string // file paths to load. TODO(tsandall): add support for supplying readers for embedded users. 
- entrypoints orderedStringSet // policy entrypoints required for optimization and certain targets - optimizationLevel int // how aggressive should optimization be - target string // target type (wasm, rego, etc.) - output io.Writer // output stream to write bundle to - entrypointrefs []*ast.Term // validated entrypoints computed from default decision or manually supplied entrypoints - compiler *ast.Compiler // rego ast compiler used for semantic checks and rewriting - debug *debugEvents // debug information produced during build + bundle *bundle.Bundle // the bundle that the compiler operates on + revision *string // the revision to set on the output bundle + asBundle bool // whether to assume bundle layout on file loading or not + filter loader.Filter // filter to apply to file loader + paths []string // file paths to load. TODO(tsandall): add support for supplying readers for embedded users. + entrypoints orderedStringSet // policy entrypoints required for optimization and certain targets + optimizationLevel int // how aggressive should optimization be + target string // target type (wasm, rego, etc.) + output io.Writer // output stream to write bundle to + entrypointrefs []*ast.Term // validated entrypoints computed from default decision or manually supplied entrypoints + compiler *ast.Compiler // rego ast compiler used for semantic checks and rewriting + debug *debugEvents // debug information produced during build + bvc *bundle.VerificationConfig // represents the key configuration used to verify a signed bundle + bsc *bundle.SigningConfig // represents the key configuration used to generate a signed bundle + keyID string // represents the name of the default key used to verify a signed bundle } type debugEvents struct { @@ -145,6 +149,25 @@ func (c *Compiler) WithFilter(filter loader.Filter) *Compiler { return c } +// WithBundleVerificationConfig sets the key configuration to use to verify a signed bundle +func (c *Compiler) WithBundleVerificationConfig(config *bundle.VerificationConfig) *Compiler { + c.bvc = config + return c +} + +// WithBundleSigningConfig sets the key configuration to use to generate a signed bundle +func (c *Compiler) WithBundleSigningConfig(config *bundle.SigningConfig) *Compiler { + c.bsc = config + return c +} + +// WithBundleVerificationKeyID sets the key to use to verify a signed bundle. +// If provided, the "keyid" claim in the bundle signature, will be set to this value +func (c *Compiler) WithBundleVerificationKeyID(keyID string) *Compiler { + c.keyID = keyID + return c +} + // Build compiles and links the input files and outputs a bundle to the writer. func (c *Compiler) Build(ctx context.Context) error { @@ -170,6 +193,16 @@ func (c *Compiler) Build(ctx context.Context) error { c.bundle.Manifest.Revision = *c.revision } + if err := c.bundle.FormatModules(false); err != nil { + return err + } + + if c.bsc != nil { + if err := c.bundle.GenerateSignature(c.bsc, c.keyID, false); err != nil { + return err + } + } + return bundle.NewWriter(c.output).Write(*c.bundle) } @@ -208,9 +241,10 @@ func (c *Compiler) initBundle() error { // TODO(tsandall): the metrics object should passed through here so we that // we can track read and parse times. 
- load, err := initload.LoadPaths(c.paths, c.filter, c.asBundle) + + load, err := initload.LoadPaths(c.paths, c.filter, c.asBundle, c.bvc, false) if err != nil { - return err + return errors.Wrap(err, "load error") } if c.asBundle { diff --git a/compile/compile_test.go b/compile/compile_test.go index 4d809bdc81..7ec510814d 100644 --- a/compile/compile_test.go +++ b/compile/compile_test.go @@ -134,6 +134,11 @@ func TestCompilerLoadAsBundleSuccess(t *testing.T) { panic(err) } + err = exp.FormatModules(false) + if err != nil { + t.Fatal(err) + } + if !compiler.bundle.Equal(*exp) { t.Fatalf("expected %v but got %v", exp, compiler.bundle) } @@ -208,6 +213,11 @@ func TestCompilerLoadFilesystem(t *testing.T) { panic(err) } + err = exp.FormatModules(false) + if err != nil { + t.Fatal(err) + } + if !compiler.bundle.Equal(*exp) { t.Fatalf("Expected:\n\n%v\n\nGot:\n\n%v", exp, compiler.bundle) } diff --git a/config/config.go b/config/config.go index 0af6c50e74..29fc5a5628 100644 --- a/config/config.go +++ b/config/config.go @@ -24,6 +24,7 @@ type Config struct { DecisionLogs json.RawMessage `json:"decision_logs"` Status json.RawMessage `json:"status"` Plugins map[string]json.RawMessage `json:"plugins"` + Keys json.RawMessage `json:"keys"` DefaultDecision *string `json:"default_decision"` DefaultAuthorizationDecision *string `json:"default_authorization_decision"` } diff --git a/docs/content/configuration.md b/docs/content/configuration.md index 962da8620f..b4e913e798 100644 --- a/docs/content/configuration.md +++ b/docs/content/configuration.md @@ -41,6 +41,9 @@ bundles: polling: min_delay_seconds: 60 max_delay_seconds: 120 + signing: + keyid: global_key + scope: write decision_logs: service: acmecorp @@ -52,6 +55,12 @@ status: service: acmecorp default_decision: /http/example/authz/allow + +keys: + global_key: + algorithm: RS256 + key: + scope: read ``` #### Environment Variable Substitution @@ -318,17 +327,57 @@ services: | `default_authorization_decision` | `string` | No (default: `/system/authz/allow`) | Set path of default authorization decision for OPA's API. | | `plugins` | `object` | No (default: `{}`) | Location for custom plugin configuration. See [Plugins](../plugins) for details. | +### Keys + +Keys is a dictionary mapping the key name to the actual key and optionally the algorithm and scope. + +| Field | Type | Required | Description | +| --- | --- | --- | --- | +| `keys[_].key` | `string` | Yes | Actual key to use for bundle signature verification. | +| `keys[_].algorithm` | `string` | No (default: `RS256`) | Name of the signing algorithm. | +| `keys[_].scope` | `string` | No | Scope to use for bundle signature verification. | + +> Note: If the `scope` is provided in a bundle's `signing` configuration (ie. `bundles[_].signing.scope`), +> it takes precedence over `keys[_].scope`. + +The following signing algorithms are supported: + +| Name | Description | +| --- | --- | +| `ES256` | ECDSA using P-256 and SHA-256 | +| `ES384` | ECDSA using P-384 and SHA-384 | +| `ES512` | ECDSA using P-521 and SHA-512 | +| `HS256` | HMAC using SHA-256 | +| `HS384` | HMAC using SHA-384 | +| `HS512` | HMAC using SHA-512 | +| `PS256` | RSASSA-PSS using SHA256 and MGF1-SHA256 | +| `PS384` | RSASSA-PSS using SHA384 and MGF1-SHA384 | +| `PS512` | RSASSA-PSS using SHA512 and MGF1-SHA512 | +| `RS256` | RSASSA-PKCS-v1.5 using SHA-256 | +| `RS384` | RSASSA-PKCS-v1.5 using SHA-384 | +| `RS512` | RSASSA-PKCS-v1.5 using SHA-512 | + ### Bundles Bundles are defined with a key that is the `name` of the bundle. 
This `name` is used in the status API, decision logs, server provenance, etc. +Each bundle can be configured to verify a bundle signature using the `keyid` and `scope` fields. The `keyid` is the name of +one of the keys listed under the [keys](#keys) entry. + +Signature verification fails if the `bundles[_].signing` field is configured on a bundle but no `.signatures.json` file is +included in the actual bundle gzipped tarball. + | Field | Type | Required | Description | | --- | --- | --- | --- | | `bundles[_].resource` | `string` | No (default: `bundles/`) | Resource path to use to download bundle from configured service. | | `bundles[_].service` | `string` | Yes | Name of service to use to contact remote server. | | `bundles[_].polling.min_delay_seconds` | `int64` | No (default: `60`) | Minimum amount of time to wait between bundle downloads. | | `bundles[_].polling.max_delay_seconds` | `int64` | No (default: `120`) | Maximum amount of time to wait between bundle downloads. | +| `bundles[_].signing.keyid` | `string` | No | Name of the key to use for bundle signature verification. | +| `bundles[_].signing.scope` | `string` | No | Scope to use for bundle signature verification. | +| `bundles[_].signing.exclude_files` | `array` | No | Files in the bundle to exclude during verification. | + ### Bundle (Deprecated) @@ -370,8 +419,11 @@ server provenance, etc. | Field | Type | Required | Description | | --- | --- | --- | --- | | `discovery.name` | `string` | Yes | Name of the discovery configuration to download. | -| `discovery.resource` | `string` | No (default: `/bundles/` | Resource path to use to download bundle from configured service. | +| `discovery.resource` | `string` | No (default: `/bundles/`) | Resource path to use to download bundle from configured service. | | `discovery.prefix` | `string` | No (default: `bundles`) | Deprecated: Use `resource` instead. Path prefix to use to download configuration from remote server. | | `discovery.decision` | `string` | No (default: value of `discovery.name` configuration field) | Name of the OPA query that will be used to calculate the configuration | | `discovery.polling.min_delay_seconds` | `int64` | No (default: `60`) | Minimum amount of time to wait between configuration downloads. | | `discovery.polling.max_delay_seconds` | `int64` | No (default: `120`) | Maximum amount of time to wait between configuration downloads. | +| `discovery.signing.keyid` | `string` | No | Name of the key to use for bundle signature verification. | +| `discovery.signing.scope` | `string` | No | Scope to use for bundle signature verification. | +| `discovery.signing.exclude_files` | `array` | No | Files in the bundle to exclude during verification. | \ No newline at end of file diff --git a/docs/content/management.md b/docs/content/management.md index 6d5e4ef5d3..5ac1b2a5d6 100644 --- a/docs/content/management.md +++ b/docs/content/management.md @@ -100,6 +100,9 @@ bundles: polling: min_delay_seconds: 10 max_delay_seconds: 20 + signing: + keyid: my_global_key + scope: read ``` Using this configuration, OPA will fetch bundles from @@ -122,6 +125,9 @@ be useful when relying on default `resource` behavior with a name like `authz/bundle.tar.gz` which results in a `resource` of `bundles/authz/bundle.tar.gz`. +The optional `bundles[_].signing` field can be used to specify the `keyid` and `scope` that should be used +for verifying the signature of the bundle. See [this](#bundle-signature) section for details. 
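To make the relationship concrete, the snippet below is an illustrative sketch (not part of this change) showing how a bundle's `signing.keyid` points at an entry under the top-level `keys` section documented in the configuration reference. The key name, bundle name, service name, and key material are placeholders.

```yaml
keys:
  my_global_key:
    algorithm: RS256                      # default is RS256 if omitted
    key: <PEM-encoded RSA public key>     # placeholder; the real key is supplied out-of-band
    scope: read                           # bundles[_].signing.scope takes precedence if set

bundles:
  authz:
    service: acmecorp
    resource: bundles/authz/bundle.tar.gz
    signing:
      keyid: my_global_key
      scope: read
```

With a configuration of this shape, the bundle is only activated if its `.signatures.json` verifies against the key configured under `my_global_key`.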
+ See the following section for details on the bundle file format. > Note: The `bundle` config keyword will still work with the current versions @@ -267,6 +273,143 @@ you intended and that they are structured correctly. For example: opa run bundle.tar.gz ``` +### Signing + +To ensure the integrity of policies (ie. the policies are coming from a trusted source), policy bundles may be +digitally signed so that industry-standard cryptographic primitives can verify their authenticity. + +OPA supports digital signatures for policy bundles. Specifically, a signed bundle is a normal OPA bundle that includes +a file named `.signatures.json` that dictates which files should be included in the bundle, what their SHA hashes are, +and of course is cryptographically secure. + +When OPA receives a new bundle, it checks that it has been properly signed using a (public) key that OPA has been +configured with out-of-band. Only if that verification succeeds does OPA activate the new bundle; otherwise, OPA +continues using its existing bundle and reports an activation failure via the status API and error logging. + + > ⚠️ `opa run` performs bundle signature verification only when the `-b`/`--bundle` flag is given +> or when Bundle downloading is enabled. Sub-commands primarily used in development and debug environments +> (such as `opa eval`, `opa test`, etc.) DO NOT verify bundle signatures at this point in time. + +#### Signature Format + +Recall that a [policy bundle](#bundle-file-format) is a gzipped tarball that contains policies and data. A signed bundle +differs from a normal bundle in that it has a `.signatures.json` file as well. + +```bash +$ tar tzf bundle.tar.gz +.manifest +.signatures.json +roles +roles/bindings +roles/bindings/data.json +``` + +The signatures file is a JSON file with an array of JSON Web Tokens (JWTs) that encapsulate the signatures for the bundle. +Currently, you will be limited to one signature, as shown below. In the future, we may add support to include multiple +signatures to sign different files within the bundle. + +```json +{ + "signatures": [ "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmaWxlcyI6W3sibmFtZSI6Ii5tYW5pZmVzdCIsImhhc2giOiJjMjEzMTU0NGM3MTZhMjVhNWUzMWY1MDQzMDBmNTI0MGU4MjM1Y2FkYjlhNTdmMGJkMWI2ZjRiZDc0YjI2NjEyIiwiYWxnb3JpdGhtIjoiU0hBMjU2In0seyJuYW1lIjoicm9sZXMvYmluZGluZ3MvZGF0YS5qc29uIiwiaGFzaCI6IjQyY2ZlNjc2OGI1N2JiNWY3NTAzYzE2NWMyOGRkMDdhYzViODEzNTU0ZWJjODUwZjJjYzM1ODQzZTcxMzdiMWQifV0sImlhdCI6MTU5MjI0ODAyNywiaXNzIjoiSldUU2VydmljZSIsImtleWlkIjoibXlQdWJsaWNLZXkiLCJzY29wZSI6IndyaXRlIn0.ZjtUgXC6USwmhv4XP9gFH6MzZwpZrGpAL_2sTK1P-mg"] +} +``` + +The JWT when decoded has a JSON payload of the following form: + +```json +{ + "files": [ + { + "name": ".manifest", + "hash": "c2131544c716a25a5e31f504300f5240e8235cadb9a57f0bd1b6f4bd74b26612", + "algorithm": "SHA-256" + }, + { + "name": "roles/bindings/data.json", + "hash": "42cfe6768b57bb5f7503c165c28dd07ac5b813554ebc850f2cc35843e7137b1d" + } + ], + "iat": 1592248027, + "iss": "JWTService", + "keyid": "my_public_key", + "scope": "write" +} +``` + +| Field | Type | Required | Description | +| --- | --- | --- | --- | +| `files[_].name` | `string` | Yes | Path of a file in the bundle. | +| `files[_].hash` | `string` | Yes | Output of the hashing algorithm applied to the file. | +| `files[_].algorithm` | `string` | Yes | Name of the hashing algorithm. | +| `keyid` | `string` | No | Name of the key to use for JWT signature verification. | +| `scope` | `string` | No | Represents the fragment of signings. 
| +| `iat` | `int64` | No | Time of signature creation since epoch in seconds. For informational purposes only. | +| `iss` | `string` | No | Identifies the issuer of the JWT. For informational purposes only. | + +> Note: OPA will first look for the `keyid` on the command-line. If the `keyid` is empty, OPA will look for it in its +> configuration. If `keyid` is still empty, OPA will finally look for it in the JWT payload. + +The following hashing algorithms are supported: + + MD5 + SHA-1 + SHA-224 + SHA-256 + SHA-384 + SHA-512 + SHA-512-224 + SHA-512-256 + +To calculate the digest for unstructured files (i.e. all files except JSON or YAML files), apply the hash +function to the byte stream of the file. + +For structured files, read the byte stream and parse into a JSON structure; then recursively order the fields of all +objects alphabetically and then apply the hash function to the result to compute the hash. This ensures +that the digital signature is independent of whitespace and other non-semantic JSON features. + +To generate a `.signatures.json` file for policy and data files that will be part of a bundle, see the `opa sign` command. + +#### Signature Verification + +When OPA receives a policy bundle that doesn't include the `.signatures.json` file and the bundle is not configured to +use a signature, OPA does not perform signature verification and activates the bundle just as it always has. + +If the actual bundle contains the `.signatures.json` file but the bundle is not configured to use a signature, verification fails. + +| `.signatures.json` exists | bundle configured to verify signature | verification performed | result | +| --- | --- | --- | --- | +| `no` | `no` | `no` | `NA` | +| `no` | `yes` | `yes` | `fail` | +| `yes` | `no` | `yes` | `fail` | +| `yes` | `yes` | `yes` | `depends on the verification steps described below` | + +When OPA receives a signed bundle, it opens the `.signatures.json` file, grabs the JWT and performs the following steps: + +* Verify the JWT signature with the appropriate public key + +* Verify that the JWT payload and target directory specify the same set of files + +* Verify the content of each file by checking that the hash recorded in the JWT payload is the same as the hash generated +for that file + +OPA activates the new bundle only if all the verification steps succeed; otherwise, it continues using its existing bundle +and reports an activation failure via the status API and error logging. + +The signature verification process uses each of the fields in the JWT payload as follows: + +* `files`: This list of files must match exactly the files in the bundle, and for each file the hash of the file must match + +* `keyid`: If supplied, dictates which key (and algorithm) to use for verification. The actual key is supplied to +OPA out-of-band + +* `scope`: If supplied, must match exactly the value provided out-of-band to OPA + +* `iat`: unused for verification + +* `iss`: unused for verification + + + ## Decision Logs OPA can periodically report decision logs to remote HTTP servers. The decision @@ -737,7 +880,6 @@ This will dump all status updates through the OPA logging system at the `info` l ## Discovery - OPA can be configured to download bundles of policy and data, report status, and upload decision logs to remote endpoints. The discovery feature helps you centrally manage the OPA configuration for these features.
You should use the @@ -796,6 +938,9 @@ discovery: name: example resource: /configuration/example/discovery.tar.gz service: acmecorp + signing: + keyid: my_global_key + scope: read ``` Using the boot configuration above, OPA will fetch discovery bundles from: @@ -813,6 +958,10 @@ endpoint. If only one service is defined, there is no need to set `discovery.ser > The `discovery.prefix` configuration option is still available but has been deprecated in favor of `discovery.resource`. It will eventually be removed. +> The optional `discovery.signing` field can be used to specify the `keyid` and `scope` that should be used +> for verifying the signature of the discovery bundle. See [this](#discovery-bundle-signature) section for details. + + OPA generates it's subsequent configuration by querying the Rego and JSON files contained inside the discovery bundle. The query is defined by the `discovery.name` field from the boot configuration: `data.`. For @@ -1014,3 +1163,14 @@ immutable to avoid accidental configuration errors rendering OPA unable to disco If the discovered configuration changes the `discovery` or `labels` sections, those changes are ignored. If the discovered configuration changes the discovery service, an error will be logged. + +### Discovery Bundle Signature + +Like regular bundles, if the discovery bundle contains a `.signatures.json` file, OPA will verify the discovery +bundle before activating it. The format of the `.signatures.json` file and the verification steps are the same as those for +regular bundles. Since the discovered configuration ignores changes to the `discovery` section, any key used for +signature verification of a discovery bundle **CANNOT** be modified via discovery. + +> 🚨 We recommend that if you are using discovery, you sign the discovery bundles because those bundles +> include the keys used to verify the non-discovery bundles. However, OPA does not enforce that recommendation. You may use +> unsigned discovery bundles that themselves require non-discovery bundles to be signed. diff --git a/download/download.go b/download/download.go index db9722416c..2c470a48b6 100644 --- a/download/download.go +++ b/download/download.go @@ -49,6 +49,7 @@ type Downloader struct { f func(context.Context, Update) // callback function invoked when download updates occur logAttrs [][2]string // optional attributes to include in log messages etag string // HTTP Etag for caching purposes + bvc *bundle.VerificationConfig } // New returns a new Downloader that can be started.
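As a rough usage sketch (assembled from the APIs this patch adds, not code taken from it): a caller builds a `bundle.VerificationConfig` from named `bundle.KeyConfig` entries and hands it to a bundle reader, which is the path the downloader takes once `WithBundleVerificationConfig` (added in the next hunk) is set. The HMAC secret, key name, and file path below are illustrative placeholders; RSA/ECDSA public keys are configured the same way.

```go
package main

import (
	"fmt"
	"os"

	"github.com/open-policy-agent/opa/bundle"
)

func main() {
	// Open a signed bundle tarball (path is a placeholder).
	f, err := os.Open("bundle.tar.gz")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// Key configuration: HMAC secret here for brevity.
	kc := bundle.KeyConfig{Key: "mysecret", Algorithm: "HS256"}

	// The key ID "global_key" selects which entry verifies the .signatures.json JWT.
	bvc := bundle.NewVerificationConfig(map[string]*bundle.KeyConfig{"global_key": &kc}, "global_key", "", nil)

	// The reader verifies the signature and file hashes before returning the bundle.
	b, err := bundle.NewReader(f).WithBundleVerificationConfig(bvc).Read()
	if err != nil {
		panic(err) // signature or content verification failed
	}

	fmt.Println("verified bundle revision:", b.Manifest.Revision)
}
```

The bundle plugin threads the same configuration through `download.New(...).WithBundleVerificationConfig(...)`, as the later hunks in this patch show.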
@@ -74,6 +75,12 @@ func (d *Downloader) WithLogAttrs(attrs [][2]string) *Downloader { return d } +// WithBundleVerificationConfig sets the key configuration used to verify a signed bundle +func (d *Downloader) WithBundleVerificationConfig(config *bundle.VerificationConfig) *Downloader { + d.bvc = config + return d +} + // ClearCache resets the etag value on the downloader func (d *Downloader) ClearCache() { d.etag = "" @@ -159,7 +166,7 @@ func (d *Downloader) download(ctx context.Context, m metrics.Metrics) (*bundle.B defer m.Timer(metrics.RegoLoadBundles).Stop() baseURL := path.Join(d.client.Config().URL, d.path) loader := bundle.NewTarballLoaderWithBaseURL(resp.Body, baseURL) - reader := bundle.NewCustomReader(loader).WithMetrics(m) + reader := bundle.NewCustomReader(loader).WithMetrics(m).WithBundleVerificationConfig(d.bvc) b, err := reader.Read() if err != nil { return nil, "", err diff --git a/topdown/internal/jwx/.gitignore b/internal/jwx/.gitignore similarity index 100% rename from topdown/internal/jwx/.gitignore rename to internal/jwx/.gitignore diff --git a/topdown/internal/jwx/LICENSE b/internal/jwx/LICENSE similarity index 100% rename from topdown/internal/jwx/LICENSE rename to internal/jwx/LICENSE diff --git a/topdown/internal/jwx/Makefile b/internal/jwx/Makefile similarity index 100% rename from topdown/internal/jwx/Makefile rename to internal/jwx/Makefile diff --git a/topdown/internal/jwx/buffer/buffer.go b/internal/jwx/buffer/buffer.go similarity index 100% rename from topdown/internal/jwx/buffer/buffer.go rename to internal/jwx/buffer/buffer.go diff --git a/topdown/internal/jwx/buffer/buffer_test.go b/internal/jwx/buffer/buffer_test.go similarity index 100% rename from topdown/internal/jwx/buffer/buffer_test.go rename to internal/jwx/buffer/buffer_test.go diff --git a/topdown/internal/jwx/jwa/elliptic.go b/internal/jwx/jwa/elliptic.go similarity index 100% rename from topdown/internal/jwx/jwa/elliptic.go rename to internal/jwx/jwa/elliptic.go diff --git a/topdown/internal/jwx/jwa/key_type.go b/internal/jwx/jwa/key_type.go similarity index 100% rename from topdown/internal/jwx/jwa/key_type.go rename to internal/jwx/jwa/key_type.go diff --git a/topdown/internal/jwx/jwa/parameters.go b/internal/jwx/jwa/parameters.go similarity index 93% rename from topdown/internal/jwx/jwa/parameters.go rename to internal/jwx/jwa/parameters.go index 63c5a6462e..2fe72e1dbc 100644 --- a/topdown/internal/jwx/jwa/parameters.go +++ b/internal/jwx/jwa/parameters.go @@ -3,7 +3,7 @@ package jwa import ( "crypto/elliptic" - "github.com/open-policy-agent/opa/topdown/internal/jwx/buffer" + "github.com/open-policy-agent/opa/internal/jwx/buffer" ) // EllipticCurve provides a indirect type to standard elliptic curve such that we can diff --git a/topdown/internal/jwx/jwa/signature.go b/internal/jwx/jwa/signature.go similarity index 100% rename from topdown/internal/jwx/jwa/signature.go rename to internal/jwx/jwa/signature.go diff --git a/topdown/internal/jwx/jwk/ecdsa.go b/internal/jwx/jwk/ecdsa.go similarity index 98% rename from topdown/internal/jwx/jwk/ecdsa.go rename to internal/jwx/jwk/ecdsa.go index 7bff2bf8e8..30bee46b4b 100644 --- a/topdown/internal/jwx/jwk/ecdsa.go +++ b/internal/jwx/jwk/ecdsa.go @@ -7,7 +7,7 @@ import ( "github.com/pkg/errors" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) func newECDSAPublicKey(key *ecdsa.PublicKey) (*ECDSAPublicKey, error) { diff --git a/topdown/internal/jwx/jwk/ecdsa_test.go 
b/internal/jwx/jwk/ecdsa_test.go similarity index 97% rename from topdown/internal/jwx/jwk/ecdsa_test.go rename to internal/jwx/jwk/ecdsa_test.go index 86509ce8b6..e23c247288 100644 --- a/topdown/internal/jwx/jwk/ecdsa_test.go +++ b/internal/jwx/jwk/ecdsa_test.go @@ -6,9 +6,9 @@ import ( "crypto/rand" "encoding/json" "fmt" - "github.com/open-policy-agent/opa/topdown/internal/jwx/buffer" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwk" + "github.com/open-policy-agent/opa/internal/jwx/buffer" + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwk" "reflect" "testing" ) diff --git a/topdown/internal/jwx/jwk/headers.go b/internal/jwx/jwk/headers.go similarity index 98% rename from topdown/internal/jwx/jwk/headers.go rename to internal/jwx/jwk/headers.go index 8f310a4c12..cf700ee864 100644 --- a/topdown/internal/jwx/jwk/headers.go +++ b/internal/jwx/jwk/headers.go @@ -3,7 +3,7 @@ package jwk import ( "github.com/pkg/errors" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) // Convenience constants for common JWK parameters diff --git a/topdown/internal/jwx/jwk/headers_test.go b/internal/jwx/jwk/headers_test.go similarity index 97% rename from topdown/internal/jwx/jwk/headers_test.go rename to internal/jwx/jwk/headers_test.go index de861da146..940e031ac8 100644 --- a/topdown/internal/jwx/jwk/headers_test.go +++ b/internal/jwx/jwk/headers_test.go @@ -1,8 +1,8 @@ package jwk_test import ( - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwk" + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwk" "reflect" "testing" ) diff --git a/topdown/internal/jwx/jwk/interface.go b/internal/jwx/jwk/interface.go similarity index 96% rename from topdown/internal/jwx/jwk/interface.go rename to internal/jwx/jwk/interface.go index f718bec674..9822973052 100644 --- a/topdown/internal/jwx/jwk/interface.go +++ b/internal/jwx/jwk/interface.go @@ -4,7 +4,7 @@ import ( "crypto/ecdsa" "crypto/rsa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) // Set is a convenience struct to allow generating and parsing diff --git a/topdown/internal/jwx/jwk/jwk.go b/internal/jwx/jwk/jwk.go similarity index 98% rename from topdown/internal/jwx/jwk/jwk.go rename to internal/jwx/jwk/jwk.go index 18835cbb36..22ccf8dfc6 100644 --- a/topdown/internal/jwx/jwk/jwk.go +++ b/internal/jwx/jwk/jwk.go @@ -8,7 +8,7 @@ import ( "github.com/pkg/errors" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) // GetPublicKey returns the public key based on the private key type. 
diff --git a/topdown/internal/jwx/jwk/jwk_test.go b/internal/jwx/jwk/jwk_test.go similarity index 98% rename from topdown/internal/jwx/jwk/jwk_test.go rename to internal/jwx/jwk/jwk_test.go index a9f107a2e6..b9504fe1b5 100644 --- a/topdown/internal/jwx/jwk/jwk_test.go +++ b/internal/jwx/jwk/jwk_test.go @@ -3,7 +3,7 @@ package jwk_test import ( "testing" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwk" + "github.com/open-policy-agent/opa/internal/jwx/jwk" ) func TestNew(t *testing.T) { diff --git a/topdown/internal/jwx/jwk/key_ops.go b/internal/jwx/jwk/key_ops.go similarity index 100% rename from topdown/internal/jwx/jwk/key_ops.go rename to internal/jwx/jwk/key_ops.go diff --git a/topdown/internal/jwx/jwk/rsa.go b/internal/jwx/jwk/rsa.go similarity index 97% rename from topdown/internal/jwx/jwk/rsa.go rename to internal/jwx/jwk/rsa.go index e15e907d51..c885ffffc1 100644 --- a/topdown/internal/jwx/jwk/rsa.go +++ b/internal/jwx/jwk/rsa.go @@ -6,7 +6,7 @@ import ( "github.com/pkg/errors" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) func newRSAPublicKey(key *rsa.PublicKey) (*RSAPublicKey, error) { diff --git a/topdown/internal/jwx/jwk/rsa_test.go b/internal/jwx/jwk/rsa_test.go similarity index 98% rename from topdown/internal/jwx/jwk/rsa_test.go rename to internal/jwx/jwk/rsa_test.go index 96fc328506..846dc3b628 100644 --- a/topdown/internal/jwx/jwk/rsa_test.go +++ b/internal/jwx/jwk/rsa_test.go @@ -6,8 +6,8 @@ import ( "fmt" "testing" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwk" + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwk" ) func TestRSA(t *testing.T) { diff --git a/topdown/internal/jwx/jwk/symmetric.go b/internal/jwx/jwk/symmetric.go similarity index 93% rename from topdown/internal/jwx/jwk/symmetric.go rename to internal/jwx/jwk/symmetric.go index 6d1da1e400..8a07361556 100644 --- a/topdown/internal/jwx/jwk/symmetric.go +++ b/internal/jwx/jwk/symmetric.go @@ -3,7 +3,7 @@ package jwk import ( "github.com/pkg/errors" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) func newSymmetricKey(key []byte) (*SymmetricKey, error) { diff --git a/topdown/internal/jwx/jwk/symmetric_test.go b/internal/jwx/jwk/symmetric_test.go similarity index 96% rename from topdown/internal/jwx/jwk/symmetric_test.go rename to internal/jwx/jwk/symmetric_test.go index 5e38d680cc..3407d35785 100644 --- a/topdown/internal/jwx/jwk/symmetric_test.go +++ b/internal/jwx/jwk/symmetric_test.go @@ -8,8 +8,8 @@ import ( "reflect" "testing" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwk" + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwk" ) func TestSymmetric(t *testing.T) { diff --git a/topdown/internal/jwx/jws/headers.go b/internal/jwx/jws/headers.go similarity index 98% rename from topdown/internal/jwx/jws/headers.go rename to internal/jwx/jws/headers.go index fd6ffbe0e7..045e38fa1e 100644 --- a/topdown/internal/jwx/jws/headers.go +++ b/internal/jwx/jws/headers.go @@ -3,7 +3,7 @@ package jws import ( "github.com/pkg/errors" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) // Constants for JWS Common parameters diff --git 
a/topdown/internal/jwx/jws/headers_test.go b/internal/jwx/jws/headers_test.go similarity index 96% rename from topdown/internal/jwx/jws/headers_test.go rename to internal/jwx/jws/headers_test.go index 493066e571..cde814fcbe 100644 --- a/topdown/internal/jwx/jws/headers_test.go +++ b/internal/jwx/jws/headers_test.go @@ -5,8 +5,8 @@ import ( "reflect" "testing" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jws" + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jws" ) func TestHeader(t *testing.T) { diff --git a/topdown/internal/jwx/jws/interface.go b/internal/jwx/jws/interface.go similarity index 100% rename from topdown/internal/jwx/jws/interface.go rename to internal/jwx/jws/interface.go diff --git a/topdown/internal/jwx/jws/jws.go b/internal/jwx/jws/jws.go similarity index 96% rename from topdown/internal/jwx/jws/jws.go rename to internal/jwx/jws/jws.go index 34e18a499f..6fca28d23c 100644 --- a/topdown/internal/jwx/jws/jws.go +++ b/internal/jwx/jws/jws.go @@ -25,10 +25,10 @@ import ( "encoding/json" "strings" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwk" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jws/sign" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jws/verify" + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwk" + "github.com/open-policy-agent/opa/internal/jwx/jws/sign" + "github.com/open-policy-agent/opa/internal/jwx/jws/verify" "github.com/pkg/errors" ) diff --git a/topdown/internal/jwx/jws/jws_test.go b/internal/jwx/jws/jws_test.go similarity index 98% rename from topdown/internal/jwx/jws/jws_test.go rename to internal/jwx/jws/jws_test.go index 092dd42ed3..d24d3e90fb 100644 --- a/topdown/internal/jwx/jws/jws_test.go +++ b/internal/jwx/jws/jws_test.go @@ -8,11 +8,11 @@ import ( "crypto/sha512" "encoding/base64" "encoding/json" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwk" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jws" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jws/sign" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jws/verify" + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwk" + "github.com/open-policy-agent/opa/internal/jwx/jws" + "github.com/open-policy-agent/opa/internal/jwx/jws/sign" + "github.com/open-policy-agent/opa/internal/jwx/jws/verify" "math/big" "strings" "testing" diff --git a/topdown/internal/jwx/jws/message.go b/internal/jwx/jws/message.go similarity index 100% rename from topdown/internal/jwx/jws/message.go rename to internal/jwx/jws/message.go diff --git a/topdown/internal/jwx/jws/sign/ecdsa.go b/internal/jwx/jws/sign/ecdsa.go similarity index 96% rename from topdown/internal/jwx/jws/sign/ecdsa.go rename to internal/jwx/jws/sign/ecdsa.go index 02fc9f0223..7023906806 100644 --- a/topdown/internal/jwx/jws/sign/ecdsa.go +++ b/internal/jwx/jws/sign/ecdsa.go @@ -5,7 +5,7 @@ import ( "crypto/ecdsa" "crypto/rand" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" "github.com/pkg/errors" ) diff --git a/topdown/internal/jwx/jws/sign/ecdsa_test.go b/internal/jwx/jws/sign/ecdsa_test.go similarity index 92% rename from topdown/internal/jwx/jws/sign/ecdsa_test.go 
rename to internal/jwx/jws/sign/ecdsa_test.go index 8076510b1f..d7e3e1d804 100644 --- a/topdown/internal/jwx/jws/sign/ecdsa_test.go +++ b/internal/jwx/jws/sign/ecdsa_test.go @@ -3,7 +3,7 @@ package sign import ( "testing" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) func TestECDSASign(t *testing.T) { diff --git a/topdown/internal/jwx/jws/sign/hmac.go b/internal/jwx/jws/sign/hmac.go similarity index 95% rename from topdown/internal/jwx/jws/sign/hmac.go rename to internal/jwx/jws/sign/hmac.go index f86283efba..cbf7b9f0a1 100644 --- a/topdown/internal/jwx/jws/sign/hmac.go +++ b/internal/jwx/jws/sign/hmac.go @@ -6,7 +6,7 @@ import ( "crypto/sha512" "hash" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" "github.com/pkg/errors" ) diff --git a/topdown/internal/jwx/jws/sign/hmac_test.go b/internal/jwx/jws/sign/hmac_test.go similarity index 92% rename from topdown/internal/jwx/jws/sign/hmac_test.go rename to internal/jwx/jws/sign/hmac_test.go index 6b24cf8948..cd30cf93b1 100644 --- a/topdown/internal/jwx/jws/sign/hmac_test.go +++ b/internal/jwx/jws/sign/hmac_test.go @@ -3,7 +3,7 @@ package sign import ( "testing" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) func TestHMACSign(t *testing.T) { diff --git a/topdown/internal/jwx/jws/sign/interface.go b/internal/jwx/jws/sign/interface.go similarity index 95% rename from topdown/internal/jwx/jws/sign/interface.go rename to internal/jwx/jws/sign/interface.go index c79fd3e935..42a10c42e4 100644 --- a/topdown/internal/jwx/jws/sign/interface.go +++ b/internal/jwx/jws/sign/interface.go @@ -4,7 +4,7 @@ import ( "crypto/ecdsa" "crypto/rsa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) // Signer provides a common interface for supported alg signing methods diff --git a/topdown/internal/jwx/jws/sign/rsa.go b/internal/jwx/jws/sign/rsa.go similarity index 97% rename from topdown/internal/jwx/jws/sign/rsa.go rename to internal/jwx/jws/sign/rsa.go index d9cc13af90..bc51dbcd03 100644 --- a/topdown/internal/jwx/jws/sign/rsa.go +++ b/internal/jwx/jws/sign/rsa.go @@ -5,7 +5,7 @@ import ( "crypto/rand" "crypto/rsa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" "github.com/pkg/errors" ) diff --git a/internal/jwx/jws/sign/sign.go b/internal/jwx/jws/sign/sign.go new file mode 100644 index 0000000000..a808b054ed --- /dev/null +++ b/internal/jwx/jws/sign/sign.go @@ -0,0 +1,59 @@ +package sign + +import ( + "crypto/x509" + "encoding/pem" + "fmt" + + "github.com/pkg/errors" + + "github.com/open-policy-agent/opa/internal/jwx/jwa" +) + +// New creates a signer that signs payloads using the given signature algorithm. +func New(alg jwa.SignatureAlgorithm) (Signer, error) { + switch alg { + case jwa.RS256, jwa.RS384, jwa.RS512, jwa.PS256, jwa.PS384, jwa.PS512: + return newRSA(alg) + case jwa.ES256, jwa.ES384, jwa.ES512: + return newECDSA(alg) + case jwa.HS256, jwa.HS384, jwa.HS512: + return newHMAC(alg) + default: + return nil, errors.Errorf(`unsupported signature algorithm %s`, alg) + } +} + +// GetSigningKey returns a *rsa.PrivateKey or *ecdsa.PrivateKey typically encoded in PEM blocks of type "RSA PRIVATE KEY" +// or "EC PRIVATE KEY" for RSA and ECDSA family of algorithms. 
+// For HMAC family, it return a []byte value +func GetSigningKey(key string, alg jwa.SignatureAlgorithm) (interface{}, error) { + switch alg { + case jwa.RS256, jwa.RS384, jwa.RS512, jwa.PS256, jwa.PS384, jwa.PS512: + block, _ := pem.Decode([]byte(key)) + if block == nil { + return nil, fmt.Errorf("failed to parse PEM block containing the key") + } + + priv, err := x509.ParsePKCS1PrivateKey(block.Bytes) + if err != nil { + return nil, err + } + return priv, nil + case jwa.ES256, jwa.ES384, jwa.ES512: + block, _ := pem.Decode([]byte(key)) + if block == nil { + return nil, fmt.Errorf("failed to parse PEM block containing the key") + } + + priv, err := x509.ParseECPrivateKey(block.Bytes) + if err != nil { + return nil, err + } + return priv, nil + case jwa.HS256, jwa.HS384, jwa.HS512: + return []byte(key), nil + default: + return nil, errors.Errorf("unsupported signature algorithm: %s", alg) + } +} diff --git a/topdown/internal/jwx/jws/verify/ecdsa.go b/internal/jwx/jws/verify/ecdsa.go similarity index 96% rename from topdown/internal/jwx/jws/verify/ecdsa.go rename to internal/jwx/jws/verify/ecdsa.go index 5adccda30e..e71dc6f8f5 100644 --- a/topdown/internal/jwx/jws/verify/ecdsa.go +++ b/internal/jwx/jws/verify/ecdsa.go @@ -7,7 +7,7 @@ import ( "github.com/pkg/errors" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) var ecdsaVerifyFuncs = map[jwa.SignatureAlgorithm]ecdsaVerifyFunc{} diff --git a/topdown/internal/jwx/jws/verify/ecdsa_test.go b/internal/jwx/jws/verify/ecdsa_test.go similarity index 92% rename from topdown/internal/jwx/jws/verify/ecdsa_test.go rename to internal/jwx/jws/verify/ecdsa_test.go index 73a04e4ff5..4526d6e376 100644 --- a/topdown/internal/jwx/jws/verify/ecdsa_test.go +++ b/internal/jwx/jws/verify/ecdsa_test.go @@ -3,7 +3,7 @@ package verify import ( "testing" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) func TestECDSAVerify(t *testing.T) { diff --git a/topdown/internal/jwx/jws/verify/hmac.go b/internal/jwx/jws/verify/hmac.go similarity index 84% rename from topdown/internal/jwx/jws/verify/hmac.go rename to internal/jwx/jws/verify/hmac.go index e0b5e1981c..77e45887ae 100644 --- a/topdown/internal/jwx/jws/verify/hmac.go +++ b/internal/jwx/jws/verify/hmac.go @@ -5,8 +5,8 @@ import ( "github.com/pkg/errors" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jws/sign" + "github.com/open-policy-agent/opa/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jws/sign" ) func newHMAC(alg jwa.SignatureAlgorithm) (*HMACVerifier, error) { diff --git a/topdown/internal/jwx/jws/verify/hmac_test.go b/internal/jwx/jws/verify/hmac_test.go similarity index 91% rename from topdown/internal/jwx/jws/verify/hmac_test.go rename to internal/jwx/jws/verify/hmac_test.go index bc3ca7aa09..685585912b 100644 --- a/topdown/internal/jwx/jws/verify/hmac_test.go +++ b/internal/jwx/jws/verify/hmac_test.go @@ -3,7 +3,7 @@ package verify import ( "testing" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) func TestHMACVerify(t *testing.T) { diff --git a/topdown/internal/jwx/jws/verify/interface.go b/internal/jwx/jws/verify/interface.go similarity index 94% rename from topdown/internal/jwx/jws/verify/interface.go rename to internal/jwx/jws/verify/interface.go index b72b7232ad..f5beb69741 100644 --- 
a/topdown/internal/jwx/jws/verify/interface.go +++ b/internal/jwx/jws/verify/interface.go @@ -4,7 +4,7 @@ import ( "crypto/ecdsa" "crypto/rsa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jws/sign" + "github.com/open-policy-agent/opa/internal/jwx/jws/sign" ) // Verifier provides a common interface for supported alg verification methods diff --git a/topdown/internal/jwx/jws/verify/rsa.go b/internal/jwx/jws/verify/rsa.go similarity index 97% rename from topdown/internal/jwx/jws/verify/rsa.go rename to internal/jwx/jws/verify/rsa.go index 26f341d129..8188ceb1fe 100644 --- a/topdown/internal/jwx/jws/verify/rsa.go +++ b/internal/jwx/jws/verify/rsa.go @@ -4,7 +4,7 @@ import ( "crypto" "crypto/rsa" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" "github.com/pkg/errors" ) diff --git a/topdown/internal/jwx/jws/verify/rsa_test.go b/internal/jwx/jws/verify/rsa_test.go similarity index 92% rename from topdown/internal/jwx/jws/verify/rsa_test.go rename to internal/jwx/jws/verify/rsa_test.go index 7a1b0659d3..6404faa199 100644 --- a/topdown/internal/jwx/jws/verify/rsa_test.go +++ b/internal/jwx/jws/verify/rsa_test.go @@ -3,7 +3,7 @@ package verify import ( "testing" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" + "github.com/open-policy-agent/opa/internal/jwx/jwa" ) func TestRSAVerify(t *testing.T) { diff --git a/internal/jwx/jws/verify/verify.go b/internal/jwx/jws/verify/verify.go new file mode 100644 index 0000000000..1bb3bf83a9 --- /dev/null +++ b/internal/jwx/jws/verify/verify.go @@ -0,0 +1,57 @@ +package verify + +import ( + "crypto/ecdsa" + "crypto/rsa" + "crypto/x509" + "encoding/pem" + "fmt" + + "github.com/pkg/errors" + + "github.com/open-policy-agent/opa/internal/jwx/jwa" +) + +// New creates a new JWS verifier using the specified algorithm +// and the public key +func New(alg jwa.SignatureAlgorithm) (Verifier, error) { + switch alg { + case jwa.RS256, jwa.RS384, jwa.RS512, jwa.PS256, jwa.PS384, jwa.PS512: + return newRSA(alg) + case jwa.ES256, jwa.ES384, jwa.ES512: + return newECDSA(alg) + case jwa.HS256, jwa.HS384, jwa.HS512: + return newHMAC(alg) + default: + return nil, errors.Errorf(`unsupported signature algorithm: %s`, alg) + } +} + +// GetSigningKey returns a *rsa.PublicKey or *ecdsa.PublicKey typically encoded in PEM blocks of type "PUBLIC KEY", +// for RSA and ECDSA family of algorithms. 
+// For HMAC family, it return a []byte value +func GetSigningKey(key string, alg jwa.SignatureAlgorithm) (interface{}, error) { + switch alg { + case jwa.RS256, jwa.RS384, jwa.RS512, jwa.PS256, jwa.PS384, jwa.PS512, jwa.ES256, jwa.ES384, jwa.ES512: + block, _ := pem.Decode([]byte(key)) + if block == nil { + return nil, fmt.Errorf("failed to parse PEM block containing the key") + } + + pub, err := x509.ParsePKIXPublicKey(block.Bytes) + if err != nil { + return nil, err + } + + switch pub := pub.(type) { + case *rsa.PublicKey, *ecdsa.PublicKey: + return pub, nil + default: + return nil, fmt.Errorf("invalid key type %T", pub) + } + case jwa.HS256, jwa.HS384, jwa.HS512: + return []byte(key), nil + default: + return nil, errors.Errorf("unsupported signature algorithm: %s", alg) + } +} diff --git a/topdown/internal/jwx/jws/verify/verify_test.go b/internal/jwx/jws/verify/verify_test.go similarity index 100% rename from topdown/internal/jwx/jws/verify/verify_test.go rename to internal/jwx/jws/verify/verify_test.go diff --git a/internal/runtime/init/init.go b/internal/runtime/init/init.go index 385218c7f1..f733171ca4 100644 --- a/internal/runtime/init/init.go +++ b/internal/runtime/init/init.go @@ -7,6 +7,8 @@ package init import ( "context" + "path/filepath" + "strings" "github.com/pkg/errors" @@ -89,9 +91,27 @@ type LoadPathsResult struct { Files loader.Result } +// WalkPathsResult contains the output loading a set of paths. +type WalkPathsResult struct { + BundlesLoader []BundleLoader + FileDescriptors []*Descriptor +} + +// BundleLoader contains information about files in a bundle +type BundleLoader struct { + DirectoryLoader bundle.DirectoryLoader + IsDir bool +} + +// Descriptor contains information about a file +type Descriptor struct { + Root string + Path string +} + // LoadPaths reads data and policy from the given paths and returns a set of bundles or // raw loader file results. -func LoadPaths(paths []string, filter loader.Filter, asBundle bool) (*LoadPathsResult, error) { +func LoadPaths(paths []string, filter loader.Filter, asBundle bool, bvc *bundle.VerificationConfig, skipVerify bool) (*LoadPathsResult, error) { var result LoadPathsResult var err error @@ -99,7 +119,8 @@ func LoadPaths(paths []string, filter loader.Filter, asBundle bool) (*LoadPathsR if asBundle { result.Bundles = make(map[string]*bundle.Bundle, len(paths)) for _, path := range paths { - result.Bundles[path], err = loader.NewFileLoader().AsBundle(path) + result.Bundles[path], err = loader.NewFileLoader().WithBundleVerificationConfig(bvc). + WithSkipBundleVerification(skipVerify).AsBundle(path) if err != nil { return nil, err } @@ -116,3 +137,53 @@ func LoadPaths(paths []string, filter loader.Filter, asBundle bool) (*LoadPathsR return &result, nil } + +// WalkPaths reads data and policy from the given paths and returns a set of bundle directory loaders +// or descriptors that contain information about files. 
+func WalkPaths(paths []string, filter loader.Filter, asBundle bool) (*WalkPathsResult, error) { + + var result WalkPathsResult + + if asBundle { + result.BundlesLoader = make([]BundleLoader, len(paths)) + for i, path := range paths { + bundleLoader, isDir, err := loader.GetBundleDirectoryLoader(path) + if err != nil { + return nil, err + } + + result.BundlesLoader[i] = BundleLoader{ + DirectoryLoader: bundleLoader, + IsDir: isDir, + } + } + return &result, nil + } + + result.FileDescriptors = []*Descriptor{} + for _, path := range paths { + filePaths, err := loader.FilteredPaths([]string{path}, filter) + if err != nil { + return nil, err + } + + for _, fp := range filePaths { + // Trim off the root directory and return path as if chrooted + cleanedPath := strings.TrimPrefix(fp, path) + if path == "." && filepath.Base(fp) == bundle.ManifestExt { + cleanedPath = fp + } + + if !strings.HasPrefix(cleanedPath, "/") { + cleanedPath = "/" + cleanedPath + } + + result.FileDescriptors = append(result.FileDescriptors, &Descriptor{ + Root: path, + Path: cleanedPath, + }) + } + } + + return &result, nil +} diff --git a/internal/runtime/init/init_test.go b/internal/runtime/init/init_test.go index d0862f44f3..27e6643d34 100644 --- a/internal/runtime/init/init_test.go +++ b/internal/runtime/init/init_test.go @@ -6,7 +6,10 @@ package init import ( "context" + "io" + "path" "path/filepath" + "strings" "testing" "github.com/open-policy-agent/opa/storage" @@ -115,7 +118,7 @@ p = true { 1 = 2 }` err := storage.Txn(ctx, store, storage.WriteParams, func(txn storage.Transaction) error { - loaded, err := LoadPaths(paths, nil, tc.asBundle) + loaded, err := LoadPaths(paths, nil, tc.asBundle, nil, true) if err != nil { return err } @@ -177,3 +180,74 @@ p = true { 1 = 2 }` }) } } + +func TestWalkPaths(t *testing.T) { + files := map[string]string{ + "/bundle1/a/data.json": `{"foo": "bar1", "x": {"y": {"z": [1]}}}`, + "/bundle1/a/policy.rego": `package example.foo`, + "/bundle1/a/.manifest": `{"roots": ["a"]}`, + "/bundle2/b/data.json": `{"foo": "bar2"}`, + "/bundle2/b/policy.rego": `package authz`, + "/bundle2/b/.manifest": `{"roots": ["b"]}`, + } + + test.WithTempFS(files, func(rootDir string) { + + paths := []string{} + paths = append(paths, filepath.Join(rootDir, "bundle1")) + paths = append(paths, filepath.Join(rootDir, "bundle2")) + + // bundle mode + loaded, err := WalkPaths(paths, nil, true) + if err != nil { + t.Fatalf("Unexpected error: %s", err) + } + + if len(loaded.BundlesLoader) != len(paths) { + t.Fatalf("Expected %v bundle loaders but got %v", len(paths), len(loaded.BundlesLoader)) + } + + // check files + result := []string{} + for _, bl := range loaded.BundlesLoader { + for { + f, err := bl.DirectoryLoader.NextFile() + if err == io.EOF { + break + } + + if err != nil { + t.Fatalf("Unexpected error: %s", err) + } + + result = append(result, f.Path()) + + if _, ok := files[strings.TrimPrefix(f.URL(), rootDir)]; !ok { + t.Fatalf("unexpected file %v", f.Path()) + } + } + } + + if len(result) != len(files) { + t.Fatalf("Expected %v files across bundles but got %v", len(files), len(result)) + } + + // non-bundle mode + loaded, err = WalkPaths(paths, nil, false) + if err != nil { + t.Fatalf("Unexpected error: %s", err) + } + + if len(loaded.FileDescriptors) != len(files) { + t.Fatalf("Expected %v files across directories but got %v", len(files), len(loaded.FileDescriptors)) + } + + for _, d := range loaded.FileDescriptors { + path := path.Join(d.Root, d.Path) + path = strings.TrimPrefix(path, rootDir) + if 
_, ok := files[path]; !ok { + t.Fatalf("unexpected file %v", path) + } + } + }) +} diff --git a/loader/loader.go b/loader/loader.go index 981aa373f1..c8876696ee 100644 --- a/loader/loader.go +++ b/loader/loader.go @@ -86,17 +86,31 @@ type FileLoader interface { Filtered(paths []string, filter Filter) (*Result, error) AsBundle(path string) (*bundle.Bundle, error) WithMetrics(m metrics.Metrics) FileLoader + WithBundleVerificationConfig(*bundle.VerificationConfig) FileLoader + WithSkipBundleVerification(skipVerify bool) FileLoader } // NewFileLoader returns a new FileLoader instance. func NewFileLoader() FileLoader { return &fileLoader{ metrics: metrics.New(), + files: make(map[string]bundle.FileInfo), } } +type descriptor struct { + result *Result + path string + relPath string + depth int +} + type fileLoader struct { - metrics metrics.Metrics + metrics metrics.Metrics + bvc *bundle.VerificationConfig + skipVerify bool + descriptors []*descriptor + files map[string]bundle.FileInfo } // WithMetrics provides the metrics instance to use while loading @@ -105,6 +119,18 @@ func (fl *fileLoader) WithMetrics(m metrics.Metrics) FileLoader { return fl } +// WithBundleVerificationConfig sets the key configuration used to verify a signed bundle +func (fl *fileLoader) WithBundleVerificationConfig(config *bundle.VerificationConfig) FileLoader { + fl.bvc = config + return fl +} + +// WithSkipBundleVerification skips verification of a signed bundle +func (fl *fileLoader) WithSkipBundleVerification(skipVerify bool) FileLoader { + fl.skipVerify = skipVerify + return fl +} + // All returns a Result object loaded (recursively) from the specified paths. func (fl fileLoader) All(paths []string) (*Result, error) { return fl.Filtered(paths, nil) @@ -143,14 +169,39 @@ func (fl fileLoader) Filtered(paths []string, filter Filter) (*Result, error) { // it will be treated as a normal tarball bundle. If a directory // is supplied it will be loaded as an unzipped bundle tree. func (fl fileLoader) AsBundle(path string) (*bundle.Bundle, error) { - path, err := fileurl.Clean(path) + bundleLoader, isDir, err := GetBundleDirectoryLoader(path) if err != nil { return nil, err } + br := bundle.NewCustomReader(bundleLoader).WithMetrics(fl.metrics).WithBundleVerificationConfig(fl.bvc). + WithSkipBundleVerification(fl.skipVerify) + + // For bundle directories add the full path in front of module file names + // to simplify debugging. + if isDir { + br.WithBaseDir(path) + } + + b, err := br.Read() + if err != nil { + err = errors.Wrap(err, fmt.Sprintf("bundle %s", path)) + } + + return &b, err +} + +// GetBundleDirectoryLoader returns a bundle directory loader which can be used to load +// files in the directory. 
+func GetBundleDirectoryLoader(path string) (bundle.DirectoryLoader, bool, error) { + path, err := fileurl.Clean(path) + if err != nil { + return nil, false, err + } + fi, err := os.Stat(path) if err != nil { - return nil, fmt.Errorf("error reading %q: %s", path, err) + return nil, false, fmt.Errorf("error reading %q: %s", path, err) } var bundleLoader bundle.DirectoryLoader @@ -160,25 +211,27 @@ func (fl fileLoader) AsBundle(path string) (*bundle.Bundle, error) { } else { fh, err := os.Open(path) if err != nil { - return nil, err + return nil, false, err } bundleLoader = bundle.NewTarballLoaderWithBaseURL(fh, path) } + return bundleLoader, fi.IsDir(), nil +} - br := bundle.NewCustomReader(bundleLoader).WithMetrics(fl.metrics) - - // For bundle directories add the full path in front of module file names - // to simplify debugging. - if fi.IsDir() { - br.WithBaseDir(path) - } +// FilteredPaths return a list of files from the specified +// paths while applying the given filters. If any filter returns true, the +// file/directory is excluded. +func FilteredPaths(paths []string, filter Filter) ([]string, error) { + result := []string{} - b, err := br.Read() + _, err := all(paths, filter, func(_ *Result, path string, _ int) error { + result = append(result, path) + return nil + }) if err != nil { - err = errors.Wrap(err, fmt.Sprintf("bundle %s", path)) + return nil, err } - - return &b, err + return result, nil } // All returns a Result object loaded (recursively) from the specified paths. @@ -421,7 +474,7 @@ func loadFileForAnyType(path string, bs []byte, m metrics.Metrics) (interface{}, func loadBundleFile(path string, bs []byte, m metrics.Metrics) (bundle.Bundle, error) { tl := bundle.NewTarballLoaderWithBaseURL(bytes.NewBuffer(bs), path) - br := bundle.NewCustomReader(tl).WithMetrics(m).IncludeManifestInData(true) + br := bundle.NewCustomReader(tl).WithMetrics(m).WithSkipBundleVerification(true).IncludeManifestInData(true) return br.Read() } diff --git a/loader/loader_test.go b/loader/loader_test.go index ddaefef693..e7996cfa4f 100644 --- a/loader/loader_test.go +++ b/loader/loader_test.go @@ -6,6 +6,7 @@ package loader import ( "bytes" + "io" "os" "path" "path/filepath" @@ -153,6 +154,108 @@ func TestLoadDirRecursive(t *testing.T) { }) } +func TestFilteredPaths(t *testing.T) { + files := map[string]string{ + "/a/data1.json": `{"a": [1,2,3]}`, + "/a/e.rego": `package q`, + "/b/data2.yaml": `{"aaa": {"bbb": 1}}`, + "/b/data3.yaml": `{"aaa": {"ccc": 2}}`, + "/b/d/x.json": "null", + "/b/d/e.rego": `package p`, + "/b/d/ignore": `deadbeef`, + "/foo": `{"zzz": "b"}`, + } + + test.WithTempFS(files, func(rootDir string) { + + paths := []string{} + paths = append(paths, filepath.Join(rootDir, "a")) + paths = append(paths, filepath.Join(rootDir, "b")) + paths = append(paths, filepath.Join(rootDir, "foo")) + + result, err := FilteredPaths(paths, nil) + if err != nil { + t.Fatalf("Unexpected error: %s", err) + } + + if len(result) != len(files) { + t.Fatalf("Expected %v files across directories but got %v", len(files), len(result)) + } + }) +} + +func TestGetBundleDirectoryLoader(t *testing.T) { + files := map[string]string{ + "bundle.tar.gz": "", + } + + mod := "package b.c\np=1" + + test.WithTempFS(files, func(rootDir string) { + + bundleFile := filepath.Join(rootDir, "bundle.tar.gz") + + f, err := os.Create(bundleFile) + if err != nil { + t.Fatalf("Unexpected error: %s", err) + } + + b := &bundle.Bundle{ + Manifest: bundle.Manifest{ + Roots: &[]string{"a", "b/c"}, + Revision: "123", + }, + Data: 
map[string]interface{}{ + "a": map[string]interface{}{ + "b": []int{4, 5, 6}, + }, + }, + Modules: []bundle.ModuleFile{ + { + URL: path.Join(bundleFile, "policy.rego"), + Path: "/policy.rego", + Raw: []byte(mod), + Parsed: ast.MustParseModule(mod), + }, + }, + } + + err = bundle.Write(f, *b) + f.Close() + if err != nil { + t.Fatalf("Unexpected error: %s", err) + } + + bl, isDir, err := GetBundleDirectoryLoader(bundleFile) + if err != nil { + t.Fatalf("Unexpected error: %s", err) + } + + if isDir { + t.Fatal("Expected bundle to be gzipped tarball but got directory") + } + + // check files + result := []string{} + for { + f, err := bl.NextFile() + if err == io.EOF { + break + } + + if err != nil { + t.Fatalf("Unexpected error: %s", err) + } + + result = append(result, f.Path()) + } + + if len(result) != 3 { + t.Fatalf("Expected 3 files in the bundle but got %v", len(result)) + } + }) +} + func TestLoadBundle(t *testing.T) { test.WithTempFS(nil, func(rootDir string) { diff --git a/plugins/bundle/config.go b/plugins/bundle/config.go index 5831557435..62e36c90d4 100644 --- a/plugins/bundle/config.go +++ b/plugins/bundle/config.go @@ -9,6 +9,8 @@ import ( "path" "strings" + "github.com/open-policy-agent/opa/bundle" + "github.com/open-policy-agent/opa/download" "github.com/open-policy-agent/opa/util" ) @@ -28,7 +30,7 @@ func ParseConfig(config []byte, services []string) (*Config, error) { return nil, err } - if err := parsedConfig.validateAndInjectDefaults(services); err != nil { + if err := parsedConfig.validateAndInjectDefaults(services, nil); err != nil { return nil, err } @@ -39,6 +41,7 @@ func ParseConfig(config []byte, services []string) (*Config, error) { Config: parsedConfig.Config, Service: parsedConfig.Service, Resource: parsedConfig.generateLegacyResourcePath(), + Signing: nil, }, } @@ -49,13 +52,41 @@ func ParseConfig(config []byte, services []string) (*Config, error) { // the defined `bundles`. This expects a map of bundle names to resource // configurations. func ParseBundlesConfig(config []byte, services []string) (*Config, error) { - if config == nil { + return NewConfigBuilder().WithBytes(config).WithServices(services).Parse() +} + +// NewConfigBuilder returns a new ConfigBuilder to build and parse the bundle config +func NewConfigBuilder() *ConfigBuilder { + return &ConfigBuilder{} +} + +// WithBytes sets the raw bundle config +func (b *ConfigBuilder) WithBytes(config []byte) *ConfigBuilder { + b.raw = config + return b +} + +// WithServices sets the services that implement control plane APIs +func (b *ConfigBuilder) WithServices(services []string) *ConfigBuilder { + b.services = services + return b +} + +// WithKeyConfigs sets the public keys to verify a signed bundle +func (b *ConfigBuilder) WithKeyConfigs(keys map[string]*bundle.KeyConfig) *ConfigBuilder { + b.keys = keys + return b +} + +// Parse validates the config and injects default values for the defined `bundles`. 
+func (b *ConfigBuilder) Parse() (*Config, error) { + if b.raw == nil { return nil, nil } var bundleConfigs map[string]*Source - if err := util.Unmarshal(config, &bundleConfigs); err != nil { + if err := util.Unmarshal(b.raw, &bundleConfigs); err != nil { return nil, err } @@ -67,7 +98,7 @@ func ParseBundlesConfig(config []byte, services []string) (*Config, error) { } } - err := c.validateAndInjectDefaults(services) + err := c.validateAndInjectDefaults(b.services, b.keys) if err != nil { return nil, err } @@ -75,6 +106,13 @@ func ParseBundlesConfig(config []byte, services []string) (*Config, error) { return &c, nil } +// ConfigBuilder assists in the construction of the plugin configuration. +type ConfigBuilder struct { + raw []byte + services []string + keys map[string]*bundle.KeyConfig +} + // Config represents the configuration of the plugin. // The Config can define a single bundle source or a map of // `Source` objects defining where/how to download bundles. The @@ -94,8 +132,9 @@ type Config struct { type Source struct { download.Config - Service string `json:"service"` - Resource string `json:"resource"` + Service string `json:"service"` + Resource string `json:"resource"` + Signing *bundle.VerificationConfig `json:"signing"` } // IsMultiBundle returns whether or not the config is the newer multi-bundle @@ -106,7 +145,7 @@ func (c *Config) IsMultiBundle() bool { return c.Name == "" } -func (c *Config) validateAndInjectDefaults(services []string) error { +func (c *Config) validateAndInjectDefaults(services []string, keys map[string]*bundle.KeyConfig) error { if c.Bundles == nil { return c.validateAndInjectDefaultsLegacy(services) } @@ -117,6 +156,18 @@ func (c *Config) validateAndInjectDefaults(services []string) error { } var err error + + if source.Signing != nil { + err = source.Signing.ValidateAndInjectDefaults(keys) + if err != nil { + return fmt.Errorf("invalid configuration for bundle %q: %s", name, err.Error()) + } + } else { + if len(keys) > 0 { + source.Signing = bundle.NewVerificationConfig(keys, "", "", nil) + } + } + source.Service, err = c.getServiceFromList(source.Service, services) if err == nil { err = source.Config.ValidateAndInjectDefaults() diff --git a/plugins/bundle/config_test.go b/plugins/bundle/config_test.go index cc35b2fd46..5b9ef70c63 100644 --- a/plugins/bundle/config_test.go +++ b/plugins/bundle/config_test.go @@ -8,6 +8,8 @@ import ( "fmt" "testing" + "github.com/open-policy-agent/opa/bundle" + "github.com/ghodss/yaml" ) @@ -37,6 +39,10 @@ func TestConfigValidation(t *testing.T) { input: `{"name": "a/b/c", "service": "service2", "prefix": "/"}`, wantErr: false, }, + { + input: `{"name": "a/b/c", "service": "service2", "prefix": "/"}`, + wantErr: false, + }, } for i, test := range tests { @@ -54,7 +60,11 @@ func TestConfigValidation(t *testing.T) { func TestConfigValid(t *testing.T) { - in := `{"name": "a/b/c", "service": "service2", "prefix": "mybundle"}` + in := `{ + "name": "a/b/c", + "service": "service2", + "prefix": "mybundle", + }` config, err := ParseConfig([]byte(in), []string{"service1", "service2"}) if err != nil { @@ -199,11 +209,23 @@ func TestParseAndValidateBundlesConfig(t *testing.T) { services: []string{"s1"}, wantError: true, }, + { + conf: `{"b1":{"service": "s1", "signing": {"keyid": "foo", "scope": "write"}}}`, + services: []string{"s1"}, + wantError: false, + }, + { + conf: `{"b1":{"service": "s1", "signing": {"keyid": "bar", "scope": "write"}}}`, + services: []string{"s1"}, + wantError: true, + }, } + keys := 
map[string]*bundle.KeyConfig{"foo": {Key: "secret"}} for i := range tests { t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { - _, err := ParseBundlesConfig([]byte(tests[i].conf), tests[i].services) + _, err := NewConfigBuilder().WithBytes([]byte(tests[i].conf)).WithServices(tests[i].services). + WithKeyConfigs(keys).Parse() if err != nil && !tests[i].wantError { t.Fatalf("Unexpected error: %s", err) } @@ -214,6 +236,63 @@ func TestParseAndValidateBundlesConfig(t *testing.T) { } } +func TestParseBundlesConfigWithSigning(t *testing.T) { + conf := []byte(` +bundle.tar.gz: + service: s1 +b2: + service: s1 + resource: /b2/path/ +b3: + service: s3 + resource: /some/longer/path/bundle.tar.gz +`) + services := []string{"s1", "s3"} + parsedConfig, err := NewConfigBuilder().WithBytes(conf).WithServices(services).Parse() + if err != nil { + t.Fatalf("Unexpected error: %s", err) + } + + if parsedConfig.Name != "" { + t.Fatalf("Expected config `Name` to be empty, actual: %s", parsedConfig.Name) + } + + if len(parsedConfig.Bundles) != 3 { + t.Fatalf("Expected 3 bundles in parsed config, got: %+v", parsedConfig.Bundles) + } + + expectedSources := map[string]struct { + service string + resource string + }{ + "bundle.tar.gz": { + service: "s1", + resource: "bundles/bundle.tar.gz", + }, + "b2": { + service: "s1", + resource: "/b2/path/", + }, + "b3": { + service: "s3", + resource: "/some/longer/path/bundle.tar.gz", + }, + } + + for name, expected := range expectedSources { + actual, ok := parsedConfig.Bundles[name] + if !ok { + t.Fatalf("Expected to have bundle with name %s configured, actual: %+v", name, parsedConfig.Bundles) + } + if expected.resource != actual.Resource { + t.Errorf("Expected resource '%s', found '%s'", expected.resource, actual.Resource) + } + if expected.service != actual.Service { + t.Errorf("Expected service '%s', found '%s'", expected.service, actual.Service) + } + } +} + func TestParseBundlesConfig(t *testing.T) { conf := []byte(` bundle.tar.gz: diff --git a/plugins/bundle/plugin.go b/plugins/bundle/plugin.go index 7d109bda51..1f8cea30f1 100644 --- a/plugins/bundle/plugin.go +++ b/plugins/bundle/plugin.go @@ -249,10 +249,11 @@ func (p *Plugin) newDownloader(name string, source *Source) *download.Downloader conf := source.Config client := p.manager.Client(source.Service) path := source.Resource + return download.New(conf, client, path).WithCallback(func(ctx context.Context, u download.Update) { // wrap the callback to include the name of the bundle that was updated p.oneShot(ctx, name, u) - }) + }).WithBundleVerificationConfig(source.Signing) } func (p *Plugin) oneShot(ctx context.Context, name string, u download.Update) { diff --git a/plugins/discovery/config.go b/plugins/discovery/config.go index 98492d462f..a80c0a6d4b 100644 --- a/plugins/discovery/config.go +++ b/plugins/discovery/config.go @@ -8,6 +8,8 @@ import ( "fmt" "strings" + "github.com/open-policy-agent/opa/bundle" + "github.com/open-policy-agent/opa/ast" "github.com/open-policy-agent/opa/download" "github.com/open-policy-agent/opa/util" @@ -15,40 +17,92 @@ import ( // Config represents the configuration for the discovery feature. type Config struct { - download.Config // bundle downloader configuration - Name *string `json:"name"` // name of the discovery bundle - Prefix *string `json:"prefix,omitempty"` // Deprecated: use `Resource` instead. 
- Decision *string `json:"decision"` // the name of the query to run on the bundle to get the config - Service string `json:"service"` // the name of the service used to download discovery bundle from - Resource *string `json:"resource,omitempty"` // the resource path which will be downloaded from the service + download.Config // bundle downloader configuration + Name *string `json:"name"` // name of the discovery bundle + Prefix *string `json:"prefix,omitempty"` // Deprecated: use `Resource` instead. + Decision *string `json:"decision"` // the name of the query to run on the bundle to get the config + Service string `json:"service"` // the name of the service used to download discovery bundle from + Resource *string `json:"resource,omitempty"` // the resource path which will be downloaded from the service + Signing *bundle.VerificationConfig `json:"signing,omitempty"` // configuration used to verify a signed bundle service string path string query string } -// ParseConfig returns a valid Config object with defaults injected. -func ParseConfig(bs []byte, services []string) (*Config, error) { +// ConfigBuilder assists in the construction of the plugin configuration. +type ConfigBuilder struct { + raw []byte + services []string + keys map[string]*bundle.KeyConfig +} + +// NewConfigBuilder returns a new ConfigBuilder to build and parse the discovery config +func NewConfigBuilder() *ConfigBuilder { + return &ConfigBuilder{} +} + +// WithBytes sets the raw discovery config +func (b *ConfigBuilder) WithBytes(config []byte) *ConfigBuilder { + b.raw = config + return b +} + +// WithServices sets the services that implement control plane APIs +func (b *ConfigBuilder) WithServices(services []string) *ConfigBuilder { + b.services = services + return b +} + +// WithKeyConfigs sets the public keys to verify a signed bundle +func (b *ConfigBuilder) WithKeyConfigs(keys map[string]*bundle.KeyConfig) *ConfigBuilder { + b.keys = keys + return b +} - if bs == nil { +// Parse returns a valid Config object with defaults injected. +func (b *ConfigBuilder) Parse() (*Config, error) { + if b.raw == nil { return nil, nil } var result Config - if err := util.Unmarshal(bs, &result); err != nil { + if err := util.Unmarshal(b.raw, &result); err != nil { return nil, err } - return &result, result.validateAndInjectDefaults(services) + return &result, result.validateAndInjectDefaults(b.services, b.keys) } -func (c *Config) validateAndInjectDefaults(services []string) error { +// ParseConfig returns a valid Config object with defaults injected. 
+func ParseConfig(bs []byte, services []string) (*Config, error) { + return NewConfigBuilder().WithBytes(bs).WithServices(services).Parse() +} + +func (c *Config) validateAndInjectDefaults(services []string, keys map[string]*bundle.KeyConfig) error { if c.Name == nil { return fmt.Errorf("missing required discovery.name field") } + // make a copy of the keys map + copy := map[string]*bundle.KeyConfig{} + for key, kc := range keys { + copy[key] = kc + } + + if c.Signing != nil { + err := c.Signing.ValidateAndInjectDefaults(copy) + if err != nil { + return fmt.Errorf("invalid configuration for discovery service %q: %s", *c.Name, err.Error()) + } + } else { + if len(keys) > 0 { + c.Signing = bundle.NewVerificationConfig(copy, "", "", nil) + } + } + if c.Resource != nil { c.path = *c.Resource } else { @@ -62,7 +116,7 @@ func (c *Config) validateAndInjectDefaults(services []string) error { service, err := c.getServiceFromList(c.Service, services) if err != nil { - return fmt.Errorf("invalid configuration for decision service: %s", err.Error()) + return fmt.Errorf("invalid configuration for discovery service: %s", err.Error()) } c.service = service diff --git a/plugins/discovery/config_test.go b/plugins/discovery/config_test.go index ceb7d70d4c..d5cbd73ad8 100644 --- a/plugins/discovery/config_test.go +++ b/plugins/discovery/config_test.go @@ -7,6 +7,8 @@ package discovery import ( "fmt" "testing" + + "github.com/open-policy-agent/opa/bundle" ) func TestConfigValidation(t *testing.T) { @@ -50,11 +52,22 @@ func TestConfigValidation(t *testing.T) { services: []string{"service1"}, wantErr: false, }, + { + input: `{"name": "a/b/c", "decision": "query", "signing": {"keyid": "foo", "scope": "write"}}}`, + services: []string{"s1"}, + wantErr: false, + }, + { + input: `{"name": "a/b/c", "decision": "query", "signing": {"keyid": "bar", "scope": "write"}}}`, + services: []string{"s1"}, + wantErr: true, + }, } + keys := map[string]*bundle.KeyConfig{"foo": {Key: "secret"}} for i, test := range tests { t.Run(fmt.Sprintf("TestConfigValidation_case_%d", i), func(t *testing.T) { - _, err := ParseConfig([]byte(test.input), test.services) + _, err := NewConfigBuilder().WithBytes([]byte(test.input)).WithServices(test.services).WithKeyConfigs(keys).Parse() if err != nil && !test.wantErr { t.Fail() } @@ -82,7 +95,7 @@ func TestConfigDecision(t *testing.T) { for i, test := range tests { t.Run(fmt.Sprintf("TestConfigDecision_case_%d", i), func(t *testing.T) { - c, err := ParseConfig([]byte(test.input), []string{"service1"}) + c, err := NewConfigBuilder().WithBytes([]byte(test.input)).WithServices([]string{"service1"}).Parse() if err != nil { t.Fatal("unexpected error while parsing config") } @@ -114,7 +127,7 @@ func TestConfigService(t *testing.T) { for i, test := range tests { t.Run(fmt.Sprintf("TestConfigService_case_%d", i), func(t *testing.T) { - c, err := ParseConfig([]byte(test.input), test.services) + c, err := NewConfigBuilder().WithBytes([]byte(test.input)).WithServices(test.services).Parse() if err != nil { t.Fatal("unexpected error while parsing config") } @@ -151,7 +164,7 @@ func TestConfigPath(t *testing.T) { for i, test := range tests { t.Run(fmt.Sprintf("TestConfigDecision_case_%d", i), func(t *testing.T) { - c, err := ParseConfig([]byte(test.input), []string{"service1"}) + c, err := NewConfigBuilder().WithBytes([]byte(test.input)).WithServices([]string{"service1"}).Parse() if err != nil { t.Fatal("unexpected error while parsing config") } diff --git a/plugins/discovery/discovery.go 
b/plugins/discovery/discovery.go index 085915fcf5..a451a8c249 100644 --- a/plugins/discovery/discovery.go +++ b/plugins/discovery/discovery.go @@ -71,7 +71,8 @@ func New(manager *plugins.Manager, opts ...func(*Discovery)) (*Discovery, error) f(result) } - config, err := ParseConfig(manager.Config.Discovery, manager.Services()) + config, err := NewConfigBuilder().WithBytes(manager.Config.Discovery).WithServices(manager.Services()). + WithKeyConfigs(manager.PublicKeys()).Parse() if err != nil { return nil, err @@ -87,7 +88,8 @@ func New(manager *plugins.Manager, opts ...func(*Discovery)) (*Discovery, error) } result.config = config - result.downloader = download.New(config.Config, manager.Client(config.service), config.path).WithCallback(result.oneShot) + result.downloader = download.New(config.Config, manager.Client(config.service), config.path).WithCallback(result.oneShot). + WithBundleVerificationConfig(config.Signing) result.status = &bundle.Status{ Name: *config.Name, } @@ -175,7 +177,7 @@ func (c *Discovery) processUpdate(ctx context.Context, u download.Update) { func (c *Discovery) reconfigure(ctx context.Context, u download.Update) error { - ps, err := processBundle(ctx, c.manager, c.factories, u.Bundle, c.config.query, c.config.service, c.metrics) + ps, err := c.processBundle(ctx, u.Bundle) if err != nil { return err } @@ -212,9 +214,9 @@ func (c *Discovery) logrusFields() logrus.Fields { } } -func processBundle(ctx context.Context, manager *plugins.Manager, factories map[string]plugins.Factory, b *bundleApi.Bundle, query, service string, m metrics.Metrics) (*pluginSet, error) { +func (c *Discovery) processBundle(ctx context.Context, b *bundleApi.Bundle) (*pluginSet, error) { - config, err := evaluateBundle(ctx, manager.ID, manager.Info, b, query) + config, err := evaluateBundle(ctx, c.manager.ID, c.manager.Info, b, c.config.query) if err != nil { return nil, err } @@ -229,18 +231,32 @@ func processBundle(ctx context.Context, manager *plugins.Manager, factories map[ return nil, err } - if client, ok := services[service]; ok { - dClient := manager.Client(service) + if client, ok := services[c.config.service]; ok { + dClient := c.manager.Client(c.config.service) if !client.Config().Equal(dClient.Config()) { return nil, fmt.Errorf("updates to the discovery service are not allowed") } } - if err := manager.Reconfigure(config); err != nil { + // check for updates to the keys provided in the boot config + keys, err := bundleApi.ParseKeysConfig(config.Keys) + if err != nil { + return nil, err + } + + for key, kc := range keys { + if curr, ok := c.config.Signing.PublicKeys[key]; ok { + if !curr.Equal(kc) { + return nil, fmt.Errorf("updates to keys specified in the boot configuration are not allowed") + } + } + } + + if err := c.manager.Reconfigure(config); err != nil { return nil, err } - return getPluginSet(factories, manager, config, m) + return getPluginSet(c.factories, c.manager, config, c.metrics) } func evaluateBundle(ctx context.Context, id string, info *ast.Term, b *bundleApi.Bundle, query string) (*config.Config, error) { @@ -329,7 +345,8 @@ func getPluginSet(factories map[string]plugins.Factory, manager *plugins.Manager return nil, err } if bundleConfig == nil { - bundleConfig, err = bundle.ParseBundlesConfig(config.Bundles, manager.Services()) + bundleConfig, err = bundle.NewConfigBuilder().WithBytes(config.Bundles).WithServices(manager.Services()). 
+ WithKeyConfigs(manager.PublicKeys()).Parse() if err != nil { return nil, err } diff --git a/plugins/discovery/discovery_test.go b/plugins/discovery/discovery_test.go index da15a331bf..8d46b6af15 100644 --- a/plugins/discovery/discovery_test.go +++ b/plugins/discovery/discovery_test.go @@ -104,7 +104,8 @@ func TestProcessBundle(t *testing.T) { "default": { "url": "http://localhost:8181" } - } + }, + "discovery": {"name": "config"} }`), "test-id", inmem.New()) if err != nil { t.Fatal(err) @@ -120,7 +121,12 @@ func TestProcessBundle(t *testing.T) { } `) - ps, err := processBundle(ctx, manager, nil, initialBundle, "data.config", "default", nil) + disco, err := New(manager) + if err != nil { + t.Fatal(err) + } + + ps, err := disco.processBundle(ctx, initialBundle) if err != nil { t.Fatal(err) } @@ -139,7 +145,7 @@ func TestProcessBundle(t *testing.T) { } `) - ps, err = processBundle(ctx, manager, nil, updatedBundle, "data.config", "default", nil) + ps, err = disco.processBundle(ctx, updatedBundle) if err != nil { t.Fatal(err) } @@ -156,7 +162,7 @@ func TestProcessBundle(t *testing.T) { } `) - _, err = processBundle(ctx, manager, nil, updatedBundle, "data.config", "default", nil) + _, err = disco.processBundle(ctx, updatedBundle) if err == nil { t.Fatal("Expected error but got success") } @@ -286,6 +292,17 @@ func TestReconfigureWithUpdates(t *testing.T) { } }, "discovery": {"name": "config"}, + "keys": { + "global_key": { + "key": "secret", + "algorithm": "HS256", + "scope": "read" + }, + "local_key": { + "key": "some_private_key", + "scope": "write" + } + } }`), "test-id", inmem.New()) if err != nil { t.Fatal(err) @@ -493,6 +510,132 @@ func TestReconfigureWithUpdates(t *testing.T) { if err != nil { t.Fatalf("Unexpected error %v", err) } + + // add a new key + updatedBundle = makeDataBundle(9, ` + { + "config": { + "keys": { + "new_global_key": { + "key": "secret", + "algorithm": "HS256", + "scope": "read" + } + } + } + } + `) + + err = disco.reconfigure(ctx, download.Update{Bundle: updatedBundle}) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + // update a key in the boot config + updatedBundle = makeDataBundle(10, ` + { + "config": { + "keys": { + "global_key": { + "key": "new_secret", + "algorithm": "HS256", + "scope": "read" + } + } + } + } + `) + + err = disco.reconfigure(ctx, download.Update{Bundle: updatedBundle}) + if err == nil { + t.Fatal("Expected error but got nil") + } + + errMsg := "updates to keys specified in the boot configuration are not allowed" + if err.Error() != errMsg { + t.Fatalf("Expected error message: %v but got: %v", errMsg, err.Error()) + } + + // no config change for a key in the boot config + updatedBundle = makeDataBundle(11, ` + { + "config": { + "keys": { + "global_key": { + "key": "secret", + "algorithm": "HS256", + "scope": "read" + } + } + } + } + `) + + err = disco.reconfigure(ctx, download.Update{Bundle: updatedBundle}) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + + // update a key not in the boot config + updatedBundle = makeDataBundle(12, ` + { + "config": { + "keys": { + "new_global_key": { + "key": "secret", + "algorithm": "HS256", + "scope": "write" + } + } + } + } + `) + + err = disco.reconfigure(ctx, download.Update{Bundle: updatedBundle}) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } +} + +func TestProcessBundleWithSigning(t *testing.T) { + + ctx := context.Background() + + manager, err := plugins.New([]byte(`{ + "labels": {"x": "y"}, + "services": { + "localhost": { + "url": 
"http://localhost:9999" + } + }, + "discovery": {"name": "config", "signing": {"keyid": "my_global_key"}}, + "keys": {"my_global_key": {"algorithm": "HS256", "key": "secret"}}, + }`), "test-id", inmem.New()) + if err != nil { + t.Fatal(err) + } + + disco, err := New(manager) + if err != nil { + t.Fatal(err) + } + + initialBundle := makeDataBundle(1, ` + { + "config": { + "bundle": {"name": "test1"}, + "status": {}, + "decision_logs": {}, + "keys": {"my_local_key": {"algorithm": "HS256", "key": "new_secret"}} + } + } + `) + + _, err = disco.processBundle(ctx, initialBundle) + if err != nil { + t.Fatalf("Unexpected error %v", err) + } + } type testServer struct { diff --git a/plugins/plugins.go b/plugins/plugins.go index 403d48d15b..c8042d3ee0 100644 --- a/plugins/plugins.go +++ b/plugins/plugins.go @@ -126,6 +126,7 @@ type Manager struct { compiler *ast.Compiler compilerMux sync.RWMutex services map[string]rest.Client + keys map[string]*bundle.KeyConfig plugins []namedplugin registeredTriggers []func(txn storage.Transaction) mtx sync.Mutex @@ -205,10 +206,16 @@ func New(raw []byte, id string, store storage.Store, opts ...func(*Manager)) (*M return nil, err } + keys, err := bundle.ParseKeysConfig(parsedConfig.Keys) + if err != nil { + return nil, err + } + m := &Manager{ Store: store, Config: parsedConfig, ID: id, + keys: keys, services: services, pluginStatus: map[string]*Status{}, pluginStatusListeners: map[string]StatusListener{}, @@ -384,6 +391,12 @@ func (m *Manager) Reconfigure(config *config.Config) error { if err != nil { return err } + + keys, err := bundle.ParseKeysConfig(config.Keys) + if err != nil { + return err + } + m.mtx.Lock() defer m.mtx.Unlock() config.Labels = m.Config.Labels // don't overwrite labels @@ -391,6 +404,10 @@ func (m *Manager) Reconfigure(config *config.Config) error { for name, client := range services { m.services[name] = client } + + for name, key := range keys { + m.keys[name] = key + } return nil } @@ -502,6 +519,13 @@ func loadCompilerFromStore(ctx context.Context, store storage.Store, txn storage return compiler, nil } +// PublicKeys returns a public keys that can be used for verifying signed bundles. +func (m *Manager) PublicKeys() map[string]*bundle.KeyConfig { + m.mtx.Lock() + defer m.mtx.Unlock() + return m.keys +} + // Client returns a client for communicating with a remote service. func (m *Manager) Client(name string) rest.Client { m.mtx.Lock() diff --git a/rego/rego.go b/rego/rego.go index 3d32ff91ed..485eefe23d 100644 --- a/rego/rego.go +++ b/rego/rego.go @@ -435,43 +435,44 @@ type loadPaths struct { // Rego constructs a query and can be evaluated to obtain results. 
type Rego struct { - query string - parsedQuery ast.Body - compiledQueries map[queryType]compiledQuery - pkg string - parsedPackage *ast.Package - imports []string - parsedImports []*ast.Import - rawInput *interface{} - parsedInput ast.Value - unknowns []string - parsedUnknowns []*ast.Term - disableInlining []string - shallowInlining bool - skipPartialNamespace bool - partialNamespace string - modules []rawModule - parsedModules map[string]*ast.Module - compiler *ast.Compiler - store storage.Store - ownStore bool - txn storage.Transaction - metrics metrics.Metrics - tracers []topdown.Tracer - tracebuf *topdown.BufferTracer - trace bool - instrumentation *topdown.Instrumentation - instrument bool - capture map[*ast.Expr]ast.Var // map exprs to generated capture vars - termVarID int - dump io.Writer - runtime *ast.Term - builtinDecls map[string]*ast.Builtin - builtinFuncs map[string]*topdown.Builtin - unsafeBuiltins map[string]struct{} - loadPaths loadPaths - bundlePaths []string - bundles map[string]*bundle.Bundle + query string + parsedQuery ast.Body + compiledQueries map[queryType]compiledQuery + pkg string + parsedPackage *ast.Package + imports []string + parsedImports []*ast.Import + rawInput *interface{} + parsedInput ast.Value + unknowns []string + parsedUnknowns []*ast.Term + disableInlining []string + shallowInlining bool + skipPartialNamespace bool + partialNamespace string + modules []rawModule + parsedModules map[string]*ast.Module + compiler *ast.Compiler + store storage.Store + ownStore bool + txn storage.Transaction + metrics metrics.Metrics + tracers []topdown.Tracer + tracebuf *topdown.BufferTracer + trace bool + instrumentation *topdown.Instrumentation + instrument bool + capture map[*ast.Expr]ast.Var // map exprs to generated capture vars + termVarID int + dump io.Writer + runtime *ast.Term + builtinDecls map[string]*ast.Builtin + builtinFuncs map[string]*topdown.Builtin + unsafeBuiltins map[string]struct{} + loadPaths loadPaths + bundlePaths []string + bundles map[string]*bundle.Bundle + skipBundleVerification bool } // Function represents a built-in function that is callable in Rego. @@ -928,6 +929,13 @@ func UnsafeBuiltins(unsafeBuiltins map[string]struct{}) func(r *Rego) { } } +// SkipBundleVerification skips verification of a signed bundle. +func SkipBundleVerification(yes bool) func(r *Rego) { + return func(r *Rego) { + r.skipBundleVerification = yes + } +} + // New returns a new Rego object. func New(options ...func(r *Rego)) *Rego { @@ -1468,7 +1476,7 @@ func (r *Rego) loadBundles(ctx context.Context, txn storage.Transaction, m metri defer m.Timer(metrics.RegoLoadBundles).Stop() for _, path := range r.bundlePaths { - bndl, err := loader.NewFileLoader().WithMetrics(m).AsBundle(path) + bndl, err := loader.NewFileLoader().WithMetrics(m).WithSkipBundleVerification(r.skipBundleVerification).AsBundle(path) if err != nil { return fmt.Errorf("loading error: %s", err) } diff --git a/runtime/runtime.go b/runtime/runtime.go index 19f061691a..365cd32dad 100644 --- a/runtime/runtime.go +++ b/runtime/runtime.go @@ -20,6 +20,8 @@ import ( "syscall" "time" + "github.com/open-policy-agent/opa/bundle" + "github.com/pkg/errors" "github.com/sirupsen/logrus" "gopkg.in/fsnotify.v1" @@ -161,6 +163,12 @@ type Params struct { // EnableVersionCheck flag controls whether OPA will report its version to an external service. 
// If this flag is true, OPA will report its version to the external service EnableVersionCheck bool + + // BundleVerificationConfig sets the key configuration used to verify a signed bundle + BundleVerificationConfig *bundle.VerificationConfig + + // SkipBundleVerification flag controls whether OPA will verify a signed bundle + SkipBundleVerification bool } // LoggingConfig stores the configuration for OPA's logging behaviour. @@ -217,7 +225,7 @@ func NewRuntime(ctx context.Context, params Params) (*Runtime, error) { } } - loaded, err := initload.LoadPaths(params.Paths, params.Filter, params.BundleMode) + loaded, err := initload.LoadPaths(params.Paths, params.Filter, params.BundleMode, params.BundleVerificationConfig, params.SkipBundleVerification) if err != nil { return nil, errors.Wrap(err, "load error") } @@ -512,7 +520,7 @@ func (rt *Runtime) readWatcher(ctx context.Context, watcher *fsnotify.Watcher, p func (rt *Runtime) processWatcherUpdate(ctx context.Context, paths []string, removed string) error { - loaded, err := initload.LoadPaths(paths, rt.Params.Filter, rt.Params.BundleMode) + loaded, err := initload.LoadPaths(paths, rt.Params.Filter, rt.Params.BundleMode, nil, true) if err != nil { return err } diff --git a/tester/runner.go b/tester/runner.go index 4efcf04371..9fcf32dce9 100644 --- a/tester/runner.go +++ b/tester/runner.go @@ -520,7 +520,7 @@ func Load(args []string, filter loader.Filter) (map[string]*ast.Module, storage. func LoadBundles(args []string, filter loader.Filter) (map[string]*bundle.Bundle, error) { bundles := map[string]*bundle.Bundle{} for _, bundleDir := range args { - b, err := loader.NewFileLoader().AsBundle(bundleDir) + b, err := loader.NewFileLoader().WithSkipBundleVerification(true).AsBundle(bundleDir) if err != nil { return nil, fmt.Errorf("unable to load bundle %s: %s", bundleDir, err) } diff --git a/topdown/internal/jwx/jws/sign/sign.go b/topdown/internal/jwx/jws/sign/sign.go deleted file mode 100644 index fd4b0645f2..0000000000 --- a/topdown/internal/jwx/jws/sign/sign.go +++ /dev/null @@ -1,21 +0,0 @@ -package sign - -import ( - "github.com/pkg/errors" - - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" -) - -// New creates a signer that signs payloads using the given signature algorithm. 
-func New(alg jwa.SignatureAlgorithm) (Signer, error) { - switch alg { - case jwa.RS256, jwa.RS384, jwa.RS512, jwa.PS256, jwa.PS384, jwa.PS512: - return newRSA(alg) - case jwa.ES256, jwa.ES384, jwa.ES512: - return newECDSA(alg) - case jwa.HS256, jwa.HS384, jwa.HS512: - return newHMAC(alg) - default: - return nil, errors.Errorf(`unsupported signature algorithm %s`, alg) - } -} diff --git a/topdown/internal/jwx/jws/verify/verify.go b/topdown/internal/jwx/jws/verify/verify.go deleted file mode 100644 index d484cda0b5..0000000000 --- a/topdown/internal/jwx/jws/verify/verify.go +++ /dev/null @@ -1,22 +0,0 @@ -package verify - -import ( - "github.com/pkg/errors" - - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwa" -) - -// New creates a new JWS verifier using the specified algorithm -// and the public key -func New(alg jwa.SignatureAlgorithm) (Verifier, error) { - switch alg { - case jwa.RS256, jwa.RS384, jwa.RS512, jwa.PS256, jwa.PS384, jwa.PS512: - return newRSA(alg) - case jwa.ES256, jwa.ES384, jwa.ES512: - return newECDSA(alg) - case jwa.HS256, jwa.HS384, jwa.HS512: - return newHMAC(alg) - default: - return nil, errors.Errorf(`unsupported signature algorithm: %s`, alg) - } -} diff --git a/topdown/tokens.go b/topdown/tokens.go index 60ed44736b..1cdf39a287 100644 --- a/topdown/tokens.go +++ b/topdown/tokens.go @@ -25,9 +25,9 @@ import ( "github.com/pkg/errors" "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/internal/jwx/jwk" + "github.com/open-policy-agent/opa/internal/jwx/jws" "github.com/open-policy-agent/opa/topdown/builtins" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwk" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jws" ) var ( diff --git a/topdown/tokens_test.go b/topdown/tokens_test.go index 9a1430e622..216dfeb1bd 100644 --- a/topdown/tokens_test.go +++ b/topdown/tokens_test.go @@ -14,10 +14,10 @@ import ( "testing" "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/internal/jwx/jwk" + "github.com/open-policy-agent/opa/internal/jwx/jws" "github.com/open-policy-agent/opa/storage" "github.com/open-policy-agent/opa/storage/inmem" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jwk" - "github.com/open-policy-agent/opa/topdown/internal/jwx/jws" ) func TestParseTokenConstraints(t *testing.T) {
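
Below are two minimal usage sketches for the new public APIs introduced in this patch; they are illustrative only and not part of the diff. The bundle name "authz", service "s1", key name "global_key", the placeholder secret, and the bundle path are assumptions.

A sketch of building a bundle plugin configuration with signature verification through the new ConfigBuilder, mirroring TestParseAndValidateBundlesConfig above:

    package main

    import (
        "fmt"

        "github.com/open-policy-agent/opa/bundle"
        bundleplugin "github.com/open-policy-agent/opa/plugins/bundle"
    )

    func main() {
        // Raw `bundles` config as it would appear in the OPA configuration;
        // the bundle name "authz" and service "s1" are illustrative.
        raw := []byte(`{"authz": {"service": "s1", "signing": {"keyid": "global_key", "scope": "write"}}}`)

        // Key material that would normally come from the boot configuration
        // (exposed via plugins.Manager.PublicKeys()); the secret is a placeholder.
        keys := map[string]*bundle.KeyConfig{
            "global_key": {Key: "secret"},
        }

        cfg, err := bundleplugin.NewConfigBuilder().
            WithBytes(raw).
            WithServices([]string{"s1"}).
            WithKeyConfigs(keys).
            Parse()
        if err != nil {
            // Parse fails if, for example, the keyid does not match a configured key.
            panic(err)
        }

        src := cfg.Bundles["authz"]
        fmt.Println(src.Service, src.Resource)
    }

And a sketch of the new loader helper, following TestGetBundleDirectoryLoader above:

    package main

    import (
        "fmt"
        "io"
        "log"

        "github.com/open-policy-agent/opa/loader"
    )

    func main() {
        // GetBundleDirectoryLoader accepts either a bundle directory or a gzipped
        // tarball; isDir reports which form was detected. The path is illustrative.
        bl, isDir, err := loader.GetBundleDirectoryLoader("bundle.tar.gz")
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println("directory bundle:", isDir)

        // Iterate over the files in the bundle until io.EOF, as the test above does.
        for {
            f, err := bl.NextFile()
            if err == io.EOF {
                break
            }
            if err != nil {
                log.Fatal(err)
            }
            fmt.Println(f.Path())
        }
    }

Callers that load bundles directly can opt out of verification with the new rego.SkipBundleVerification(true) option or the loader's WithSkipBundleVerification method, as the rego and tester changes above do.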