feat: Adds IsValidTDF function - needs tests (#1188)

Resolves #1000

biscoe916 committed Aug 12, 2024
1 parent 514f1b8 commit 4750195

Showing 7 changed files with 440 additions and 2 deletions.
56 changes: 56 additions & 0 deletions examples/cmd/isvalid.go
@@ -0,0 +1,56 @@
package cmd

import (
"bytes"
"encoding/base64"
"io"

"github.com/opentdf/platform/sdk"
"github.com/spf13/cobra"
)

func init() {
isValidCmd := cobra.Command{
Use: "isvalid",
Short: "Check validity of a TDF",
RunE: isValid,
}

ExamplesCmd.AddCommand(&isValidCmd)
}

func isValid(cmd *cobra.Command, args []string) error {
goodNanoTdfStr := "TDFMABJsb2NhbGhvc3Q6ODA4MC9rYXOAAQIA2qvjMRfg7b27lT2kf9SwHRkDIg8ZXtfRoiIvdMUHq/gL5AUMfmv4Di8sKCyLkmUm/WITVj5hDeV/z4JmQ0JL7ZxqSmgZoK6TAHvkKhUly4zMEWMRXH8IktKhFKy1+fD+3qwDopqWAO5Nm2nYQqi75atEFckstulpNKg3N+Ul22OHr/ZuR127oPObBDYNRfktBdzoZbEQcPlr8q1B57q6y5SPZFjEzL9weK+uS5bUJWkF3nsHASo2bZw7IPhTZxoFVmCDjwvj6MbxNa7zG6aClHJ162zKxLLnD9TtIHuZ59R7LgiSieipXeExj+ky9OgIw5DfwyUuxsQLtKpMIAFPmLY9Hy2naUJxke0MT1EUBgastCq+YtFGslV9LJo/A8FtrRqludwtM0O+Z9FlAkZ1oNL7M7uOkLrh7eRrv+C1AAAX6FaBQoOtqnmyu6Jp+VzkxDddEeLRUyI="
badNanoTdfStr := "TDFMABfg7b27lT2kf9SwHRkDIg8ZXtfRoiIvdMUHq/gL5AUMfmv4Di8sKCyLkmUm/WITVj5hDeV/z4JmQ0JL7ZxqSmgZoK6TAHvkKhUly4zMEWMRXH8IktKhFKy1+fD+3qwDopqWAO5Nm2nYQqi75atEFckstulpNKg3N+Ul22OHr/ZuR127oPObBDYNRfktBdzoZbEQcPlr8q1B57q6y5SPZFjEzL9weK+uS5bUJWkF3nsHASo2bZw7IPhTZxoFVmCDjwvj6MbxNa7zG6aClHJ162zKxLLnD9TtIHuZ59R7LgiSieipXeExj+ky9OgIw5DfwyUuxsQLtKpMIAFPmLY9Hy2naUJxke0MT1EUBgastCq+YtFGslV9LJo/A8FtrRqludwtM0O+Z9FlAkZ1oNL7M7uOkLrh7eRrv+C1AAAX6FaBQoOtqnmyu6Jp+VzkxDddEeLRUyI="

goodStandardTdf := "UEsDBC0ACAAAAJ2TFTEAAAAAAAAAAAAAAAAJAAAAMC5wYXlsb2Fktu4m+vdwl0mtjhY3U5e7TG2o1s8ifK+RAhFNjRjGTLJ7V3w5UEsHCGiY7skkAAAAJAAAAFBLAwQtAAgAAACdkxUxAAAAAAAAAAAAAAAADwAAADAubWFuaWZlc3QuanNvbnsiZW5jcnlwdGlvbkluZm9ybWF0aW9uIjp7InR5cGUiOiJzcGxpdCIsInBvbGljeSI6ImV5SjFkV2xrSWpvaU1HTTFORGsyWlRZdE5EYzRaaTB4TVdWbUxXSXlOakV0WWpJMVl6UmhORE14TjJFM0lpd2lZbTlrZVNJNmV5SmtZWFJoUVhSMGNtbGlkWFJsY3lJNlczc2lZWFIwY21saWRYUmxJam9pYUhSMGNITTZMeTlsZUdGdGNHeGxMbU52YlM5aGRIUnlMMkYwZEhJeEwzWmhiSFZsTDNaaGJIVmxNU0lzSW1ScGMzQnNZWGxPWVcxbElqb2lJaXdpYVhORVpXWmhkV3gwSWpwbVlXeHpaU3dpY0hWaVMyVjVJam9pSWl3aWEyRnpWVkpNSWpvaUluMWRMQ0prYVhOelpXMGlPbHRkZlgwPSIsImtleUFjY2VzcyI6W3sidHlwZSI6IndyYXBwZWQiLCJ1cmwiOiJodHRwOi8vbG9jYWxob3N0OjgwODAiLCJwcm90b2NvbCI6ImthcyIsIndyYXBwZWRLZXkiOiJ0VVMvUE9TaVBtOGV6OGhyL2dMVGN6Y1lOT0trcUNEclZiQTBWdHZna29QbHB0M1BDZVpTdDNndnlQNVZKZXBNMmNqdVBhUWJJUGlyMjlWdVJ2T1RXZmQzRUh1KzgyVCtFNEVZbEpBM25VbDdGQTRMUGZhUEtXWk1zTExHUkJJVUxZT0VhMWJma1MvUm9Xb0EwK283WlFFVkNhYmdJN2JFRDJKV2Q2aG1yam1iUnM2d0lwOVFXNUs4Q3dJWjZVZjlGMXEwRDViTmlrbGxHaCtiaVJsV1NucEwxbHBPaFdva1gxdUJsU0VRSDNvM2JtVXFTNVVaUjRmYUxuTW5xOGR0bS8wYnJjTjUwaFNiK0xTTlZkd2daTEszTTRHTmxEeGdzcDkxY0VuYjZoZktLemdSY0VCS0tMQTF1b3BXNHdCRG9BamFuWWplQlZVT3ZBZEI5ek45T3c9PSIsInBvbGljeUJpbmRpbmciOnsiYWxnIjoiSFMyNTYiLCJoYXNoIjoiWmpBek1HWXlZekl4WlRCbU16Tm1NamhoTWpGalpqSTJaRE5oWlRrMk5ERTNaREJoWlRrM05ESTJNREExTnpVMU1UVTFNV0ZpTTJSak9EUTFabU0yWWc9PSJ9LCJraWQiOiJyMSJ9XSwibWV0aG9kIjp7ImFsZ29yaXRobSI6IkFFUy0yNTYtR0NNIiwiaXYiOiIiLCJpc1N0cmVhbWFibGUiOnRydWV9LCJpbnRlZ3JpdHlJbmZvcm1hdGlvbiI6eyJyb290U2lnbmF0dXJlIjp7ImFsZyI6IkhTMjU2Iiwic2lnIjoiWkdWaFltRmtNRGhsTURCbU1UVm1ZekJtTVdFME0ySmhOamhrTmpBMVpUazFNVGRtWmpoa1pETmtNekk0Tldaa01XUXhOVFZsWXpjME1EVXhPRE13Tmc9PSJ9LCJzZWdtZW50SGFzaEFsZyI6IkdNQUMiLCJzZWdtZW50U2l6ZURlZmF1bHQiOjIwOTcxNTIsImVuY3J5cHRlZFNlZ21lbnRTaXplRGVmYXVsdCI6MjA5NzE4MCwic2VnbWVudHMiOlt7Imhhc2giOiJNakkzWTJGbU9URXdNakV4TkdRNFpERTRZelkwWTJJeU4ySTFOemRqTXprPSIsInNlZ21lbnRTaXplIjo4LCJlbmNyeXB0ZWRTZWdtZW50U2l6ZSI6MzZ9XX19LCJwYXlsb2FkIjp7InR5cGUiOiJyZWZlcmVuY2UiLCJ1cmwiOiIwLnBheWxvYWQiLCJwcm90b2NvbCI6InppcCIsIm1pbWVUeXBlIjoiYXBwbGljYXRpb24vb2N0ZXQtc3RyZWFtIiwiaXNFbmNyeXB0ZWQiOnRydWV9fVBLBwgwpFOlrwUAAK8FAABQSwECLQAtAAgAAACdkxUxaJjuySQAAAAkAAAACQAAAAAAAAAAAAAAAAAAAAAAMC5wYXlsb2FkUEsBAi0ALQAIAAAAnZMVMTCkU6WvBQAArwUAAA8AAAAAAAAAAAAAAAAAWwAAADAubWFuaWZlc3QuanNvblBLBQYAAAAAAgACAHQAAABHBgAAAAA="

// Decode the base64 strings
goodDecodedNanoTdf, _ := base64.StdEncoding.DecodeString(goodNanoTdfStr)
badDecodedNanoTdf, _ := base64.StdEncoding.DecodeString(badNanoTdfStr)

goodDecodedStandardTdf, _ := base64.StdEncoding.DecodeString(goodStandardTdf)

inGoodStandard := bytes.NewReader(goodDecodedStandardTdf)
isValidTdf, _ := sdk.IsValidTdf(inGoodStandard)
cmd.Println("Valid TDF: ")
cmd.Println(isValidTdf)

inGood := bytes.NewReader(goodDecodedNanoTdf)
inBad := bytes.NewReader(badDecodedNanoTdf)
isValidNanoTdfGood, _ := sdk.IsValidNanoTdf(inGood)
isValidNanoTdfBad, _ := sdk.IsValidNanoTdf(inBad)

cmd.Println("Valid NanoTDF (Good): ")
cmd.Println(isValidNanoTdfGood)

cmd.Println("Valid NanoTDF (Bad): ")
cmd.Println(isValidNanoTdfBad)

_, _ = inGood.Seek(0, io.SeekStart)
tdfType := sdk.GetTdfType(inGood)
cmd.Println("Type: ")
cmd.Println(tdfType.String())

return nil
}
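Beyond the hard-coded base64 samples, the same helpers can be pointed at a TDF on disk. A minimal sketch, assuming the call signatures shown in the example above (a seekable reader in, a bool plus error out) and a hypothetical file name; the canonical import path is github.com/opentdf/platform/sdk, which the listing renders through a link proxy:

package main

import (
	"fmt"
	"io"
	"log"
	"os"

	"github.com/opentdf/platform/sdk"
)

func main() {
	// Hypothetical input file; any TDF produced by the SDK would do.
	f, err := os.Open("sensitive.txt.tdf")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// *os.File is a seekable reader, like the bytes.Reader used in the example.
	valid, err := sdk.IsValidTdf(f)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("Valid TDF:", valid)

	// Rewind before re-inspecting, as the example does before GetTdfType.
	if _, err := f.Seek(0, io.SeekStart); err != nil {
		log.Fatal(err)
	}
	fmt.Println("Type:", sdk.GetTdfType(f).String())
}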
3 changes: 3 additions & 0 deletions sdk/go.mod
@@ -11,6 +11,7 @@ require (
github.com/opentdf/platform/protocol/go v0.2.10
github.com/stretchr/testify v1.9.0
github.com/testcontainers/testcontainers-go v0.28.0
github.com/xeipuuv/gojsonschema v1.2.0
golang.org/x/exp v0.0.0-20240222234643-814bf88cf225
google.golang.org/grpc v1.62.1
google.golang.org/protobuf v1.33.0
@@ -70,6 +71,8 @@ require (
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/tklauser/go-sysconf v0.3.12 // indirect
github.com/tklauser/numcpus v0.6.1 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/yusufpapurcu/wmi v1.2.3 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect
go.opentelemetry.io/otel v1.24.0 // indirect
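The only new direct dependency is xeipuuv/gojsonschema, which suggests the standard-TDF validity check works by validating the manifest against the JSON schema added below. A minimal sketch of that library's validate call, using a toy schema and document rather than the SDK's actual wiring:

package main

import (
	"fmt"
	"log"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	// Toy schema and document; the SDK would load sdk/schema/manifest.schema.json instead.
	schema := `{"type":"object","required":["payload"],"properties":{"payload":{"type":"object"}}}`
	manifest := `{"payload":{"type":"reference","url":"0.payload"}}`

	result, err := gojsonschema.Validate(
		gojsonschema.NewStringLoader(schema),
		gojsonschema.NewStringLoader(manifest),
	)
	if err != nil {
		log.Fatal(err) // schema or document failed to parse
	}
	if result.Valid() {
		fmt.Println("manifest matches the schema")
		return
	}
	for _, e := range result.Errors() {
		fmt.Println("schema violation:", e)
	}
}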
7 changes: 7 additions & 0 deletions sdk/go.sum
@@ -152,6 +152,13 @@ github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFA
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
3 changes: 1 addition & 2 deletions sdk/nanotdf.go
@@ -518,6 +518,7 @@ func NewNanoTDFHeaderFromReader(reader io.Reader) (NanoTDFHeader, uint32, error)
var size uint32

magicNumber := make([]byte, len(kNanoTDFMagicStringAndVersion))

l, err := reader.Read(magicNumber)
if err != nil {
return header, 0, fmt.Errorf(" io.Reader.Read failed :%w", err)
@@ -839,8 +840,6 @@ func (s SDK) ReadNanoTDFContext(ctx context.Context, writer io.Writer, reader io
if err != nil {
return 0, err
}
// print(payloadLength)
// print(string(decryptedData))

return uint32(writeLen), nil
}
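NewNanoTDFHeaderFromReader begins by reading the magic-number prefix, which is the natural hook for a cheap format probe. A rough sketch of that idea; the "L1L" NanoTDF prefix and the ZIP local-file signature for standard TDFs are inferred from the sample payloads in the example command, not taken from the SDK's internals:

package main

import (
	"bytes"
	"fmt"
)

// probeTdfKind guesses the container format from the first few bytes.
func probeTdfKind(data []byte) string {
	switch {
	case bytes.HasPrefix(data, []byte("L1L")): // NanoTDF magic string + version prefix
		return "nano"
	case bytes.HasPrefix(data, []byte("PK\x03\x04")): // standard TDF is a ZIP archive
		return "standard"
	default:
		return "unknown"
	}
}

func main() {
	fmt.Println(probeTdfKind([]byte("L1L\x01rest-of-header"))) // nano
	fmt.Println(probeTdfKind([]byte("PK\x03\x04rest-of-zip"))) // standard
}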
238 changes: 238 additions & 0 deletions sdk/schema/manifest.schema.json
@@ -0,0 +1,238 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://example.com/manifest.schema.json",
"title": "manifest",
"description": "TDF manifest in JSON",
"type": "object",
"properties": {
"payload": {
"type": "object",
"description": "An object which contains information describing the payload.",
"properties": {
"type": {
"description": "Describes the type of payload is associated with the TDF.",
"type": "string"
},
"url": {
"description": "URL which points to payload. For reference types, with the default ZIP protocol, the URL would point to a local file within the zip.",
"type": "string"
},
"protocol": {
"description": "The payload protocol. Default is zip."
},
"isEncrypted": {
"description": "Designates whether or not the payload is encrypted, or cleartext.",
"type": "boolean"
},
"mimeType": {
"description": "Specifies the type of file that is encrypted. Default is `application/octet-stream`.",
"type": "string"
},
"tdf_spec_version": {
"description": "Semver version number of the TDF spec.",
"type": "string"
}
},
"required": ["type", "url", "protocol", "isEncrypted","mimeType"]
},
"encryptionInformation": {
"type": "object",
"properties": {
"type": {
"description": "Designates the type of key access protocol was used. Default, is split.",
"type": "string"
},
"keyAccess": {
"description": "An array of keyAccess objects which are used to retrieve keys from one, or more Key Access Services",
"type": "array",
"items": {
"description": "A key access object",
"type": "object",
"properties": {
"type": {
"description": "The type of key access object.",
"type": "string",
"enum": ["wrapped", "remote"]
},
"url": {
"description": "A fully qualified URL pointing to a key access service responsible for managing access to the encryption keys.",
"type": "string"
},
"protocol": {
"description": "The protocol to be used for managing key access.",
"type": "string",
"enum": ["kas"]
},
"wrappedKey": {
"description": "The symmetric key used to encrypt the payload. It has been encrypted using the public key of the KAS, then base64 encoded.",
"type": "string"
},
"sid": {
"description": "A unique identifier for a single key split. In some complex policies, multiple key access objects may exist that share a specific key split. Using a splitId allows software to more efficiently operate by not reusing key material unnecessarily. ",
"type": "string"
},
"kid": {
"description": "A UUID for the specific keypair used for wrapping the symmetric key.",
"type": "string"
},
"policyBinding": {
"description": "Object describing the policyBinding. Contains a hash, and an algorithm used. May also be a string, with just the hash. In that case default to HS256.",
"oneOf": [
{
"type": "string"
},{
"type": "object",
"properties": {
"alg": {
"description": "The policy binding algorithm used to generate the hash.",
"type": "string"
},
"hash": {
"description": "This contains a keyed hash that will provide cryptographic integrity on the policy object, such that it cannot be modified or copied to another TDF, without invalidating the binding. Specifically, you would have to have access to the key in order to overwrite the policy.",
"type": "string"
}
},
"required": ["alg", "hash"]
}
]
},
"encryptedMetadata": {
"description": "Metadata associated with the TDF, and the request. The contents of the metadata are freeform, and are used to pass information from the client, and any plugins that may be in use by the KAS. The metadata stored here should not be used for primary access decisions. Base64.",
"type": "string"
}
}
},
"required": ["type", "url", "protocol", "wrappedKey","sid", "kid", "policyBinding"]
},
"method": {
"type": "object",
"properties": {
"algorithm": {
"description": "Algorithm used to encrypt the payload",
"type": "string"
},
"isStreamable": {
"description": "Designates whether or not the payload is streamable.",
"type": "boolean"
}
},
"required": ["algorithm", "isStreamable"]
},
"integrityInformation": {
"type": "object",
"properties": {
"rootSignature": {
"type": "object",
"properties": {
"alg": {
"description": "Algorithm used to generate the root signature of the payload",
"type": "string"
},
"sig": {
"description": "The payload signature",
"type": "string"
}
}
},
"segmentSizeDefault": {
"description": "Default size of a encryption segment",
"type": "number"
},
"segmentHashAlg": {
"description": "Algorithm used to generate segment hashes",
"type": "string"
},
"segments": {
"description": "An array of segment objects. Allows for the possibility of assuring integrity over file segments, in addition to the entire payload. Useful for streaming.",
"type": "array",
"items": {
"description": "Segment object. Contains information necessary to validate integrity over a specific byte range of a payload.",
"type": "object",
"properties": {
"hash": {
"description": "Generated hash using the segment hashing algorithm specified in the parent object.",
"type": "string"
},
"segmentSize": {
"description": "The size of the segment prior to its encryption. Optional field only specified if it differs from the 'segmentSizeDefault', specified above.",
"type": "number"
},
"encryptedSegmentSize": {
"description": "The size of the segment once it has been encrypted.",
"type": "number"
}
}
}
},
"encryptedSegmentSizeDefault": {
"description": "Default size of an encrypted segment. TODO: Is this necessary??",
"type": "number"
}
},
"required": ["rootSignature", "segmentSizeDefault", "segments", "encryptedSegmentSizeDefault"]
},
"policy": {
"description": "Base64 encoded policy object",
"type": "string"
}
}
},
"assertions": {
"type": "array",
"description": "An array of objects used to express metadata about the objects in the scope attribute of the assertion. An assertion also supports metadata about the assertion statement for the purposes of indicating any handling instructions pertinent to the statement itself. Also supports encrypted statements and binding the statement with objects in its scope.",
"items": {
"type": "object",
"description": "A single assertion",
"properties": {
"id": {
"description": "A unique local identifier used for binding and signing purposes. Not guaranteed to be unique across multiple TDOs but must be unique within a single instance.",
"type": "string"
},
"type": {
"description": "Describes the type of assertion ('handling' or 'other').",
"type": "string"
},
"scope": {
"description": "An enumeration of the object to which the assertion applies ('tdo' or 'payload').",
"type": "string"
},
"appliesToState": {
"description": "Used to indicate if the statement metadata applies to 'encrypted' or 'unencrypted' data.",
"type": "string"
},
"statement": {
"description": "Intended for access, rights, and/or handling instructions that apply to the scope of the assertion.",
"type": "object",
"properties": {
"format": {
"description": "Describes the payload content encoding format ('xml-structured', 'base64binary', 'string').",
"type": "string"
},
"value": {
"description": "Payload content encoded in the format specified.",
"type": ["string", "object"]
}
}
},
"binding": {
"description": "Object describing the assertionBinding. Contains a hash, and an algorithm used.",
"type": "object",
"properties": {
"method": {
"description": "The assertion binding method used encode the signature. Default is 'jws'",
"type": "string"
},
"signature": {
"description": "This contains a keyed hash that will provide cryptographic integrity on the assertion object, such that it cannot be modified or copied to another TDF, without invalidating the binding. Specifically, you would have to have access to the key in order to overwrite the policy.",
"type": "string"
}
},
"required": ["method", "signature"]
}
},
"required": ["id", "type", "scope", "appliesToState", "statement"]
}
}
},
"required": ["payload", "encryptionInformation"]
}
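For reference, a stripped-down manifest in the shape this schema expects; it mirrors the manifest embedded in the standard-TDF sample in the example command, with placeholder values in place of real key material:

{
  "payload": {
    "type": "reference",
    "url": "0.payload",
    "protocol": "zip",
    "isEncrypted": true,
    "mimeType": "application/octet-stream"
  },
  "encryptionInformation": {
    "type": "split",
    "policy": "<base64 policy>",
    "keyAccess": [
      {
        "type": "wrapped",
        "url": "http://localhost:8080",
        "protocol": "kas",
        "wrappedKey": "<base64 wrapped key>",
        "sid": "split-1",
        "kid": "r1",
        "policyBinding": { "alg": "HS256", "hash": "<base64 hash>" }
      }
    ],
    "method": { "algorithm": "AES-256-GCM", "isStreamable": true },
    "integrityInformation": {
      "rootSignature": { "alg": "HS256", "sig": "<base64 signature>" },
      "segmentHashAlg": "GMAC",
      "segmentSizeDefault": 2097152,
      "encryptedSegmentSizeDefault": 2097180,
      "segments": [
        { "hash": "<base64 hash>", "segmentSize": 8, "encryptedSegmentSize": 36 }
      ]
    }
  }
}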