Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[KATC] Backfill tests #1766

Merged
merged 27 commits into from
Jul 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
dda79df
Construct KATC tables
RebeccaMahany Jun 26, 2024
5530c17
Add support for deserializing structured_clone javascript objects
RebeccaMahany Jun 26, 2024
80b380b
Reorder function args
RebeccaMahany Jun 27, 2024
38c9e46
Rename type => source
RebeccaMahany Jun 27, 2024
90a930b
Fetch columns from query results
RebeccaMahany Jun 27, 2024
42ae269
Ensure path is included in results; reorder func args
RebeccaMahany Jun 27, 2024
4782383
Merge remote-tracking branch 'upstream/main' into becca/katc-construct
RebeccaMahany Jun 27, 2024
376714f
Merge remote-tracking branch 'upstream/main' into becca/katc-construct
RebeccaMahany Jun 28, 2024
15f7ea5
Read-only
RebeccaMahany Jun 28, 2024
f52843e
Transform entire row instead of individual data to properly unwrap in…
RebeccaMahany Jun 28, 2024
9868c16
Add source path constraint filtering so we don't run queries against …
RebeccaMahany Jun 28, 2024
9159366
Add test for constraint checks
RebeccaMahany Jun 28, 2024
1e32327
Rename function for brevity
RebeccaMahany Jun 28, 2024
af341ae
Add documentation
RebeccaMahany Jun 28, 2024
3d0c135
Add a table test
RebeccaMahany Jun 28, 2024
212b047
discard column log is way too noisy, remove it
RebeccaMahany Jun 28, 2024
f7c7eb8
Remove source type until implemented
RebeccaMahany Jun 28, 2024
3efe890
Rename to disambiguate source (type of table) and source (specific lo…
RebeccaMahany Jul 1, 2024
7fbdcb8
Rename structured clone to something more intuitive
RebeccaMahany Jul 1, 2024
97d8647
Fix dsn for sqlite
RebeccaMahany Jul 1, 2024
62e4061
Remove unneeded check
RebeccaMahany Jul 1, 2024
eda263e
Don't need unnecessary variable, return early if constraint not met
RebeccaMahany Jul 1, 2024
3019cf0
Support LIKE syntax for source rather than glob
RebeccaMahany Jul 1, 2024
17cebee
Add tests for deserializeFirefox
RebeccaMahany Jul 2, 2024
c72f9cc
Add test for snappyDecode
RebeccaMahany Jul 2, 2024
402e135
Add tests for sqliteData
RebeccaMahany Jul 2, 2024
f9a6f7a
Merge branch 'main' into becca/katc-construct-tests
RebeccaMahany Jul 3, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
233 changes: 233 additions & 0 deletions ee/katc/deserialize_firefox_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,233 @@
package katc

import (
"bytes"
"context"
"encoding/binary"
"encoding/json"
"io"
"testing"

"github.com/google/uuid"
"github.com/kolide/launcher/pkg/log/multislogger"
"github.com/stretchr/testify/require"
)

// Test_deserializeFirefox constructs a serialized structured-clone object
// byte-by-byte, runs it through deserializeFirefox, and confirms that the
// resulting row matches the expected key => raw-value map. The fixture
// exercises string, int32, null, nested-array, and nested-object
// deserialization in a single object.
func Test_deserializeFirefox(t *testing.T) {
	t.Parallel()

	// Build expected object: the deserialized row keyed by property name.
	u, err := uuid.NewRandom()
	require.NoError(t, err, "generating test UUID")
	idValue := u.String() // 36-byte UUID string, used as the `id` value below
	arrWithNestedObj := []string{"{\"id\":\"3\"}"}
	nestedArrBytes, err := json.Marshal(arrWithNestedObj)
	require.NoError(t, err)
	expectedObj := map[string][]byte{
		"id":      []byte(idValue),  // will exercise deserializeString
		"version": []byte("1"),      // will exercise int deserialization
		"option":  nil,              // will exercise null/undefined deserialization
		"types":   nestedArrBytes,   // will exercise deserializeArray, deserializeNestedObject
	}

	// Build a serialized object to deserialize. Each entry is a pair of
	// little-endian uint32s: 4 bytes of tag data followed by a 4-byte tag;
	// string contents are padded out to 8-byte word boundaries.
	serializedObj := []byte{
		// Header
		0x00, 0x00, 0x00, 0x00, // header tag data -- discarded
		0x00, 0x00, 0xf1, 0xff, // LE `tagHeader`
		// Begin object
		0x00, 0x00, 0x00, 0x00, // object tag data -- discarded
		0x08, 0x00, 0xff, 0xff, // LE `tagObject`
		// Begin `id` key
		0x02, 0x00, 0x00, 0x80, // LE data about upcoming string: length 2 (remaining bytes), is ASCII
		0x04, 0x00, 0xff, 0xff, // LE `tagString`
		0x69, 0x64, // "id"
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // padding to get to 8-byte word boundary
		// End `id` key
		// Begin `id` value
		0x24, 0x00, 0x00, 0x80, // LE data about upcoming string: length 36 (remaining bytes), is ASCII
		0x04, 0x00, 0xff, 0xff, // LE `tagString`
	}
	// Append `id` (the 36-byte UUID string generated above)
	serializedObj = append(serializedObj, []byte(idValue)...)
	// Append `id` padding, add `version`
	serializedObj = append(serializedObj,
		0x00, 0x00, 0x00, 0x00, // padding to get to 8-byte word boundary for `id` string
		// End `id` value
		// Begin `version` key
		0x07, 0x00, 0x00, 0x80, // LE data about upcoming string: length 7 (remaining bytes), is ASCII
		0x04, 0x00, 0xff, 0xff, // LE `tagString`
		0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, // "version"
		0x00, // padding to get to 8-byte word boundary
		// End `version` key
		// Begin `version` value
		0x01, 0x00, 0x00, 0x00, // Value `1`
		0x03, 0x00, 0xff, 0xff, // LE `tagInt32`
		// End `version` value
		// Begin `option` key
		0x06, 0x00, 0x00, 0x80, // LE data about upcoming string: length 6 (remaining bytes), is ASCII
		0x04, 0x00, 0xff, 0xff, // LE `tagString`
		0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, // "option"
		0x00, 0x00, // padding to get to 8-byte word boundary
		// End `option` key
		// Begin `option` value
		0x00, 0x00, 0x00, 0x00, // Unused data, discarded
		0x00, 0x00, 0xff, 0xff, // LE `tagNull`
		// End `option` value
		// Begin `types` key
		0x05, 0x00, 0x00, 0x80, // LE data about upcoming string: length 5 (remaining bytes), is ASCII
		0x04, 0x00, 0xff, 0xff, // LE `tagString`
		0x74, 0x79, 0x70, 0x65, 0x73, // "types"
		0x00, 0x00, 0x00, // padding to get to 8-byte word boundary
		// End `types` key
		// Begin `types` value
		0x01, 0x00, 0x00, 0x00, // Array length (1)
		0x07, 0x00, 0xff, 0xff, // LE `tagArrayObject`
		// Begin first array item
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // An extra pair that gets discarded, I don't know why
		0x00, 0x00, 0x00, 0x00, // Tag data, discarded
		0x08, 0x00, 0xff, 0xff, // LE `tagObjectObject`
		// Begin nested object
		// Begin `id` key
		0x02, 0x00, 0x00, 0x80, // LE data about upcoming string: length 2 (remaining bytes), is ASCII
		0x04, 0x00, 0xff, 0xff, // LE `tagString`
		0x69, 0x64, // "id"
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // padding to get to 8-byte word boundary
		// End `id` key
		// Begin `id` value
		0x03, 0x00, 0x00, 0x00, // Value `3`
		0x03, 0x00, 0xff, 0xff, // LE `tagInt32`
		// End `id` value
		// Object footer
		0x00, 0x00, 0x00, 0x00, // tag data -- discarded
		0x13, 0x00, 0xff, 0xff, // LE `tagEndOfKeys` 0xffff0013
		// End nested object
		// End first array item
		// End `types` value
		// Object footer (closes the top-level object)
		0x00, 0x00, 0x00, 0x00, // tag data -- discarded
		0x13, 0x00, 0xff, 0xff, // LE `tagEndOfKeys`
	)

	// deserializeFirefox expects the serialized object under the top-level `data` key.
	results, err := deserializeFirefox(context.TODO(), multislogger.NewNopLogger(), map[string][]byte{
		"data": serializedObj,
	})
	require.NoError(t, err, "expected to be able to deserialize object")

	require.Equal(t, expectedObj, results)
}

// Test_deserializeFirefox_missingTopLevelDataKey confirms that deserializeFirefox
// returns an error when the incoming row lacks the required top-level `data` key.
func Test_deserializeFirefox_missingTopLevelDataKey(t *testing.T) {
	t.Parallel()

	rowWithoutDataKey := map[string][]byte{
		"not_a_data_key": nil,
	}

	_, err := deserializeFirefox(context.TODO(), multislogger.NewNopLogger(), rowWithoutDataKey)
	require.Error(t, err, "expect deserializeFirefox requires top-level data key")
}

// Test_deserializeFirefox_malformedData confirms that deserializeFirefox returns
// an error, rather than panicking or succeeding, when handed invalid serialized data.
func Test_deserializeFirefox_malformedData(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		testCaseName string
		data         []byte
	}{
		{
			testCaseName: "missing header",
			data: []byte{
				0x00, 0x00, 0x00, 0x00, // header tag data -- discarded
				0x00, 0x00, 0xff, 0xff, // LE `tagNull` (`tagHeader` expected instead)
			},
		},
		{
			testCaseName: "missing top-level object",
			data: []byte{
				// Header
				0x00, 0x00, 0x00, 0x00, // header tag data -- discarded
				0x00, 0x00, 0xf1, 0xff, // LE `tagHeader`
				// End header
				0x00, 0x00, 0x00, 0x00, // data about tag, not used
				0x04, 0x00, 0xff, 0xff, // LE `tagString` (`tagObject` expected instead)
			},
		},
	}

	for _, tc := range testCases {
		tc := tc
		t.Run(tc.testCaseName, func(t *testing.T) {
			t.Parallel()

			malformedRow := map[string][]byte{
				"data": tc.data,
			}

			_, err := deserializeFirefox(context.TODO(), multislogger.NewNopLogger(), malformedRow)
			require.Error(t, err, "expect deserializeFirefox rejects malformed data")
		})
	}
}

// Test_deserializeString tests that deserializeString can handle both ASCII and
// UTF-16 strings, and that it consumes any trailing padding up to the 8-byte
// word boundary.
func Test_deserializeString(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		testCaseName string
		expected     []byte
		stringData   []byte
		stringBytes  []byte
	}{
		{
			testCaseName: "ascii",
			expected:     []byte("createdAt"),
			stringData: []byte{
				0x09, 0x00, 0x00, 0x80, // LE data about upcoming string: length 9 (remaining bytes), is ASCII (true)
			},
			stringBytes: []byte{
				0x63, // c
				0x72, // r
				0x65, // e
				0x61, // a
				0x74, // t
				0x65, // e
				0x64, // d
				0x41, // A
				0x74, // t
				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // padding to get to 8-byte word boundary
			},
		},
		{
			testCaseName: "utf-16",
			expected:     []byte("🏆"),
			stringData: []byte{
				0x02, 0x00, 0x00, 0x00, // LE data about upcoming string: length 2 (remaining bytes), is ASCII (false)
			},
			stringBytes: []byte{
				0x3c, 0xd8, 0xc6, 0xdf, // emoji: UTF-16 LE
				0x00, 0x00, 0x00, 0x00, // padding to get to 8-byte word boundary
			},
		},
	}

	for _, tc := range testCases {
		tc := tc
		t.Run(tc.testCaseName, func(t *testing.T) {
			t.Parallel()

			// The string's length/encoding metadata is a little-endian uint32
			strData := binary.LittleEndian.Uint32(tc.stringData)
			strReader := bytes.NewReader(tc.stringBytes)

			deserialized, err := deserializeString(strData, strReader)
			require.NoError(t, err)

			require.Equal(t, tc.expected, deserialized)

			// Confirm we read all the padding in as well -- the reader must be exhausted
			_, err = strReader.ReadByte()
			require.ErrorIs(t, err, io.EOF)
		})
	}
}

// Test_bitMask validates bitMask against a known value: a mask covering the
// low 31 bits of a uint32.
func Test_bitMask(t *testing.T) {
	t.Parallel()

	expected := uint32(0b01111111111111111111111111111111)
	require.Equal(t, expected, bitMask(31))
}
30 changes: 30 additions & 0 deletions ee/katc/snappy_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
package katc

import (
"context"
"testing"

"github.com/golang/snappy"
"github.com/kolide/launcher/pkg/log/multislogger"
"github.com/stretchr/testify/require"
)

// Test_snappyDecode confirms that snappyDecode decompresses every value in a
// row while leaving the keys untouched.
func Test_snappyDecode(t *testing.T) {
	t.Parallel()

	expectedRow := map[string][]byte{
		"some_key_a": []byte("some_value_a"),
		"some_key_b": []byte("some_value_b"),
	}

	// Snappy-compress each expected value to build the encoded input row
	encodedRow := make(map[string][]byte, len(expectedRow))
	for key, value := range expectedRow {
		encodedRow[key] = snappy.Encode(nil, value)
	}

	results, err := snappyDecode(context.TODO(), multislogger.NewNopLogger(), encodedRow)
	require.NoError(t, err)

	// Validate that the keys are unchanged, and that the data was correctly decoded
	require.Equal(t, expectedRow, results)
}
Loading
Loading