Commit 358941d

Merge branch 'master' into feature-ecs-1.8
adriansr committed Feb 2, 2021
2 parents 1495059 + db17a9c
Showing 40 changed files with 1,688 additions and 160 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.next.asciidoc
@@ -377,6 +377,8 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
- Simplify regex for organization custom prefix in AWS/CloudTrail fileset. {issue}23203[23203] {pull}23204[23204]
- Fix syslog header parsing in infoblox module. {issue}23272[23272] {pull}23273[23273]
- Fix concurrent modification exception in Suricata ingest node pipeline. {pull}23534[23534]
- Fix Zoom module parameters for basic auth and url path. {pull}23779[23779]
- Fix handling of ModifiedProperties field in Office 365. {pull}23777[23777]

*Heartbeat*

@@ -619,6 +621,7 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
- Add ECS categorization info for auditd module {pull}18596[18596]
- Add several improvements for auditd module for improved ECS field mapping {pull}22647[22647]
- Add ECS 1.7 `configuration` categorization in certain events in auditd module. {pull}23000[23000]
- Improve file_integrity monitoring when a file is created/deleted in quick succession. {issue}17347[17347] {pull}22170[22170]
- system/host: Add new ECS 1.8 field `os.type` in `host.os.type`. {pull}23513[23513]

*Filebeat*
@@ -831,6 +834,7 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
- Added `encode_as` and `decode_as` options to httpjson along with pluggable encoders/decoders {pull}23478[23478]
- Added `application/x-ndjson` as decode option for httpjson input {pull}23521[23521]
- Added `application/x-www-form-urlencoded` as encode option for httpjson input {pull}23521[23521]
- Added RFC6587 framing option for tcp and unix inputs {issue}23663[23663] {pull}23724[23724]

*Heartbeat*

6 changes: 5 additions & 1 deletion auditbeat/module/file_integrity/config.go
@@ -18,6 +18,7 @@
package file_integrity

import (
"math"
"path/filepath"
"sort"
"strings"
@@ -29,6 +30,9 @@ import (
"github.com/elastic/beats/v7/libbeat/common/match"
)

// MaxValidFileSizeLimit is the largest possible value for `max_file_size`.
const MaxValidFileSizeLimit = math.MaxInt64 - 1

// HashType identifies a cryptographic algorithm.
type HashType string

@@ -110,7 +114,7 @@ nextHash:
}

c.MaxFileSizeBytes, err = humanize.ParseBytes(c.MaxFileSize)
- if err != nil {
+ if err != nil || c.MaxFileSizeBytes > MaxValidFileSizeLimit {
errs = append(errs, errors.Wrap(err, "invalid max_file_size value"))
} else if c.MaxFileSizeBytes <= 0 {
errs = append(errs, errors.Errorf("max_file_size value (%v) must be positive", c.MaxFileSize))
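Note on the new constant: the cap of math.MaxInt64 - 1 appears chosen so that maxSize+1 still fits in the int64 accepted by io.LimitReader in hashFile below. A minimal standalone sketch of the validation this hunk adds, assuming the same go-humanize parser (simplified; parseMaxFileSize is a hypothetical name, not the actual Beats config type):

package main

import (
	"fmt"
	"math"

	"github.com/dustin/go-humanize"
)

// One below MaxInt64, so that limit+1 is still representable as the
// int64 that io.LimitReader expects.
const maxValidFileSizeLimit = math.MaxInt64 - 1

func parseMaxFileSize(v string) (uint64, error) {
	n, err := humanize.ParseBytes(v)
	if err != nil || n > maxValidFileSizeLimit {
		return 0, fmt.Errorf("invalid max_file_size value %q", v)
	}
	return n, nil
}

func main() {
	fmt.Println(parseMaxFileSize("100 MiB")) // 104857600 <nil>
	fmt.Println(parseMaxFileSize("9 EiB"))   // parses as ~1.04e19 bytes, over the cap: error
}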
57 changes: 44 additions & 13 deletions auditbeat/module/file_integrity/event.go
@@ -27,6 +27,7 @@ import (
"fmt"
"hash"
"io"
"math"
"os"
"path/filepath"
"runtime"
@@ -119,8 +120,9 @@ type Event struct {
Hashes map[HashType]Digest `json:"hash,omitempty"` // File hashes.

// Metadata
- rtt time.Duration // Time taken to collect the info.
- errors []error // Errors that occurred while collecting the info.
+ rtt time.Duration // Time taken to collect the info.
+ errors []error // Errors that occurred while collecting the info.
+ hashFailed bool // Set when hashing the file failed.
}

// Metadata contains file metadata.
@@ -183,11 +185,16 @@ func NewEventFromFileInfo(
switch event.Info.Type {
case FileType:
if event.Info.Size <= maxFileSize {
- hashes, err := hashFile(event.Path, hashTypes...)
+ hashes, nbytes, err := hashFile(event.Path, maxFileSize, hashTypes...)
if err != nil {
event.errors = append(event.errors, err)
- } else {
+ event.hashFailed = true
+ } else if hashes != nil {
// hashFile returns nil hashes and no error when:
// - There are no hashes configured.
// - The file size at the time of hashing is larger than the configured limit.
event.Hashes = hashes
event.Info.Size = nbytes
}
}
case SymlinkType:
@@ -319,6 +326,17 @@ func buildMetricbeatEvent(e *Event, existedBefore bool) mb.Event {
out.MetricSetFields.Put("event.type", None.ECSTypes())
}

if n := len(e.errors); n > 0 {
errors := make([]string, n)
for idx, err := range e.errors {
errors[idx] = err.Error()
}
if n == 1 {
out.MetricSetFields.Put("error.message", errors[0])
} else {
out.MetricSetFields.Put("error.message", errors)
}
}
return out
}
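The added error block follows the ECS shape for error.message: one collected error is emitted as a plain string, several as an array of strings. A self-contained sketch of the same convention, assuming a plain map in place of MetricSetFields (putErrorMessage is a hypothetical helper, not the Beats API):

package main

import (
	"errors"
	"fmt"
)

// putErrorMessage mirrors the scalar-vs-array convention above.
func putErrorMessage(fields map[string]interface{}, errs []error) {
	if len(errs) == 0 {
		return
	}
	msgs := make([]string, len(errs))
	for i, err := range errs {
		msgs[i] = err.Error()
	}
	if len(msgs) == 1 {
		fields["error.message"] = msgs[0] // single error: scalar string
	} else {
		fields["error.message"] = msgs // multiple errors: array of strings
	}
}

func main() {
	fields := map[string]interface{}{}
	putErrorMessage(fields, []error{errors.New("failed to hash file")})
	fmt.Println(fields)
}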

@@ -327,7 +345,7 @@ func buildMetricbeatEvent(e *Event, existedBefore bool) mb.Event {
// contains a superset of new's hashes then false is returned.
func diffEvents(old, new *Event) (Action, bool) {
if old == new {
- return 0, false
+ return None, false
}

if old == nil && new != nil {
@@ -389,9 +407,9 @@
return result, result != None
}

- func hashFile(name string, hashType ...HashType) (map[HashType]Digest, error) {
+ func hashFile(name string, maxSize uint64, hashType ...HashType) (nameToHash map[HashType]Digest, nbytes uint64, err error) {
if len(hashType) == 0 {
- return nil, nil
+ return nil, 0, nil
}

var hashes []hash.Hash
@@ -433,27 +451,40 @@
case XXH64:
hashes = append(hashes, xxhash.New())
default:
- return nil, errors.Errorf("unknown hash type '%v'", name)
+ return nil, 0, errors.Errorf("unknown hash type '%v'", name)
}
}

f, err := file.ReadOpen(name)
if err != nil {
- return nil, errors.Wrap(err, "failed to open file for hashing")
+ return nil, 0, errors.Wrap(err, "failed to open file for hashing")
}
defer f.Close()

hashWriter := multiWriter(hashes)
- if _, err := io.Copy(hashWriter, f); err != nil {
- return nil, errors.Wrap(err, "failed to calculate file hashes")
+ // Make sure it hashes up to the limit in case the file is growing
+ // since its size was checked.
+ validSizeLimit := maxSize < math.MaxInt64-1
+ var r io.Reader = f
+ if validSizeLimit {
+ r = io.LimitReader(r, int64(maxSize+1))
+ }
+ written, err := io.Copy(hashWriter, r)
+ if err != nil {
+ return nil, 0, errors.Wrap(err, "failed to calculate file hashes")
}

// The file grew larger than the configured limit.
if validSizeLimit && written > int64(maxSize) {
return nil, 0, nil
}

- nameToHash := make(map[HashType]Digest, len(hashes))
+ nameToHash = make(map[HashType]Digest, len(hashes))
for i, h := range hashes {
nameToHash[hashType[i]] = h.Sum(nil)
}

- return nameToHash, nil
+ return nameToHash, uint64(written), nil
}

func multiWriter(hash []hash.Hash) io.Writer {
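The key technique in the new hashFile is hashing through io.LimitReader capped at maxSize+1 bytes: if io.Copy reports more than maxSize bytes written, the file must have grown past the limit after its size was checked, and the hashes are discarded. A self-contained sketch of the pattern, assuming a hypothetical hashUpTo helper (not the Beats code):

package main

import (
	"crypto/sha256"
	"fmt"
	"io"
	"strings"
)

// hashUpTo hashes r but gives up on inputs larger than maxSize bytes.
// Reading at most maxSize+1 bytes detects oversize input without
// buffering it; a nil digest with a nil error means "skipped".
func hashUpTo(r io.Reader, maxSize int64) (digest []byte, n int64, err error) {
	h := sha256.New()
	n, err = io.Copy(h, io.LimitReader(r, maxSize+1))
	if err != nil {
		return nil, 0, err
	}
	if n > maxSize {
		return nil, 0, nil // input exceeded the limit
	}
	return h.Sum(nil), n, nil
}

func main() {
	sum, n, _ := hashUpTo(strings.NewReader("hello world!\n"), 1024)
	fmt.Printf("%x (%d bytes)\n", sum, n)

	sum, n, _ = hashUpTo(strings.NewReader("hello world!\n"), 4)
	fmt.Println(sum, n) // prints [] 0: over the 4-byte limit, skipped
}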
127 changes: 109 additions & 18 deletions auditbeat/module/file_integrity/event_test.go
@@ -22,6 +22,7 @@ import (
"encoding/hex"
"fmt"
"io/ioutil"
"math"
"os"
"runtime"
"testing"
@@ -172,6 +173,18 @@ func TestDiffEvents(t *testing.T) {
}

func TestHashFile(t *testing.T) {
f, err := ioutil.TempFile("", "input.txt")
if err != nil {
t.Fatal(err)
}
defer os.Remove(f.Name())

const data = "hello world!\n"
const dataLen = uint64(len(data))
f.WriteString(data)
f.Sync()
f.Close()

t.Run("valid hashes", func(t *testing.T) {
// Computed externally.
expectedHashes := map[HashType]Digest{
@@ -193,21 +206,11 @@
XXH64: mustDecodeHex("d3e8573b7abf279a"),
}

- f, err := ioutil.TempFile("", "input.txt")
+ hashes, size, err := hashFile(f.Name(), dataLen, validHashes...)
if err != nil {
t.Fatal(err)
}
- defer os.Remove(f.Name())
- f.WriteString("hello world!\n")
- f.Sync()
- f.Close()
- hashes, err := hashFile(f.Name(), validHashes...)
- if err != nil {
- t.Fatal(err)
- }
+ assert.Equal(t, dataLen, size)
for _, hashType := range validHashes {
if hash, found := hashes[hashType]; !found {
t.Errorf("%v not found", hashType)
@@ -228,21 +231,107 @@
})

t.Run("no hashes", func(t *testing.T) {
hashes, err := hashFile("anyfile.txt")
hashes, size, err := hashFile("anyfile.txt", 1234)
assert.Nil(t, hashes)
assert.NoError(t, err)
assert.Zero(t, size)
})

t.Run("invalid hash", func(t *testing.T) {
hashes, err := hashFile("anyfile.txt", "md4")
hashes, size, err := hashFile("anyfile.txt", 1234, "md4")
assert.Nil(t, hashes)
assert.Error(t, err)
assert.Zero(t, size)
})

t.Run("invalid file", func(t *testing.T) {
hashes, err := hashFile("anyfile.txt", "md5")
hashes, size, err := hashFile("anyfile.txt", 1234, "md5")
assert.Nil(t, hashes)
assert.Error(t, err)
assert.Zero(t, size)
})

t.Run("size over hash limit", func(t *testing.T) {
hashes, size, err := hashFile(f.Name(), dataLen-1, SHA1)
assert.Nil(t, hashes)
assert.Zero(t, size)
assert.NoError(t, err)
})
t.Run("size at hash limit", func(t *testing.T) {
hashes, size, err := hashFile(f.Name(), dataLen, SHA1)
assert.NotNil(t, hashes)
assert.Equal(t, dataLen, size)
assert.NoError(t, err)
})
t.Run("size below hash limit", func(t *testing.T) {
hashes, size, err := hashFile(f.Name(), dataLen+1, SHA1)
assert.NotNil(t, hashes)
assert.Equal(t, dataLen, size)
assert.NoError(t, err)
})
t.Run("no size limit", func(t *testing.T) {
hashes, size, err := hashFile(f.Name(), math.MaxInt64, SHA1)
assert.NotNil(t, hashes)
assert.Equal(t, dataLen, size)
assert.NoError(t, err)
})
}

func TestNewEventFromFileInfoHash(t *testing.T) {
f, err := ioutil.TempFile("", "input.txt")
if err != nil {
t.Fatal(err)
}
defer os.Remove(f.Name())

const data = "hello world!\n"
const dataLen = uint64(len(data))
f.WriteString(data)
f.Sync()
defer f.Close()

info, err := os.Stat(f.Name())
if err != nil {
t.Fatal(err)
}

t.Run("file stays the same", func(t *testing.T) {
ev := NewEventFromFileInfo(f.Name(), info, nil, Updated, SourceFSNotify, MaxValidFileSizeLimit, []HashType{SHA1})
if !assert.NotNil(t, ev) {
t.Fatal("nil event")
}
assert.Equal(t, dataLen, ev.Info.Size)
assert.NotNil(t, ev.Hashes)
digest := Digest(mustDecodeHex("f951b101989b2c3b7471710b4e78fc4dbdfa0ca6"))
assert.Equal(t, digest, ev.Hashes[SHA1])
})
t.Run("file grows before hashing", func(t *testing.T) {
f.WriteString(data)
f.Sync()
ev := NewEventFromFileInfo(f.Name(), info, nil, Updated, SourceFSNotify, MaxValidFileSizeLimit, []HashType{SHA1})
if !assert.NotNil(t, ev) {
t.Fatal("nil event")
}
assert.Equal(t, dataLen*2, ev.Info.Size)
assert.NotNil(t, ev.Hashes)
digest := Digest(mustDecodeHex("62e8a0ef77ed7596347a065cae28a860f87e382f"))
assert.Equal(t, digest, ev.Hashes[SHA1])
})
t.Run("file shrinks before hashing", func(t *testing.T) {
err = f.Truncate(0)
if !assert.NoError(t, err) {
t.Fatal(err)
}
f.Sync()
assert.NoError(t, err)
ev := NewEventFromFileInfo(f.Name(), info, nil, Updated, SourceFSNotify, MaxValidFileSizeLimit, []HashType{SHA1})
if !assert.NotNil(t, ev) {
t.Fatal("nil event")
}
assert.Zero(t, ev.Info.Size)
assert.NotNil(t, ev.Hashes)
digest := Digest(mustDecodeHex("da39a3ee5e6b4b0d3255bfef95601890afd80709"))
assert.Equal(t, digest, ev.Hashes[SHA1])
})
}
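
The expected digests in the tests above can be reproduced with the standard library; da39a3ee… is the well-known SHA-1 of empty input, matching the truncated file. A quick check:

package main

import (
	"crypto/sha1"
	"fmt"
)

func main() {
	// The three inputs hashed by TestNewEventFromFileInfoHash: the
	// original content, the doubled file, and the truncated (empty) file.
	for _, s := range []string{"hello world!\n", "hello world!\nhello world!\n", ""} {
		fmt.Printf("%x  %q\n", sha1.Sum([]byte(s)), s)
	}
}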

@@ -254,24 +343,26 @@ func BenchmarkHashFile(b *testing.B) {
defer os.Remove(f.Name())

zeros := make([]byte, 100)
- iterations := 1024 * 1024 // 100 MiB
+ const iterations = 1024 * 1024 // 100 MiB
for i := 0; i < iterations; i++ {
if _, err = f.Write(zeros); err != nil {
b.Fatal(err)
}
}
b.Logf("file size: %v bytes", len(zeros)*iterations)
size := uint64(iterations * len(zeros))
b.Logf("file size: %v bytes", size)
f.Sync()
f.Close()
b.ResetTimer()

for _, hashType := range validHashes {
b.Run(string(hashType), func(b *testing.B) {
for i := 0; i < b.N; i++ {
- _, err = hashFile(f.Name(), hashType)
+ _, nbytes, err := hashFile(f.Name(), size+1, hashType)
if err != nil {
b.Fatal(err)
}
assert.Equal(b, size, nbytes)
}
})
}