Skip to content

Commit

Permalink
Unit tests
Browse files Browse the repository at this point in the history
  • Loading branch information
nhulston committed Nov 11, 2024
1 parent d065596 commit 7cba2af
Show file tree
Hide file tree
Showing 2 changed files with 146 additions and 6 deletions.
90 changes: 84 additions & 6 deletions packages/datadog-plugin-aws-sdk/test/s3.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ const agent = require('../../dd-trace/test/plugins/agent')
const { setup } = require('./spec_helpers')
const axios = require('axios')
const { rawExpectedSchema } = require('./s3-naming')
const { SPAN_LINK_KIND, S3_PTR_KIND, SPAN_POINTER_DIRECTION } = require('../../dd-trace/src/span_pointers')

const bucketName = 's3-bucket-name-test'

Expand Down Expand Up @@ -34,19 +35,39 @@ describe('Plugin', () => {
return agent.load('aws-sdk')
})

// Recreate the bucket and a known object before every test so each test
// starts from the same fixture state (tests below mutate/delete objects).
beforeEach(done => {
  AWS = require(`../../../versions/${s3ClientName}@${version}`).get()

  // LocalStack endpoint; path-style addressing avoids virtual-host DNS lookups.
  s3 = new AWS.S3({ endpoint: 'http://127.0.0.1:4566', s3ForcePathStyle: true, region: 'us-east-1' })
  // Fix for LocationConstraint issue - only for SDK v2
  if (s3ClientName === 'aws-sdk') {
    s3.api.globalEndpoint = '127.0.0.1'
  }
  s3.createBucket({ Bucket: bucketName }, (err) => {
    if (err) return done(err)

    // Create a test object that we can use for operations
    s3.putObject({
      Bucket: bucketName,
      Key: 'test-key',
      Body: 'test body'
    }, (err) => {
      if (err) return done(err)
      done()
    })
  })
})

// Tear down per-test fixtures: delete the object first (S3 refuses to delete
// a non-empty bucket), then best-effort delete the bucket itself.
afterEach(done => {
  s3.deleteObject({
    Bucket: bucketName,
    Key: 'test-key'
  }, (err) => {
    // Best-effort cleanup: log but do not fail the test run on cleanup errors.
    if (err) console.error('Error deleting test object:', err)

    s3.deleteBucket({ Bucket: bucketName }, () => {
      done()
    })
  })
})

Expand Down Expand Up @@ -74,6 +95,63 @@ describe('Plugin', () => {
rawExpectedSchema.outbound
)

describe('span pointers', () => {
  // NOTE(review): each test chains the agent expectation to `done` via
  // `.then(done, done)`. Without that, `agent.use(...)` returns an unawaited
  // promise and any failed `expect` inside it is silently swallowed — the
  // test would pass as soon as the S3 callback fired, regardless of the
  // trace assertions. The S3 callback now only propagates request errors.
  it('should add span pointer for putObject operation', (done) => {
    agent.use(traces => {
      const span = traces[0][0]
      const links = span.links || []

      expect(links).to.have.lengthOf(1)
      expect(links[0].attributes).to.deep.equal({
        'ptr.kind': S3_PTR_KIND,
        'ptr.dir': SPAN_POINTER_DIRECTION.DOWNSTREAM,
        'link.kind': SPAN_LINK_KIND,
        'ptr.hash': 'abc'
      })
    }).then(done, done)

    s3.putObject({
      Bucket: bucketName,
      Key: 'new-test-key',
      Body: 'test body'
    }, (err) => {
      if (err) done(err)
    })
  })

  it('should add span pointer for copyObject operation', (done) => {
    agent.use(traces => {
      const span = traces[0][0]
      const links = span.links || []

      expect(links).to.have.lengthOf(1)
      expect(links[0].attributes).to.deep.equal({
        'ptr.kind': S3_PTR_KIND,
        'ptr.dir': SPAN_POINTER_DIRECTION.DOWNSTREAM,
        'link.kind': SPAN_LINK_KIND,
        'ptr.hash': 'abc'
      })
    }).then(done, done)

    s3.copyObject({
      Bucket: bucketName,
      Key: 'copy-key',
      CopySource: `${bucketName}/test-key`
    }, (err) => {
      if (err) done(err)
    })
  })

  it('should not add span pointer for getObject operation', (done) => {
    agent.use(traces => {
      const span = traces[0][0]
      const links = span.links || []
      expect(links).to.have.lengthOf(0)
    }).then(done, done)

    s3.getObject({
      Bucket: bucketName,
      Key: 'test-key'
    }, (err) => {
      if (err) done(err)
    })
  })
})

it('should allow disabling a specific span kind of a service', (done) => {
let total = 0

Expand Down
62 changes: 62 additions & 0 deletions packages/dd-trace/test/span_pointers.spec.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
'use strict'

require('./setup/tap')

const {
  ZERO_CONTEXT,
  SPAN_LINK_KIND,
  S3_PTR_KIND,
  SPAN_POINTER_DIRECTION,
  generateS3PointerHash
} = require('../../dd-trace/src/span_pointers')

describe('span_pointers', () => {
  describe('ZERO_CONTEXT', () => {
    it('should have trace and span IDs of 0', () => {
      // A zero context stringifies both IDs to '0'.
      expect(ZERO_CONTEXT.toTraceId()).to.equal('0')
      expect(ZERO_CONTEXT.toSpanId()).to.equal('0')
    })
  })

  // datadog-lambda-js imports these and will error if they are not found (moved or renamed)
  describe('constants', () => {
    it('should export the correct constant values', () => {
      expect(SPAN_LINK_KIND).to.equal('span-pointer')
      expect(S3_PTR_KIND).to.equal('aws.s3.object')
      expect(SPAN_POINTER_DIRECTION.UPSTREAM).to.equal('u')
      expect(SPAN_POINTER_DIRECTION.DOWNSTREAM).to.equal('d')
    })
  })

  describe('generateS3PointerHash', () => {
    it('should generate a valid hash for a basic S3 object', () => {
      expect(generateS3PointerHash('some-bucket', 'some-key.data', 'ab12ef34'))
        .to.equal('e721375466d4116ab551213fdea08413')
    })

    it('should generate a valid hash for an S3 object with a non-ascii key', () => {
      // Non-ASCII keys must hash deterministically as well.
      expect(generateS3PointerHash('some-bucket', 'some-key.你好', 'ab12ef34'))
        .to.equal('d1333a04b9928ab462b5c6cadfa401f4')
    })

    it('should generate a valid hash for multipart-uploaded S3 object', () => {
      // Multipart ETags carry a "-<partCount>" suffix.
      expect(generateS3PointerHash('some-bucket', 'some-key.data', 'ab12ef34-5'))
        .to.equal('2b90dffc37ebc7bc610152c3dc72af9f')
    })

    it('should handle quoted ETags', () => {
      // Surrounding double quotes on the ETag must not affect the hash.
      const bare = generateS3PointerHash('bucket', 'key', 'etag')
      const quoted = generateS3PointerHash('bucket', 'key', '"etag"')
      expect(bare).to.equal(quoted)
    })

    it('should return null if any required parameter is missing', () => {
      // Any nullish or empty argument invalidates the pointer.
      const invalidInputs = [
        [null, 'key', 'etag'],
        ['bucket', null, 'etag'],
        ['bucket', 'key', null],
        ['', 'key', 'etag'],
        ['bucket', '', 'etag'],
        ['bucket', 'key', '']
      ]
      for (const [bucket, key, eTag] of invalidInputs) {
        expect(generateS3PointerHash(bucket, key, eTag)).to.be.null
      }
    })
  })
})

0 comments on commit 7cba2af

Please sign in to comment.