Skip to content

Commit

Permalink
feat: Added support for account level governance of AI Monitoring
Browse files Browse the repository at this point in the history
  • Loading branch information
jsumners-nr committed Jul 3, 2024
1 parent 3a910ef commit baa3798
Show file tree
Hide file tree
Showing 9 changed files with 69 additions and 14 deletions.
14 changes: 10 additions & 4 deletions lib/config/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -306,6 +306,12 @@ Config.prototype._fromServer = function _fromServer(params, key) {
case 'high_security':
break

// interpret AI Monitoring account setting
case 'collect_ai':
this._disableOption(params.collect_ai, 'ai_monitoring')
this.emit('change', this)
break

// always accept these settings
case 'cross_process_id':
case 'encoding_key':
Expand Down Expand Up @@ -1071,10 +1077,10 @@ function setFromEnv({ config, key, envVar, formatter, paths }) {
/**
* Recursively visit the nodes of the config definition and look for environment variable names, overriding any configuration values that are found.
*
* @param {object} [config=this] The current level of the configuration object.
* @param {object} [data=configDefinition] The current level of the config definition object.
* @param {Array} [paths=[]] keeps track of the nested path to properly derive the env var
* @param {number} [objectKeys=1] indicator of how many keys exist in current node to know when to remove current node after all keys are processed
* @param {object} [config] The current level of the configuration object.
* @param {object} [data] The current level of the config definition object.
* @param {Array} [paths] keeps track of the nested path to properly derive the env var
* @param {number} [objectKeys] indicator of how many keys exist in current node to know when to remove current node after all keys are processed
*/
Config.prototype._fromEnvironment = function _fromEnvironment(
config = this,
Expand Down
25 changes: 23 additions & 2 deletions lib/instrumentation/aws-sdk/v3/bedrock.js
Original file line number Diff line number Diff line change
Expand Up @@ -85,8 +85,21 @@ function addLlmMeta({ agent, segment }) {
* @param {BedrockCommand} params.bedrockCommand parsed input
* @param {Error|null} params.err error from request if exists
* @param {object} params.bedrockResponse response object from the Bedrock request
* @param {Shim} params.shim current shim instance
*/
function recordChatCompletionMessages({ agent, segment, bedrockCommand, bedrockResponse, err }) {
function recordChatCompletionMessages({
agent,
shim,
segment,
bedrockCommand,
bedrockResponse,
err
}) {
if (shouldSkipInstrumentation(agent.config) === true) {
shim.logger.debug('skipping sending of ai data')
return
}

const summary = new LlmChatCompletionSummary({
agent,
bedrockResponse,
Expand Down Expand Up @@ -133,12 +146,18 @@ function recordChatCompletionMessages({ agent, segment, bedrockCommand, bedrockR
*
* @param {object} params function params
* @param {object} params.agent instance of agent
* @param {object} params.shim current shim instance
* @param {object} params.segment active segment
* @param {BedrockCommand} params.bedrockCommand parsed input
* @param {Error|null} params.err error from request if exists
* @param params.bedrockResponse
*/
function recordEmbeddingMessage({ agent, segment, bedrockCommand, bedrockResponse, err }) {
function recordEmbeddingMessage({ agent, shim, segment, bedrockCommand, bedrockResponse, err }) {
if (shouldSkipInstrumentation(agent.config) === true) {
shim.logger.debug('skipping sending of ai data')
return
}

const embedding = new LlmEmbedding({
agent,
segment,
Expand Down Expand Up @@ -239,6 +258,7 @@ function handleResponse({ shim, err, response, segment, bedrockCommand, modelTyp
if (modelType === 'completion') {
recordChatCompletionMessages({
agent,
shim,
segment,
bedrockCommand,
bedrockResponse,
Expand All @@ -247,6 +267,7 @@ function handleResponse({ shim, err, response, segment, bedrockCommand, modelTyp
} else if (modelType === 'embedding') {
recordEmbeddingMessage({
agent,
shim,
segment,
bedrockCommand,
bedrockResponse,
Expand Down
8 changes: 7 additions & 1 deletion lib/instrumentation/langchain/common.js
Original file line number Diff line number Diff line change
Expand Up @@ -43,11 +43,17 @@ common.mergeMetadata = function mergeMetadata(localMeta = {}, paramsMeta = {}) {
*
* @param {object} params function params
* @param {Agent} params.agent NR agent
* @param {Shim} params.shim current shim instance
* @param {string} params.type type of llm event(i.e.- LlmChatCompletionMessage, LlmTool, etc)
* @param {object} params.msg the llm event getting enqueued
* @param {string} params.pkgVersion version of langchain library instrumented
*/
common.recordEvent = function recordEvent({ agent, type, msg, pkgVersion }) {
common.recordEvent = function recordEvent({ agent, shim, type, msg, pkgVersion }) {
  // Honor account-level AI Monitoring governance: when instrumentation is
  // disabled, drop the event instead of enqueueing it.
  const skipEvent = common.shouldSkipInstrumentation(agent.config)
  if (skipEvent === true) {
    shim.logger.debug('skipping sending of ai data')
    return
  }

  const trackingMetric = `${LANGCHAIN.TRACKING_PREFIX}/${pkgVersion}`
  agent.metrics.getOrCreateMetric(trackingMetric).incrementCallCount()

  const eventTimestamp = Date.now()
  agent.customEventAggregator.add([{ type, timestamp: eventTimestamp }, msg])
}
Expand Down
4 changes: 3 additions & 1 deletion lib/instrumentation/langchain/runnable.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ module.exports = function initialize(shim, langchain) {

if (common.shouldSkipInstrumentation(agent.config)) {
shim.logger.debug(
'langchain instrumentation is disabled. To enable set `config.ai_monitoring.enabled` to true'
'langchain instrumentation is disabled. To enable set `config.ai_monitoring.enabled` to true'
)
return
}
Expand Down Expand Up @@ -198,6 +198,7 @@ function recordChatCompletionEvents({ segment, messages, events, metadata, tags,

common.recordEvent({
agent,
shim,
type: 'LlmChatCompletionSummary',
pkgVersion,
msg: completionSummary
Expand Down Expand Up @@ -266,6 +267,7 @@ function recordCompletions({ events, completionSummary, agent, segment, shim })

common.recordEvent({
agent,
shim,
type: 'LlmChatCompletionMessage',
pkgVersion: shim.pkgVersion,
msg: completionMsg
Expand Down
2 changes: 1 addition & 1 deletion lib/instrumentation/langchain/tools.js
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ module.exports = function initialize(shim, tools) {
segment,
error: err != null
})
recordEvent({ agent, type: 'LlmTool', pkgVersion, msg: toolEvent })
recordEvent({ agent, shim, type: 'LlmTool', pkgVersion, msg: toolEvent })

if (err) {
agent.errors.add(
Expand Down
9 changes: 6 additions & 3 deletions lib/instrumentation/langchain/vectorstore.js
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,13 @@ const LlmErrorMessage = require('../../llm-events/error-message')
* @param {number} params.k vector search top k
* @param {object} params.output vector search documents
* @param {Agent} params.agent NR agent instance
* @param {Shim} params.shim current shim instance
* @param {TraceSegment} params.segment active segment from vector search
* @param {string} params.pkgVersion langchain version
* @param {Error|null} params.err error from the vector search request, if it exists
*/
function recordVectorSearch({ request, k, output, agent, segment, pkgVersion, err }) {
function recordVectorSearch({ request, k, output, agent, shim, segment, pkgVersion, err }) {
const vectorSearch = new LangChainVectorSearch({
agent,
segment,
Expand All @@ -37,7 +39,7 @@ function recordVectorSearch({ request, k, output, agent, segment, pkgVersion, er
error: err !== null
})

recordEvent({ agent, type: 'LlmVectorSearch', pkgVersion, msg: vectorSearch })
recordEvent({ agent, shim, type: 'LlmVectorSearch', pkgVersion, msg: vectorSearch })

output.forEach((document, sequence) => {
const vectorSearchResult = new LangChainVectorSearchResult({
Expand All @@ -51,6 +53,7 @@ function recordVectorSearch({ request, k, output, agent, segment, pkgVersion, er

recordEvent({
agent,
shim,
type: 'LlmVectorSearchResult',
pkgVersion,
msg: vectorSearchResult
Expand Down Expand Up @@ -97,7 +100,7 @@ module.exports = function initialize(shim, vectorstores) {
}

segment.end()
recordVectorSearch({ request, k, output, agent, segment, pkgVersion, err })
recordVectorSearch({ request, k, output, agent, shim, segment, pkgVersion, err })

segment.transaction.trace.attributes.addAttribute(DESTINATIONS.TRANS_EVENT, 'llm', true)
}
Expand Down
10 changes: 9 additions & 1 deletion lib/instrumentation/openai.js
Original file line number Diff line number Diff line change
Expand Up @@ -99,12 +99,18 @@ function addLlmMeta({ agent, segment }) {
*
* @param {object} params input params
* @param {Agent} params.agent NR agent instance
* @param {Shim} params.shim the current shim instance
* @param {TraceSegment} params.segment active segment from chat completion
* @param {object} params.request chat completion params
* @param {object} params.response chat completion response
* @param {boolean} [params.err] err if it exists
*/
function recordChatCompletionMessages({ agent, segment, request, response, err }) {
function recordChatCompletionMessages({ agent, shim, segment, request, response, err }) {
if (shouldSkipInstrumentation(agent.config, shim) === true) {
shim.logger.debug('skipping sending of ai data')
return
}

if (!response) {
// If we get an error, it is possible that `response = null`.
// In that case, we define it to be an empty object.
Expand Down Expand Up @@ -195,6 +201,7 @@ function instrumentStream({ agent, shim, request, response, segment }) {

recordChatCompletionMessages({
agent: shim.agent,
shim,
segment,
request,
response: chunk,
Expand Down Expand Up @@ -268,6 +275,7 @@ module.exports = function initialize(agent, openai, moduleName, shim) {
} else {
recordChatCompletionMessages({
agent,
shim,
segment,
request,
response,
Expand Down
9 changes: 9 additions & 0 deletions test/unit/config/config-server-side.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,15 @@ tap.test('when receiving server-side configuration', (t) => {
t.end()
})

t.test('should disable ai monitoring', (t) => {
  // Force the flag on first: the default is already `false`, so asserting
  // `false` before and after `onConnect` would pass even if `collect_ai`
  // were ignored. Starting from `true` proves the server setting actually
  // disables AI monitoring.
  config.ai_monitoring.enabled = true
  t.equal(config.ai_monitoring.enabled, true)
  config.onConnect({ collect_ai: false })
  t.equal(config.ai_monitoring.enabled, false)

  t.end()
})

t.test('should configure cross application tracing', (t) => {
config.cross_application_tracer.enabled = true

Expand Down
2 changes: 1 addition & 1 deletion test/unit/instrumentation/langchain/runnables.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ test('langchain/core/runnables unit tests', (t) => {
t.equal(shim.logger.debug.callCount, 1, 'should log 1 debug messages')
t.equal(
shim.logger.debug.args[0][0],
'langchain instrumentation is disabled. To enable set `config.ai_monitoring.enabled` to true'
'langchain instrumentation is disabled. To enable set `config.ai_monitoring.enabled` to true'
)
const isWrapped = shim.isWrapped(MockRunnable.RunnableSequence.prototype.invoke)
t.equal(isWrapped, false, 'should not wrap runnable invoke')
Expand Down

0 comments on commit baa3798

Please sign in to comment.