Skip to content

Commit

Permalink
fix(anthropic): Add cache usage metadata to Anthropic streaming responses (#7368)
Browse files Browse the repository at this point in the history
  • Loading branch information
jacoblee93 authored Dec 15, 2024
1 parent 2cdf57c commit f05af16
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 3 deletions.
10 changes: 10 additions & 0 deletions libs/langchain-anthropic/src/tests/chat_models.int.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -721,6 +721,16 @@ test("system prompt caching", async () => {
expect(res2.response_metadata.usage.cache_read_input_tokens).toBeGreaterThan(
0
);
const stream = await model.stream(messages);
let agg;
for await (const chunk of stream) {
agg = agg === undefined ? chunk : concat(agg, chunk);
}
expect(agg).toBeDefined();
expect(agg!.response_metadata.usage.cache_creation_input_tokens).toBe(0);
expect(agg!.response_metadata.usage.cache_read_input_tokens).toBeGreaterThan(
0
);
});

// TODO: Add proper test with long tool content
Expand Down
12 changes: 9 additions & 3 deletions libs/langchain-anthropic/src/utils/message_outputs.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,16 +30,22 @@ export function _makeMessageChunkFromAnthropicEvent(
filteredAdditionalKwargs[key] = value;
}
}
const { input_tokens, output_tokens, ...rest } = usage ?? {};
const usageMetadata: UsageMetadata = {
input_tokens: usage.input_tokens,
output_tokens: usage.output_tokens,
total_tokens: usage.input_tokens + usage.output_tokens,
input_tokens,
output_tokens,
total_tokens: input_tokens + output_tokens,
};
return {
chunk: new AIMessageChunk({
content: fields.coerceContentToString ? "" : [],
additional_kwargs: filteredAdditionalKwargs,
usage_metadata: fields.streamUsage ? usageMetadata : undefined,
response_metadata: {
usage: {
...rest,
},
},
id: data.message.id,
}),
};
Expand Down

0 comments on commit f05af16

Please sign in to comment.