google-genai[minor]: Add support for json output (langchain-ai#6519)
* support json output for google-genai

* Apply suggestions from code review

* Add test

---------

Co-authored-by: juji <him@jujiyangasli.com>
Co-authored-by: Brace Sproul <braceasproul@gmail.com>
Co-authored-by: jacoblee93 <jacoblee93@gmail.com>
4 people authored and CarterMorris committed Nov 10, 2024
1 parent bbf5062 commit 5e96b85
Showing 2 changed files with 29 additions and 0 deletions.
10 changes: 10 additions & 0 deletions libs/langchain-google-genai/src/chat_models.ts
@@ -75,6 +75,8 @@ export interface GoogleGenerativeAIChatInput
extends BaseChatModelParams,
Pick<GoogleGenerativeAIChatCallOptions, "streamUsage"> {
/**
* @deprecated Use "model" instead.
*
* Model Name to use
*
* Alias for `model`
@@ -167,6 +169,13 @@ export interface GoogleGenerativeAIChatInput

/** Whether to stream the results or not */
streaming?: boolean;

/**
* Whether or not to force the model to respond with JSON.
* Available for `gemini-1.5` models and later.
* @default false
*/
json?: boolean;
}

/**
@@ -321,6 +330,7 @@ export class ChatGoogleGenerativeAI
temperature: this.temperature,
topP: this.topP,
topK: this.topK,
...(fields?.json ? { responseMimeType: "application/json" } : {}),
},
},
{
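For context, here is a minimal usage sketch of the new `json` option. It is not part of this commit; the import path, prompt, and parsing step are assumptions based on the diff above, where `json: true` maps to `responseMimeType: "application/json"` in the generation config.

import { ChatGoogleGenerativeAI } from "@langchain/google-genai";

// Hypothetical example: enable JSON mode on a Gemini 1.5 model.
const model = new ChatGoogleGenerativeAI({
  model: "gemini-1.5-flash",
  temperature: 0,
  json: true, // forwarded as responseMimeType: "application/json"
});

// With JSON mode enabled, the model is constrained to return JSON text,
// so the response content should be parseable.
const res = await model.invoke(
  "List three primary colors as a JSON array of strings."
);
console.log(JSON.parse(res.content as string));
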
19 changes: 19 additions & 0 deletions libs/langchain-google-genai/src/tests/chat_models.int.test.ts
@@ -519,3 +519,22 @@ test("Invoke token count usage_metadata", async () => {
res.usage_metadata.input_tokens + res.usage_metadata.output_tokens
);
});

test("Invoke with JSON mode", async () => {
const model = new ChatGoogleGenerativeAI({
model: "gemini-1.5-flash",
temperature: 0,
maxOutputTokens: 10,
json: true,
});
const res = await model.invoke("Why is the sky blue? Be concise.");
expect(res?.usage_metadata).toBeDefined();
if (!res?.usage_metadata) {
return;
}
expect(res.usage_metadata.input_tokens).toBeGreaterThan(1);
expect(res.usage_metadata.output_tokens).toBeGreaterThan(1);
expect(res.usage_metadata.total_tokens).toBe(
res.usage_metadata.input_tokens + res.usage_metadata.output_tokens
);
});
