From a85f662a035514609a129a3b5fd71a218636d057 Mon Sep 17 00:00:00 2001
From: jacoblee93
Date: Tue, 24 Sep 2024 14:16:32 -0700
Subject: [PATCH 1/4] Add payload formatting utils to Anthropic and OpenAI

---
 libs/langchain-anthropic/src/index.ts         |  2 +-
 .../src/tests/chat_models.test.ts             |  2 +-
 .../src/tests/prompts.int.test.ts             | 25 +++++++++
 libs/langchain-anthropic/src/utils/prompts.ts | 51 +++++++++++++++++++
 libs/langchain-openai/src/index.ts            |  1 +
 .../src/tests/prompts.int.test.ts             | 22 ++++++++
 libs/langchain-openai/src/utils/prompts.ts    | 47 +++++++++++++++++
 7 files changed, 148 insertions(+), 2 deletions(-)
 create mode 100644 libs/langchain-anthropic/src/tests/prompts.int.test.ts
 create mode 100644 libs/langchain-anthropic/src/utils/prompts.ts
 create mode 100644 libs/langchain-openai/src/tests/prompts.int.test.ts
 create mode 100644 libs/langchain-openai/src/utils/prompts.ts

diff --git a/libs/langchain-anthropic/src/index.ts b/libs/langchain-anthropic/src/index.ts
index b45ffe6fb26b..f6dcb46020ce 100644
--- a/libs/langchain-anthropic/src/index.ts
+++ b/libs/langchain-anthropic/src/index.ts
@@ -1,2 +1,2 @@
 export * from "./chat_models.js";
-export { _convertMessagesToAnthropicPayload } from "./utils/message_inputs.js";
+export { convertPromptToAnthropic } from "./utils/prompts.js";
diff --git a/libs/langchain-anthropic/src/tests/chat_models.test.ts b/libs/langchain-anthropic/src/tests/chat_models.test.ts
index 68644d453187..e605337c2d8e 100644
--- a/libs/langchain-anthropic/src/tests/chat_models.test.ts
+++ b/libs/langchain-anthropic/src/tests/chat_models.test.ts
@@ -3,7 +3,7 @@ import { AIMessage, HumanMessage, ToolMessage } from "@langchain/core/messages";
 import { z } from "zod";
 import { OutputParserException } from "@langchain/core/output_parsers";
 import { ChatAnthropic } from "../chat_models.js";
-import { _convertMessagesToAnthropicPayload } from "../index.js";
+import { _convertMessagesToAnthropicPayload } from "../utils/message_inputs.js";
 
 test("withStructuredOutput with output validation", async () => {
   const model = new ChatAnthropic({
diff --git a/libs/langchain-anthropic/src/tests/prompts.int.test.ts b/libs/langchain-anthropic/src/tests/prompts.int.test.ts
new file mode 100644
index 000000000000..83aec1095456
--- /dev/null
+++ b/libs/langchain-anthropic/src/tests/prompts.int.test.ts
@@ -0,0 +1,25 @@
+import Anthropic from "@anthropic-ai/sdk";
+import { pull } from "langchain/hub";
+
+import { convertPromptToAnthropic } from "../utils/prompts.js";
+
+test("basic traceable implementation", async () => {
+  const prompt = await pull("jacob/joke-generator");
+  const formattedPrompt = await prompt.invoke({
+    topic: "cats",
+  });
+
+  const { system, messages } = convertPromptToAnthropic(formattedPrompt);
+
+  const anthropicClient = new Anthropic();
+
+  const anthropicResponse = await anthropicClient.messages.create({
+    model: "claude-3-haiku-20240307",
+    system,
+    messages: messages,
+    max_tokens: 1024,
+    stream: false,
+  });
+
+  expect(anthropicResponse.content).toBeDefined();
+});
diff --git a/libs/langchain-anthropic/src/utils/prompts.ts b/libs/langchain-anthropic/src/utils/prompts.ts
new file mode 100644
index 000000000000..7fb32b5903c2
--- /dev/null
+++ b/libs/langchain-anthropic/src/utils/prompts.ts
@@ -0,0 +1,51 @@
+import type { BasePromptValue } from "@langchain/core/prompt_values";
+import Anthropic from "@anthropic-ai/sdk";
+
+import { _convertMessagesToAnthropicPayload } from "./message_inputs.js";
+
+/**
+ * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
+ * a format expected by Anthropic's JS SDK.
+ *
+ * Requires the "@langchain/anthropic" package to be installed in addition
+ * to the Anthropic SDK.
+ *
+ * @example
+ * ```ts
+ * import { convertPromptToAnthropic } from "@langchain/anthropic";
+ * import { pull } from "langchain/hub";
+ *
+ * import Anthropic from '@anthropic-ai/sdk';
+ *
+ * const prompt = await pull("jacob/joke-generator");
+ * const formattedPrompt = await prompt.invoke({
+ *   topic: "cats",
+ * });
+ *
+ * const { system, messages } = convertPromptToAnthropic(formattedPrompt);
+ *
+ * const anthropicClient = new Anthropic({
+ *   apiKey: 'your_api_key',
+ * });
+ *
+ * const anthropicResponse = await anthropicClient.messages.create({
+ *   model: "claude-3-5-sonnet-20240620",
+ *   max_tokens: 1024,
+ *   stream: false,
+ *   system,
+ *   messages,
+ * });
+ * ```
+ * @param formattedPrompt
+ * @returns A partial Anthropic payload.
+ */
+export function convertPromptToAnthropic(
+  formattedPrompt: BasePromptValue
+): Anthropic.Messages.MessageCreateParams {
+  const messages = formattedPrompt.toChatMessages();
+  const anthropicBody = _convertMessagesToAnthropicPayload(messages);
+  if (anthropicBody.messages === undefined) {
+    anthropicBody.messages = [];
+  }
+  return anthropicBody;
+}
diff --git a/libs/langchain-openai/src/index.ts b/libs/langchain-openai/src/index.ts
index 76e8df482138..c18e60f26a0a 100644
--- a/libs/langchain-openai/src/index.ts
+++ b/libs/langchain-openai/src/index.ts
@@ -9,3 +9,4 @@ export * from "./types.js";
 export * from "./utils/openai.js";
 export * from "./utils/azure.js";
 export * from "./tools/index.js";
+export { convertPromptToOpenAI } from "./utils/prompts.js";
diff --git a/libs/langchain-openai/src/tests/prompts.int.test.ts b/libs/langchain-openai/src/tests/prompts.int.test.ts
new file mode 100644
index 000000000000..8d900388a302
--- /dev/null
+++ b/libs/langchain-openai/src/tests/prompts.int.test.ts
@@ -0,0 +1,22 @@
+import OpenAI from "openai";
+import { pull } from "langchain/hub";
+
+import { convertPromptToOpenAI } from "../utils/prompts.js";
+
+test("basic traceable implementation", async () => {
+  const prompt = await pull("jacob/joke-generator");
+  const formattedPrompt = await prompt.invoke({
+    topic: "cats",
+  });
+
+  const { messages } = convertPromptToOpenAI(formattedPrompt);
+
+  const openAIClient = new OpenAI();
+
+  const openAIResponse = await openAIClient.chat.completions.create({
+    model: "gpt-4o-mini",
+    messages,
+  });
+
+  expect(openAIResponse.choices.length).toBeGreaterThan(0);
+});
diff --git a/libs/langchain-openai/src/utils/prompts.ts b/libs/langchain-openai/src/utils/prompts.ts
new file mode 100644
index 000000000000..8951ed63a0d9
--- /dev/null
+++ b/libs/langchain-openai/src/utils/prompts.ts
@@ -0,0 +1,47 @@
+/* eslint-disable import/no-extraneous-dependencies */
+import type { BasePromptValue } from "@langchain/core/prompt_values";
+import type { OpenAI } from "openai";
+
+import { _convertMessagesToOpenAIParams } from "../chat_models.js";
+
+/**
+ * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
+ * a format expected by OpenAI's JS SDK.
+ *
+ * Requires the "@langchain/openai" package to be installed in addition
+ * to the OpenAI SDK.
+ *
+ * @example
+ * ```ts
+ * import { convertPromptToOpenAI } from "@langchain/openai";
+ * import { pull } from "langchain/hub";
+ *
+ * import OpenAI from 'openai';
+ *
+ * const prompt = await pull("jacob/joke-generator");
+ * const formattedPrompt = await prompt.invoke({
+ *   topic: "cats",
+ * });
+ *
+ * const { messages } = convertPromptToOpenAI(formattedPrompt);
+ *
+ * const openAIClient = new OpenAI();
+ *
+ * const openaiResponse = await openAIClient.chat.completions.create({
+ *   model: "gpt-4o",
+ *   messages,
+ * });
+ * ```
+ * @param formattedPrompt
+ * @returns A partial OpenAI payload.
+ */
+export function convertPromptToOpenAI(formattedPrompt: BasePromptValue): {
+  messages: OpenAI.Chat.ChatCompletionMessageParam[];
+} {
+  const messages = formattedPrompt.toChatMessages();
+  return {
+    messages: _convertMessagesToOpenAIParams(
+      messages
+    ) as OpenAI.Chat.ChatCompletionMessageParam[],
+  };
+}
From 8ba733c0c393ada8d97d758e27e9e920c127d412 Mon Sep 17 00:00:00 2001
From: jacoblee93
Date: Tue, 24 Sep 2024 14:28:34 -0700
Subject: [PATCH 2/4] Fix test names

---
 libs/langchain-anthropic/src/tests/prompts.int.test.ts | 2 +-
 libs/langchain-openai/src/tests/prompts.int.test.ts    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/libs/langchain-anthropic/src/tests/prompts.int.test.ts b/libs/langchain-anthropic/src/tests/prompts.int.test.ts
index 83aec1095456..9a6ac8c9a015 100644
--- a/libs/langchain-anthropic/src/tests/prompts.int.test.ts
+++ b/libs/langchain-anthropic/src/tests/prompts.int.test.ts
@@ -3,7 +3,7 @@ import { pull } from "langchain/hub";
 
 import { convertPromptToAnthropic } from "../utils/prompts.js";
 
-test("basic traceable implementation", async () => {
+test("Convert hub prompt to Anthropic payload and invoke", async () => {
   const prompt = await pull("jacob/joke-generator");
   const formattedPrompt = await prompt.invoke({
     topic: "cats",
diff --git a/libs/langchain-openai/src/tests/prompts.int.test.ts b/libs/langchain-openai/src/tests/prompts.int.test.ts
index 8d900388a302..71c89be6bf0f 100644
--- a/libs/langchain-openai/src/tests/prompts.int.test.ts
+++ b/libs/langchain-openai/src/tests/prompts.int.test.ts
@@ -3,7 +3,7 @@ import { pull } from "langchain/hub";
 
 import { convertPromptToOpenAI } from "../utils/prompts.js";
 
-test("basic traceable implementation", async () => {
+test("Convert hub prompt to OpenAI payload and invoke", async () => {
   const prompt = await pull("jacob/joke-generator");
   const formattedPrompt = await prompt.invoke({
     topic: "cats",
From 2d9d8d68a7e481c65015ff1a1e5c92e8b6bbeb55 Mon Sep 17 00:00:00 2001
From: jacoblee93
Date: Tue, 24 Sep 2024 14:33:55 -0700
Subject: [PATCH 3/4] Fix CI

---
 libs/langchain-anthropic/src/tests/prompts.int.test.ts | 7 +++++--
 libs/langchain-openai/src/tests/prompts.int.test.ts    | 7 +++++--
 2 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/libs/langchain-anthropic/src/tests/prompts.int.test.ts b/libs/langchain-anthropic/src/tests/prompts.int.test.ts
index 9a6ac8c9a015..1ac2f1dce3f6 100644
--- a/libs/langchain-anthropic/src/tests/prompts.int.test.ts
+++ b/libs/langchain-anthropic/src/tests/prompts.int.test.ts
@@ -1,10 +1,13 @@
 import Anthropic from "@anthropic-ai/sdk";
-import { pull } from "langchain/hub";
+import { ChatPromptTemplate } from "@langchain/core/prompts";
 
 import { convertPromptToAnthropic } from "../utils/prompts.js";
 
 test("Convert hub prompt to Anthropic payload and invoke", async () => {
-  const prompt = await pull("jacob/joke-generator");
+  const prompt = ChatPromptTemplate.fromMessages([
+    ["system", "You are a world class comedian"],
+    ["human", "Tell me a joke about {topic}"],
+  ]);
   const formattedPrompt = await prompt.invoke({
     topic: "cats",
   });
diff --git a/libs/langchain-openai/src/tests/prompts.int.test.ts b/libs/langchain-openai/src/tests/prompts.int.test.ts
index 71c89be6bf0f..5e54da012fe0 100644
--- a/libs/langchain-openai/src/tests/prompts.int.test.ts
+++ b/libs/langchain-openai/src/tests/prompts.int.test.ts
@@ -1,10 +1,13 @@
 import OpenAI from "openai";
-import { pull } from "langchain/hub";
+import { ChatPromptTemplate } from "@langchain/core/prompts";
 
 import { convertPromptToOpenAI } from "../utils/prompts.js";
 
 test("Convert hub prompt to OpenAI payload and invoke", async () => {
-  const prompt = await pull("jacob/joke-generator");
+  const prompt = ChatPromptTemplate.fromMessages([
+    ["system", "You are a world class comedian"],
+    ["human", "Tell me a joke about {topic}"],
+  ]);
   const formattedPrompt = await prompt.invoke({
     topic: "cats",
   });
From 819c69017732059b06ee5d92bfbbf322ca209349 Mon Sep 17 00:00:00 2001
From: jacoblee93
Date: Tue, 24 Sep 2024 14:38:36 -0700
Subject: [PATCH 4/4] Fix lint

---
 libs/langchain-anthropic/src/tests/prompts.int.test.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/libs/langchain-anthropic/src/tests/prompts.int.test.ts b/libs/langchain-anthropic/src/tests/prompts.int.test.ts
index 1ac2f1dce3f6..14b662b9c420 100644
--- a/libs/langchain-anthropic/src/tests/prompts.int.test.ts
+++ b/libs/langchain-anthropic/src/tests/prompts.int.test.ts
@@ -19,7 +19,7 @@ test("Convert hub prompt to Anthropic payload and invoke", async () => {
   const anthropicResponse = await anthropicClient.messages.create({
     model: "claude-3-haiku-20240307",
     system,
-    messages: messages,
+    messages,
    max_tokens: 1024,
     stream: false,
   });
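
Usage note: taken together, the patches above export `convertPromptToAnthropic` from "@langchain/anthropic" and `convertPromptToOpenAI` from "@langchain/openai". Below is a minimal end-to-end sketch of how the two utilities fit together, based on the integration tests in this series; it is illustrative only and assumes both LangChain packages and both provider SDKs are installed, with `ANTHROPIC_API_KEY` and `OPENAI_API_KEY` set in the environment.

```ts
import Anthropic from "@anthropic-ai/sdk";
import OpenAI from "openai";
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { convertPromptToAnthropic } from "@langchain/anthropic";
import { convertPromptToOpenAI } from "@langchain/openai";

async function main() {
  // Build and format a LangChain prompt once.
  const prompt = ChatPromptTemplate.fromMessages([
    ["system", "You are a world class comedian"],
    ["human", "Tell me a joke about {topic}"],
  ]);
  const formattedPrompt = await prompt.invoke({ topic: "cats" });

  // Hand the converted payload to Anthropic's own SDK.
  const { system, messages } = convertPromptToAnthropic(formattedPrompt);
  const anthropicResponse = await new Anthropic().messages.create({
    model: "claude-3-haiku-20240307",
    system,
    messages,
    max_tokens: 1024,
  });
  console.log(anthropicResponse.content);

  // Or hand the same formatted prompt to OpenAI's SDK instead.
  const openAIResponse = await new OpenAI().chat.completions.create({
    model: "gpt-4o-mini",
    messages: convertPromptToOpenAI(formattedPrompt).messages,
  });
  console.log(openAIResponse.choices[0]?.message?.content);
}

main();
```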