Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: added ibm watsonx chat model and streaming to base llm model #3577

Open
wants to merge 5 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/api_refs/typedoc.json
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,7 @@
"../../langchain/src/chat_models/llama_cpp.ts",
"../../langchain/src/chat_models/yandex.ts",
"../../langchain/src/chat_models/fake.ts",
"../../langchain/src/chat_models/watsonx_ai.ts",
"../../langchain/src/schema/index.ts",
"../../langchain/src/schema/document.ts",
"../../langchain/src/schema/output_parser.ts",
Expand Down
1 change: 1 addition & 0 deletions environment_tests/test-exports-bun/src/entrypoints.js
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ export * from "langchain/chat_models/ollama";
export * from "langchain/chat_models/minimax";
export * from "langchain/chat_models/yandex";
export * from "langchain/chat_models/fake";
export * from "langchain/chat_models/watsonx_ai";
export * from "langchain/schema";
export * from "langchain/schema/document";
export * from "langchain/schema/output_parser";
Expand Down
1 change: 1 addition & 0 deletions environment_tests/test-exports-cf/src/entrypoints.js
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ export * from "langchain/chat_models/ollama";
export * from "langchain/chat_models/minimax";
export * from "langchain/chat_models/yandex";
export * from "langchain/chat_models/fake";
export * from "langchain/chat_models/watsonx_ai";
export * from "langchain/schema";
export * from "langchain/schema/document";
export * from "langchain/schema/output_parser";
Expand Down
1 change: 1 addition & 0 deletions environment_tests/test-exports-cjs/src/entrypoints.js
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ const chat_models_ollama = require("langchain/chat_models/ollama");
const chat_models_minimax = require("langchain/chat_models/minimax");
const chat_models_yandex = require("langchain/chat_models/yandex");
const chat_models_fake = require("langchain/chat_models/fake");
const chat_models_watsonx_ai = require("langchain/chat_models/watsonx_ai");
const schema = require("langchain/schema");
const schema_document = require("langchain/schema/document");
const schema_output_parser = require("langchain/schema/output_parser");
Expand Down
1 change: 1 addition & 0 deletions environment_tests/test-exports-esbuild/src/entrypoints.js
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ import * as chat_models_ollama from "langchain/chat_models/ollama";
import * as chat_models_minimax from "langchain/chat_models/minimax";
import * as chat_models_yandex from "langchain/chat_models/yandex";
import * as chat_models_fake from "langchain/chat_models/fake";
import * as chat_models_watsonx_ai from "langchain/chat_models/watsonx_ai";
import * as schema from "langchain/schema";
import * as schema_document from "langchain/schema/document";
import * as schema_output_parser from "langchain/schema/output_parser";
Expand Down
1 change: 1 addition & 0 deletions environment_tests/test-exports-esm/src/entrypoints.js
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ import * as chat_models_ollama from "langchain/chat_models/ollama";
import * as chat_models_minimax from "langchain/chat_models/minimax";
import * as chat_models_yandex from "langchain/chat_models/yandex";
import * as chat_models_fake from "langchain/chat_models/fake";
import * as chat_models_watsonx_ai from "langchain/chat_models/watsonx_ai";
import * as schema from "langchain/schema";
import * as schema_document from "langchain/schema/document";
import * as schema_output_parser from "langchain/schema/output_parser";
Expand Down
1 change: 1 addition & 0 deletions environment_tests/test-exports-vercel/src/entrypoints.js
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ export * from "langchain/chat_models/ollama";
export * from "langchain/chat_models/minimax";
export * from "langchain/chat_models/yandex";
export * from "langchain/chat_models/fake";
export * from "langchain/chat_models/watsonx_ai";
export * from "langchain/schema";
export * from "langchain/schema/document";
export * from "langchain/schema/output_parser";
Expand Down
1 change: 1 addition & 0 deletions environment_tests/test-exports-vite/src/entrypoints.js
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ export * from "langchain/chat_models/ollama";
export * from "langchain/chat_models/minimax";
export * from "langchain/chat_models/yandex";
export * from "langchain/chat_models/fake";
export * from "langchain/chat_models/watsonx_ai";
export * from "langchain/schema";
export * from "langchain/schema/document";
export * from "langchain/schema/output_parser";
Expand Down
20 changes: 20 additions & 0 deletions examples/src/llms/watsonx_ai-chat.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import { WatsonxAIChat } from "langchain/chat_models/watsonx_ai";

// Streaming example for the IBM watsonx.ai chat model.
// Targets the eu-de region and caps each response at 100 new tokens.
const chat = new WatsonxAIChat({
  clientConfig: { region: "eu-de" },
  modelParameters: { max_new_tokens: 100 },
});

const tokenStream = await chat.stream(
  "What would be a good company name for a company that makes colorful socks?"
);

// Accumulate the streamed chunks and print the running text after each one.
let accumulated = "";
for await (const piece of tokenStream) {
  accumulated += piece.content;
  console.log(accumulated);
}
3 changes: 3 additions & 0 deletions langchain/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -556,6 +556,9 @@ chat_models/yandex.d.ts
chat_models/fake.cjs
chat_models/fake.js
chat_models/fake.d.ts
chat_models/watsonx_ai.cjs
chat_models/watsonx_ai.js
chat_models/watsonx_ai.d.ts
schema.cjs
schema.js
schema.d.ts
Expand Down
8 changes: 8 additions & 0 deletions langchain/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -568,6 +568,9 @@
"chat_models/fake.cjs",
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hey there! I've noticed that this PR introduces new dependencies related to chat models. This comment is just to flag the change for maintainers to review the impact on peer/dev/hard dependencies. Great work on the PR!

"chat_models/fake.js",
"chat_models/fake.d.ts",
"chat_models/watsonx_ai.cjs",
"chat_models/watsonx_ai.js",
"chat_models/watsonx_ai.d.ts",
"schema.cjs",
"schema.js",
"schema.d.ts",
Expand Down Expand Up @@ -2141,6 +2144,11 @@
"import": "./chat_models/fake.js",
"require": "./chat_models/fake.cjs"
},
"./chat_models/watsonx_ai": {
"types": "./chat_models/watsonx_ai.d.ts",
"import": "./chat_models/watsonx_ai.js",
"require": "./chat_models/watsonx_ai.cjs"
},
"./schema": {
"types": "./schema.d.ts",
"import": "./schema.js",
Expand Down
1 change: 1 addition & 0 deletions langchain/scripts/create-entrypoints.js
Original file line number Diff line number Diff line change
Expand Up @@ -217,6 +217,7 @@ const entrypoints = {
"chat_models/llama_cpp": "chat_models/llama_cpp",
"chat_models/yandex": "chat_models/yandex",
"chat_models/fake": "chat_models/fake",
"chat_models/watsonx_ai": "chat_models/watsonx_ai",
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We are no longer adding integrations to the langchain package. Instead, export only from langchain-community (via its create-entrypoints file) and remove the re-exports from the main langchain package.
Thank you!

// schema
schema: "schema/index",
"schema/document": "schema/document",
Expand Down
1 change: 1 addition & 0 deletions langchain/src/chat_models/watsonx_ai.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
export * from "@langchain/community/chat_models/watsonx_ai";
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

drop file

1 change: 1 addition & 0 deletions langchain/src/load/import_map.ts
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ export * as chat_models__ollama from "../chat_models/ollama.js";
export * as chat_models__minimax from "../chat_models/minimax.js";
export * as chat_models__yandex from "../chat_models/yandex.js";
export * as chat_models__fake from "../chat_models/fake.js";
export * as chat_models__watsonx_ai from "../chat_models/watsonx_ai.js";
export * as schema from "../schema/index.js";
export * as schema__document from "../schema/document.js";
export * as schema__output_parser from "../schema/output_parser.js";
Expand Down
3 changes: 3 additions & 0 deletions libs/langchain-community/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -325,6 +325,9 @@ chat_models/ollama.d.ts
chat_models/portkey.cjs
chat_models/portkey.js
chat_models/portkey.d.ts
chat_models/watsonx_ai.cjs
chat_models/watsonx_ai.js
chat_models/watsonx_ai.d.ts
chat_models/yandex.cjs
chat_models/yandex.js
chat_models/yandex.d.ts
Expand Down
8 changes: 8 additions & 0 deletions libs/langchain-community/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -1011,6 +1011,11 @@
"import": "./chat_models/portkey.js",
"require": "./chat_models/portkey.cjs"
},
"./chat_models/watsonx_ai": {
"types": "./chat_models/watsonx_ai.d.ts",
"import": "./chat_models/watsonx_ai.js",
"require": "./chat_models/watsonx_ai.cjs"
},
"./chat_models/yandex": {
"types": "./chat_models/yandex.d.ts",
"import": "./chat_models/yandex.js",
Expand Down Expand Up @@ -1542,6 +1547,9 @@
"chat_models/portkey.cjs",
"chat_models/portkey.js",
"chat_models/portkey.d.ts",
"chat_models/watsonx_ai.cjs",
"chat_models/watsonx_ai.js",
"chat_models/watsonx_ai.d.ts",
"chat_models/yandex.cjs",
"chat_models/yandex.js",
"chat_models/yandex.d.ts",
Expand Down
1 change: 1 addition & 0 deletions libs/langchain-community/scripts/create-entrypoints.js
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@ const entrypoints = {
"chat_models/minimax": "chat_models/minimax",
"chat_models/ollama": "chat_models/ollama",
"chat_models/portkey": "chat_models/portkey",
"chat_models/watsonx_ai": "chat_models/watsonx_ai",
"chat_models/yandex": "chat_models/yandex",
// callbacks
"callbacks/handlers/llmonitor": "callbacks/handlers/llmonitor",
Expand Down
157 changes: 157 additions & 0 deletions libs/langchain-community/src/chat_models/watsonx_ai.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,157 @@
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import {
type BaseChatModelParams,
SimpleChatModel,
} from "@langchain/core/language_models/chat_models";
import {
AIMessageChunk,
BaseMessage,
ChatMessage,
} from "@langchain/core/messages";
import { ChatGenerationChunk } from "@langchain/core/outputs";
import { getEnvironmentVariable } from "@langchain/core/utils/env";
import type {
WatsonModelParameters,
WatsonxAIParams,
} from "../types/watsonx-types.js";
import { WatsonApiClient } from "../utils/watsonx-client.js";

export class WatsonxAIChat extends SimpleChatModel {
private readonly watsonApiClient: WatsonApiClient;

readonly modelId!: string;
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Don't use definite assignment assertions, instead make it a non nullable type and add checks in the constructor that verify the value is defined.

These can come back to bite you in production (I've done this before and it was not fun to debug 😅)


readonly modelParameters?: WatsonModelParameters;

readonly projectId!: string;

constructor(fields: WatsonxAIParams & BaseChatModelParams) {
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Lets redefine this as an interface above the class:

interface WatsonxAIChatParams extends WatsonxAIParams, BaseChatModelParams {};

This way it's easy to:

  1. export the class params type for use outside this file
  2. add extra properties in the future.

Also, I believe all the properties on those two interfaces are optional, so we should be able to do:

Suggested change
constructor(fields: WatsonxAIParams & BaseChatModelParams) {
constructor(fields?: WatsonxAIChatParams) {

super(fields);

const {
clientConfig = {},
modelId = "meta-llama/llama-2-70b-chat",
modelParameters,
projectId = getEnvironmentVariable("WATSONX_PROJECT_ID") ?? "",
} = fields;

this.modelId = modelId;
this.modelParameters = modelParameters;
this.projectId = projectId;

const {
apiKey = getEnvironmentVariable("IBM_CLOUD_API_KEY"),
apiVersion = "2023-05-29",
region = "us-south",
} = clientConfig;

if (!apiKey) {
throw new Error("Missing IBM Cloud API Key");
}

if (!this.projectId) {
throw new Error("Missing WatsonX AI Project ID");
}

this.watsonApiClient = new WatsonApiClient({
apiKey,
apiVersion,
region,
});
}

protected _formatMessagesAsPrompt(messages: BaseMessage[]): string {
Copy link
Collaborator

@jacoblee93 jacoblee93 Dec 13, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We should make a Llama adapter or something at some point...

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, definitely — I was thinking about it too. Right now this is copy-pasted directly from the Ollama model and assumes the user only uses a Llama-based chat model. However, Watson offers the ability to run many models, including spinning up any model from HF...

return messages
.map((message) => {
let messageText;
if (message._getType() === "human") {
messageText = `[INST] ${message.content} [/INST]`;
} else if (message._getType() === "ai") {
messageText = message.content;
} else if (message._getType() === "system") {
messageText = `<<SYS>> ${message.content} <</SYS>>`;
} else if (ChatMessage.isInstance(message)) {
messageText = `\n\n${message.role[0].toUpperCase()}${message.role.slice(
1
)}: ${message.content}`;
} else {
console.warn(
`Unsupported message type passed to Watson: "${message._getType()}"`
);
messageText = "";
}
return messageText;
})
.join("\n");
}

/**
 * Merges per-call `llmOutput` payloads into one object for the final result.
 * This integration attaches no extra output metadata, so it returns an
 * empty object.
 */
_combineLLMOutput() {
return {};
}

/**
 * Non-streaming entry point: drains the streaming generator and
 * concatenates every chunk's message content into a single string.
 *
 * @param messages - Chat history to send to the model.
 * @param options - Parsed call options forwarded to the stream.
 * @param runManager - Optional callback manager for token events.
 * @returns The full generated completion text.
 */
async _call(
  messages: BaseMessage[],
  options: this["ParsedCallOptions"],
  runManager: CallbackManagerForLLMRun | undefined
): Promise<string> {
  let completion = "";
  for await (const chunk of this._streamResponseChunks(
    messages,
    options,
    runManager
  )) {
    completion += chunk.message.content;
  }
  return completion;
}

override async *_streamResponseChunks(
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think you need the override here

_messages: BaseMessage[],
_options: this["ParsedCallOptions"],
_runManager?: CallbackManagerForLLMRun
Comment on lines +106 to +108
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Drop the underscore if they're being used. Typically, variables prefixed with an underscore are unused, and the underscore is used to bypass a lint rule for no-unused-variables

): AsyncGenerator<ChatGenerationChunk> {
const formattedMessages = this._formatMessagesAsPrompt(_messages);
const stream = await this.caller.call(async () =>
this.watsonApiClient.generateTextStream(
formattedMessages,
this.projectId,
this.modelId,
this.modelParameters
)
);

for await (const data of stream) {
const [
{
generated_text,
generated_token_count,
input_token_count,
stop_reason,
},
] = data.results;
const generationChunk = new ChatGenerationChunk({
text: generated_text,
message: new AIMessageChunk({ content: generated_text }),
generationInfo: {
generated_token_count,
input_token_count,
stop_reason,
},
});
yield generationChunk;
await _runManager?.handleLLMNewToken(generated_text);
}
}

/**
 * Name under which this class is identified (e.g. for serialization).
 */
static lc_name() {
return "WatsonxAIChat";
}

/**
 * Model-type tag reported to LangChain (used in run metadata/tracing).
 */
_llmType(): string {
return "watsonx_ai";
}

/**
 * Maps constructor property names to the environment variables holding
 * their secret values, so serialization can redact them.
 *
 * NOTE(review): this class declares no `ibmCloudApiKey` field — the API key
 * is read from `clientConfig.apiKey` in the constructor — so the first key
 * here may not match any serialized property; confirm whether a dotted path
 * (e.g. "clientConfig.apiKey") is needed for redaction to work.
 */
get lc_secrets(): { [key: string]: string } | undefined {
return {
ibmCloudApiKey: "IBM_CLOUD_API_KEY",
projectId: "WATSONX_PROJECT_ID",
};
}
}
Loading