diff --git a/langchain-core/src/language_models/base.ts b/langchain-core/src/language_models/base.ts
index 80e1fa434969..4f1233426724 100644
--- a/langchain-core/src/language_models/base.ts
+++ b/langchain-core/src/language_models/base.ts
@@ -363,13 +363,14 @@ export abstract class BaseLanguageModel<
     callbackManager,
     ...params
   }: BaseLanguageModelParams) {
+    const { cache, ...rest } = params;
     super({
       callbacks: callbacks ?? callbackManager,
-      ...params,
+      ...rest,
     });
-    if (typeof params.cache === "object") {
-      this.cache = params.cache;
-    } else if (params.cache) {
+    if (typeof cache === "object") {
+      this.cache = cache;
+    } else if (cache) {
       this.cache = InMemoryCache.global();
     } else {
       this.cache = undefined;
diff --git a/langchain-core/src/language_models/tests/chat_models.test.ts b/langchain-core/src/language_models/tests/chat_models.test.ts
index 70ff187243e8..f335d5edc40f 100644
--- a/langchain-core/src/language_models/tests/chat_models.test.ts
+++ b/langchain-core/src/language_models/tests/chat_models.test.ts
@@ -323,3 +323,15 @@ test("Test ChatModel can stream back a custom event", async () => {
   }
   expect(customEvent).toBeDefined();
 });
+
+test(`Test ChatModel should not serialize a passed "cache" parameter`, async () => {
+  const model = new FakeListChatModel({
+    responses: ["hi"],
+    emitCustomEvent: true,
+    cache: true,
+  });
+  console.log(JSON.stringify(model));
+  expect(JSON.stringify(model)).toEqual(
+    `{"lc":1,"type":"constructor","id":["langchain","chat_models","fake-list","FakeListChatModel"],"kwargs":{"responses":["hi"],"emit_custom_event":true}}`
+  );
+});
diff --git a/langchain-core/src/utils/testing/index.ts b/langchain-core/src/utils/testing/index.ts
index f14629794293..59f8c8437662 100644
--- a/langchain-core/src/utils/testing/index.ts
+++ b/langchain-core/src/utils/testing/index.ts
@@ -354,6 +354,8 @@ export class FakeListChatModel extends BaseChatModel