Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(community): add FileChatMessageHistory #7303

Merged
merged 5 commits into from
Dec 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 31 additions & 0 deletions docs/core_docs/docs/integrations/memory/file.mdx
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
---
hide_table_of_contents: true
---

import CodeBlock from "@theme/CodeBlock";

# File Chat Message History

The `FileSystemChatMessageHistory` uses a JSON file to store chat message history. For longer-term persistence across chat sessions, you can swap out the default in-memory `chatHistory` that backs chat memory classes like `BufferMemory`.

## Setup

You'll first need to install the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package:

```bash npm2yarn
npm install @langchain/community @langchain/core
```

import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";

<IntegrationInstallTooltip></IntegrationInstallTooltip>

```bash npm2yarn
npm install @langchain/openai @langchain/community @langchain/core
```

## Usage

import Example from "@examples/memory/file.ts";

<CodeBlock language="typescript">{Example}</CodeBlock>
71 changes: 71 additions & 0 deletions examples/src/memory/file.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import { ChatOpenAI } from "@langchain/openai";
import { FileSystemChatMessageHistory } from "@langchain/community/stores/message/file_system";
import { RunnableWithMessageHistory } from "@langchain/core/runnables";
import { StringOutputParser } from "@langchain/core/output_parsers";
import {
ChatPromptTemplate,
MessagesPlaceholder,
} from "@langchain/core/prompts";

// Chat model used for the demo; temperature 0 for deterministic replies.
const model = new ChatOpenAI({
  model: "gpt-3.5-turbo",
  temperature: 0,
});

// Prompt with a slot for prior turns so the model sees the conversation so far.
const prompt = ChatPromptTemplate.fromMessages([
  [
    "system",
    "You are a helpful assistant. Answer all questions to the best of your ability.",
  ],
  new MessagesPlaceholder("chat_history"),
  ["human", "{input}"],
]);

// prompt -> model -> string, so each invoke resolves to a plain string.
const chain = prompt.pipe(model).pipe(new StringOutputParser());

// Wrap the chain so history is loaded/saved per sessionId from a JSON file.
const chainWithHistory = new RunnableWithMessageHistory({
  runnable: chain,
  inputMessagesKey: "input",
  historyMessagesKey: "chat_history",
  getMessageHistory: async (sessionId) => {
    const chatHistory = new FileSystemChatMessageHistory({
      sessionId,
      userId: "user-id",
    });
    return chatHistory;
  },
});

const res1 = await chainWithHistory.invoke(
  { input: "Hi! I'm Jim." },
  { configurable: { sessionId: "langchain-test-session" } }
);
console.log({ res1 });
/*
  { res1: 'Hi Jim! How can I assist you today?' }
*/

const res2 = await chainWithHistory.invoke(
  { input: "What did I just say my name was?" },
  { configurable: { sessionId: "langchain-test-session" } }
);
console.log({ res2 });
/*
  { res2: 'You said your name was Jim.' }
*/

// Give this session a title
const chatHistory = (await chainWithHistory.getMessageHistory(
  "langchain-test-session"
)) as FileSystemChatMessageHistory;

await chatHistory.setContext({ title: "Introducing Jim" });

// List all sessions for the user
const sessions = await chatHistory.getAllSessions();
console.log(sessions);
/*
  [
    { id: 'langchain-test-session', context: { title: "Introducing Jim" } }
  ]
*/
4 changes: 4 additions & 0 deletions libs/langchain-community/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -770,6 +770,10 @@ stores/message/firestore.cjs
stores/message/firestore.js
stores/message/firestore.d.ts
stores/message/firestore.d.cts
stores/message/file_system.cjs
stores/message/file_system.js
stores/message/file_system.d.ts
stores/message/file_system.d.cts
stores/message/in_memory.cjs
stores/message/in_memory.js
stores/message/in_memory.d.ts
Expand Down
1 change: 1 addition & 0 deletions libs/langchain-community/langchain.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -242,6 +242,7 @@ export const config = {
"stores/message/convex": "stores/message/convex",
"stores/message/dynamodb": "stores/message/dynamodb",
"stores/message/firestore": "stores/message/firestore",
"stores/message/file_system": "stores/message/file_system",
"stores/message/in_memory": "stores/message/in_memory",
"stores/message/ipfs_datastore": "stores/message/ipfs_datastore",
"stores/message/ioredis": "stores/message/ioredis",
Expand Down
13 changes: 13 additions & 0 deletions libs/langchain-community/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2449,6 +2449,15 @@
"import": "./stores/message/firestore.js",
"require": "./stores/message/firestore.cjs"
},
"./stores/message/file_system": {
"types": {
"import": "./stores/message/file_system.d.ts",
"require": "./stores/message/file_system.d.cts",
"default": "./stores/message/file_system.d.ts"
},
"import": "./stores/message/file_system.js",
"require": "./stores/message/file_system.cjs"
},
"./stores/message/in_memory": {
"types": {
"import": "./stores/message/in_memory.d.ts",
Expand Down Expand Up @@ -3873,6 +3882,10 @@
"stores/message/firestore.js",
"stores/message/firestore.d.ts",
"stores/message/firestore.d.cts",
"stores/message/file_system.cjs",
"stores/message/file_system.js",
"stores/message/file_system.d.ts",
"stores/message/file_system.d.cts",
"stores/message/in_memory.cjs",
"stores/message/in_memory.js",
"stores/message/in_memory.d.ts",
Expand Down
1 change: 1 addition & 0 deletions libs/langchain-community/src/load/import_map.ts
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ export * as caches__upstash_redis from "../caches/upstash_redis.js";
export * as stores__doc__base from "../stores/doc/base.js";
export * as stores__doc__gcs from "../stores/doc/gcs.js";
export * as stores__doc__in_memory from "../stores/doc/in_memory.js";
export * as stores__message__file_system from "../stores/message/file_system.js";
export * as stores__message__in_memory from "../stores/message/in_memory.js";
export * as memory__chat_memory from "../memory/chat_memory.js";
export * as indexes__base from "../indexes/base.js";
Expand Down
199 changes: 199 additions & 0 deletions libs/langchain-community/src/stores/message/file_system.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,199 @@
import { promises as fs } from "node:fs";
import { dirname } from "node:path";

import { BaseListChatMessageHistory } from "@langchain/core/chat_history";
import {
BaseMessage,
StoredMessage,
mapChatMessagesToStoredMessages,
mapStoredMessagesToChatMessages,
} from "@langchain/core/messages";

// Default location of the backing JSON file when none is supplied.
export const FILE_HISTORY_DEFAULT_FILE_PATH = ".history/history.json";

/**
 * Represents a lightweight file chat session.
 */
export type FileChatSession = {
  // Session identifier (the key the session is stored under).
  id: string;
  // Arbitrary user-supplied metadata for the session (e.g. a title).
  context: Record<string, unknown>;
};

/**
 * Represents a stored chat session.
 */
export type StoredFileChatSession = FileChatSession & {
  // Serialized messages as produced by mapChatMessagesToStoredMessages.
  messages: StoredMessage[];
};

/**
 * Type for the store of chat sessions.
 * Shape: userId -> sessionId -> stored session.
 */
export type FileChatStore = {
  [userId: string]: Record<string, StoredFileChatSession>;
};

/**
 * Type for the input to the `FileSystemChatMessageHistory` constructor.
 */
export interface FileSystemChatMessageHistoryInput {
  // Required: identifies the conversation within a user's sessions.
  sessionId: string;
  // Optional: namespaces sessions per user; defaults to "" (single-user).
  userId?: string;
  // Optional: path of the JSON file; defaults to FILE_HISTORY_DEFAULT_FILE_PATH.
  filePath?: string;
}

/**
 * Store chat message history using a local JSON file.
 * For demo and development purposes only.
 *
 * @example
 * ```typescript
 * const model = new ChatOpenAI({
 *   model: "gpt-3.5-turbo",
 *   temperature: 0,
 * });
 * const prompt = ChatPromptTemplate.fromMessages([
 *   [
 *     "system",
 *     "You are a helpful assistant. Answer all questions to the best of your ability.",
 *   ],
 *   ["placeholder", "chat_history"],
 *   ["human", "{input}"],
 * ]);
 *
 * const chain = prompt.pipe(model).pipe(new StringOutputParser());
 * const chainWithHistory = new RunnableWithMessageHistory({
 *   runnable: chain,
 *   inputMessagesKey: "input",
 *   historyMessagesKey: "chat_history",
 *   getMessageHistory: async (sessionId) => {
 *     const chatHistory = new FileSystemChatMessageHistory({
 *       sessionId: sessionId,
 *       userId: "userId", // Optional
 *     })
 *     return chatHistory;
 *   },
 * });
 * await chainWithHistory.invoke(
 *   { input: "What did I just say my name was?" },
 *   { configurable: { sessionId: "session-id" } }
 * );
 * ```
 */
export class FileSystemChatMessageHistory extends BaseListChatMessageHistory {
  lc_namespace = ["langchain", "stores", "message", "file"];

  private sessionId: string;

  private userId: string;

  private filePath: string;

  // Lazily-loaded cache of the backing file. Kept per-instance (not
  // module-level) so two instances pointing at different filePaths never
  // share or clobber each other's data.
  private store?: FileChatStore;

  constructor(chatHistoryInput: FileSystemChatMessageHistoryInput) {
    super();

    this.sessionId = chatHistoryInput.sessionId;
    this.userId = chatHistoryInput.userId ?? "";
    this.filePath = chatHistoryInput.filePath ?? FILE_HISTORY_DEFAULT_FILE_PATH;
  }

  /**
   * Loads the store from disk on first use and returns the cached copy
   * afterwards.
   */
  private async init(): Promise<FileChatStore> {
    if (this.store === undefined) {
      this.store = await this.loadStore();
    }
    return this.store;
  }

  /**
   * Reads and parses the backing JSON file. A missing file is not an
   * error — it simply means no history has been saved yet.
   */
  protected async loadStore(): Promise<FileChatStore> {
    try {
      // readFile throws ENOENT for a missing file, so a separate
      // fs.access existence check would be a redundant TOCTOU race.
      const raw = await fs.readFile(this.filePath, "utf-8");
      return JSON.parse(raw) as FileChatStore;
    } catch (_error) {
      const error = _error as NodeJS.ErrnoException;
      if (error.code === "ENOENT") {
        return {};
      }
      throw new Error(
        `Error loading FileSystemChatMessageHistory store: ${error}`
      );
    }
  }

  /**
   * Persists the in-memory store to disk, creating the parent directory
   * if needed.
   */
  protected async saveStore(): Promise<void> {
    try {
      await fs.mkdir(dirname(this.filePath), { recursive: true });
      await fs.writeFile(this.filePath, JSON.stringify(this.store ?? {}));
    } catch (error) {
      throw new Error(
        `Error saving FileSystemChatMessageHistory store: ${error}`
      );
    }
  }

  /** Returns all messages of the current session, oldest first. */
  async getMessages(): Promise<BaseMessage[]> {
    const store = await this.init();
    const messages = store[this.userId]?.[this.sessionId]?.messages ?? [];
    return mapStoredMessagesToChatMessages(messages);
  }

  /** Appends a message to the current session and saves the store. */
  async addMessage(message: BaseMessage): Promise<void> {
    const store = await this.init();
    const messages = await this.getMessages();
    messages.push(message);
    const storedMessages = mapChatMessagesToStoredMessages(messages);
    store[this.userId] ??= {};
    store[this.userId][this.sessionId] = {
      ...store[this.userId][this.sessionId],
      messages: storedMessages,
    };
    await this.saveStore();
  }

  /** Deletes the current session (messages and context) and saves. */
  async clear(): Promise<void> {
    const store = await this.init();
    if (store[this.userId]) {
      delete store[this.userId][this.sessionId];
    }
    await this.saveStore();
  }

  /** Returns the current session's context, or {} if none was set. */
  async getContext(): Promise<Record<string, unknown>> {
    const store = await this.init();
    return store[this.userId]?.[this.sessionId]?.context ?? {};
  }

  /** Replaces the current session's context and saves the store. */
  async setContext(context: Record<string, unknown>): Promise<void> {
    const store = await this.init();
    store[this.userId] ??= {};
    store[this.userId][this.sessionId] = {
      ...store[this.userId][this.sessionId],
      context,
    };
    await this.saveStore();
  }

  /** Deletes every session belonging to this user and saves. */
  async clearAllSessions(): Promise<void> {
    const store = await this.init();
    delete store[this.userId];
    await this.saveStore();
  }

  /** Lists all sessions of this user (id + context, without messages). */
  async getAllSessions(): Promise<FileChatSession[]> {
    const store = await this.init();
    const userSessions = store[this.userId] ?? {};
    // The stored records never carry an `id` field themselves — sessions
    // are keyed by sessionId — so derive the id from the store key
    // instead of reading the (always-undefined) `session.id` property.
    return Object.entries(userSessions).map(([id, session]) => ({
      id,
      context: session.context ?? {},
    }));
  }
}
Loading
Loading