Skip to content

Commit

Permalink
Add chat history to prompt for non-RAG workflow.
Browse files Browse the repository at this point in the history
  • Loading branch information
andrewnguonly committed Mar 11, 2024
1 parent f93000f commit 7480958
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 3 deletions.
2 changes: 1 addition & 1 deletion src/components/ChatBar.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ import ChatHistory from "./ChatHistory";
import "@chatscope/chat-ui-kit-styles/dist/default/styles.min.css";
import "./ChatBar.css";

class LumosMessage {
export class LumosMessage {
constructor(
public sender: string,
public message: string,
Expand Down
30 changes: 28 additions & 2 deletions src/scripts/background.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { Document } from "@langchain/core/documents";
import { AIMessage, BaseMessage, HumanMessage } from "@langchain/core/messages";
import { StringOutputParser } from "@langchain/core/output_parsers";
import { PromptTemplate } from "@langchain/core/prompts";
import { ChatPromptTemplate, PromptTemplate } from "@langchain/core/prompts";
import {
RunnableSequence,
RunnablePassthrough,
Expand All @@ -11,6 +12,7 @@ import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
import { formatDocumentsAsString } from "langchain/util/document";
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama";
import { Ollama } from "@langchain/community/llms/ollama";
import { LumosMessage } from "../components/ChatBar";
import {
Calculator,
CLS_CALC_PROMPT,
Expand Down Expand Up @@ -88,6 +90,24 @@ const classifyPrompt = async (
});
};

const getMessages = async (): Promise<BaseMessage[]> => {
const data = await chrome.storage.session.get(["messages"]);

if (data.messages) {
const msgs = data.messages as LumosMessage[];
return msgs.map((msg: LumosMessage) => {
return msg.sender === "user"
? new HumanMessage({
content: msg.message,
})
: new AIMessage({
content: msg.message,
});
});
}
return [];
};

/**
 * Heuristic for the number of nearest-neighbor documents (k) to retrieve:
 * the ceiling of the square root of the document count.
 *
 * @param documentsCount total number of documents in the vector store
 * @returns k, rounded up to the nearest integer
 */
const computeK = (documentsCount: number): number =>
  Math.ceil(Math.sqrt(documentsCount));
Expand Down Expand Up @@ -148,15 +168,21 @@ chrome.runtime.onMessage.addListener(async (request) => {
return executeCalculatorTool(prompt);
}

// create prompt
const chatPrompt = ChatPromptTemplate.fromMessages(await getMessages());

// create model
const model = new Ollama({
baseUrl: options.ollamaHost,
model: options.ollamaModel,
keepAlive: DEFAULT_KEEP_ALIVE,
});

// create chain
const chain = chatPrompt.pipe(model);

// stream response chunks
const stream = await model.stream(prompt);
const stream = await chain.stream(prompt);
streamChunks(stream);
}

Expand Down

0 comments on commit 7480958

Please sign in to comment.