Skip to content

Commit

Permalink
chore (chat): migrate prompts to LangSmith for active testing
Browse files Browse the repository at this point in the history
  • Loading branch information
karahan-sahin committed Feb 12, 2024
1 parent 032ce96 commit 953890e
Showing 1 changed file with 32 additions and 38 deletions.
70 changes: 32 additions & 38 deletions indexer/src/app/modules/agent.module.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { ChatOpenAI, OpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { AIMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, PromptTemplate } from "@langchain/core/prompts";
import { ConversationalRetrievalQAChain, LLMChain, RetrievalQAChain } from "langchain/chains";
import { RunnableBranch, RunnableLambda, RunnableParallel, RunnablePassthrough, RunnableSequence } from "@langchain/core/runnables";
import { Runnable, RunnableBranch, RunnableLambda, RunnableParallel, RunnablePassthrough, RunnableSequence } from "@langchain/core/runnables";
import { formatDocumentsAsString } from "langchain/util/document";

import { DynamicModule, Inject, Logger, Module } from '@nestjs/common';
Expand All @@ -24,12 +24,6 @@ export class Agent {
// TODO: Can we also add context type by intent find?
// TODO: Research on low-computation models for subtasks


// IMPLEMENTATION TODOS:
// TODO: Add initial filtering according to the index (rag)
// TODO: Add initial filtering according to the index (retriever)
// TODO: Add standalone question pipeline

constructor (
) {
const apiKey = process.env.OPENAI_API_KEY;
Expand Down Expand Up @@ -94,31 +88,35 @@ export class Agent {

// TODO: Prior information context -> glossary, etc.
// Prompt link: https://langstream.ai/2023/10/13/rag-chatbot-with-conversation/
const questionPrompt = PromptTemplate.fromTemplate(`
Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.
The user will give you a question without context. You will reformulate the question to take into account the context of the conversation.
You should also consult with the Chat History below when reformulating the question.
For example, you will substitute pronouns for mostly likely noun in the conversation history.
When reformulating the question give higher value to the latest question and response in the Chat History.
The chat history is in reverse chronological order, so the most recent exchange is at the top.
Chat History:
{chat_history}
----------------
Follow Up Input: {question}
----------------
Standalone question:
`);

const answerPrompt = PromptTemplate.fromTemplate(`
Answer the question based only on the following context:
----------------
CONTEXT: {context}
----------------
QUESTION: {question}
`);

// const questionPrompt = PromptTemplate.fromTemplate(`
// Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.

// The user will give you a question without context. You will reformulate the question to take into account the context of the conversation.
// You should also consult with the Chat History below when reformulating the question.
// For example, you will substitute pronouns for the most likely noun in the conversation history.

// When reformulating the question give higher value to the latest question and response in the Chat History.
// The chat history is in reverse chronological order, so the most recent exchange is at the top.

// Chat History:
// {chat_history}
// ----------------
// Follow Up Input: {question}
// ----------------
// Standalone question:
// `);

// const answerPrompt = PromptTemplate.fromTemplate(`
// Answer the question based only on the following context:
// ----------------
// CONTEXT: {context}
// ----------------
// QUESTION: {question}
// `);

const questionPrompt = await pull("seref/standalone_question_index")
const answerPrompt = await pull("seref/answer_generation_prompt")

const formatChatHistory = (chatHistory: string | string[]) => {
if (Array.isArray(chatHistory)) {
Expand All @@ -134,13 +132,12 @@ export class Agent {
return '';
}

// const selfAskPrompt = await pull("hwchase17/self-ask-with-search")
const standalone_question = RunnableSequence.from([
{
question: (input) => input.question,
chat_history: (input) => formatChatHistory(input.chat_history),
},
questionPrompt,
questionPrompt as any,
model,
new StringOutputParser(),
]);
Expand Down Expand Up @@ -176,7 +173,7 @@ export class Agent {
return input.question
},
},
answerPrompt,
answerPrompt as any,
model,
new StringOutputParser(),
]),
Expand Down Expand Up @@ -270,9 +267,6 @@ export class Agent {
return final_chain;
}


//* Helper functions

}


Expand Down

0 comments on commit 953890e

Please sign in to comment.