-
Notifications
You must be signed in to change notification settings - Fork 0
/
9_chat_with_memory.py
28 lines (24 loc) · 1.13 KB
/
9_chat_with_memory.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
import load_keys
from langchain.prompts import (
ChatPromptTemplate,
MessagesPlaceholder,
SystemMessagePromptTemplate,
HumanMessagePromptTemplate
)
from langchain.chains import ConversationChain
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
# System prompt establishing the assistant's persona for the whole conversation.
instructions = "The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know."

# Chat prompt layout: system instructions first, then the accumulated message
# history, then the latest human turn (bound to the "input" template variable).
prompt = ChatPromptTemplate.from_messages([
    SystemMessagePromptTemplate.from_template(instructions),
    MessagesPlaceholder(variable_name="history"),
    HumanMessagePromptTemplate.from_template("{input}")
])

# temperature=0 for (near-)deterministic replies. return_messages=True makes the
# buffer memory yield message objects rather than a flat string, which is what
# MessagesPlaceholder expects to receive under the "history" key.
llm = ChatOpenAI(temperature=0)
memory = ConversationBufferMemory(return_messages=True)
conversation = ConversationChain(memory=memory, prompt=prompt, llm=llm)

# Scripted user turns; each predict() call sees the history of all prior turns.
input_list = ["Hi there!", "I'm doing well! Just having a conversation with an AI.", "Tell me about yourself."]
for user_input in input_list:  # renamed from `input` to avoid shadowing the builtin
    print(user_input)
    result = conversation.predict(input=user_input)
    print(result)