-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathchainlit_app.py
84 lines (76 loc) · 2.55 KB
/
chainlit_app.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
# Agent wiring: the graph builder, message/config types, and the system prompt
# all come from the local `agent` module.
from agent import graph_builder, Config, Configurable, ToolMessage, HumanMessage, system_prompt, RunnableConfig
from langgraph.checkpoint.memory import MemorySaver
# NOTE(review): duplicate import — Config and Configurable are already
# imported two lines above; this line is redundant and safe to delete.
from agent import Config, Configurable
import chainlit as cl
# In-process checkpointer: conversation state is kept per thread_id for the
# lifetime of this server process only (lost on restart).
memory = MemorySaver()
graph = graph_builder.compile(checkpointer=memory)
@cl.on_chat_start
async def on_chat_start():
    """Greet the user when a new chat session starts.

    Streams the graph's response to an initial "Hello!" message into a
    single Chainlit message. The Chainlit session id is used as the
    LangGraph thread id, so each chat gets its own checkpointed thread.
    """
    config = Config(
        configurable=Configurable(
            thread_id=cl.context.session.id  # one checkpoint thread per session
        )
    ).model_dump()
    cb = cl.LangchainCallbackHandler()
    final_answer = cl.Message(content="")
    # Use the async streaming API: the original synchronous graph.stream()
    # loop blocked the event loop while tokens were generated, stalling
    # every other connected session. (Also dropped the per-chunk
    # pretty_print() debug output.)
    async for msg, metadata in graph.astream(
        {
            "messages": [
                system_prompt,
                HumanMessage(content="Hello!")
            ]
        },
        stream_mode="messages",
        config=RunnableConfig(callbacks=[cb], **config),
    ):
        # Forward only assistant tokens; skip echoed human input and raw
        # tool outputs.
        if msg.content and not isinstance(msg, (HumanMessage, ToolMessage)):
            await final_answer.stream_token(msg.content)
    await final_answer.send()
@cl.on_message
async def on_message(umsg: cl.Message):
    """Stream the agent's reply to an incoming user message.

    Args:
        umsg: The Chainlit message holding the user's text (and any
            attached elements, which are currently unsupported).
    """
    config = Config(
        configurable=Configurable(
            thread_id=cl.context.session.id  # resume this session's checkpoint thread
        )
    ).model_dump()
    cb = cl.LangchainCallbackHandler()
    final_answer = cl.Message(content="")
    # File/image attachments are not supported: answer immediately rather
    # than forwarding them to the model.
    if umsg.elements:
        await final_answer.stream_token("The Model currently Does not support File Input")
        await final_answer.send()
        return
    # NOTE(review): system_prompt is re-sent on every turn even though the
    # checkpointer already persists the conversation — confirm the agent's
    # message reducer deduplicates it, or send it only on the first turn.
    # Async streaming so the event loop is not blocked during generation
    # (the original synchronous graph.stream() stalled other sessions).
    async for msg, metadata in graph.astream(
        {
            "messages": [
                system_prompt,
                HumanMessage(content=umsg.content)
            ]
        },
        stream_mode="messages",
        config=RunnableConfig(callbacks=[cb], **config),
    ):
        if isinstance(msg, ToolMessage):
            msg.pretty_print()  # surface tool output in server logs for debugging
        # Stream only assistant tokens to the UI; skip human echoes and
        # tool results.
        if msg.content and not isinstance(msg, (HumanMessage, ToolMessage)):
            await final_answer.stream_token(msg.content)
    await final_answer.send()