-
Notifications
You must be signed in to change notification settings - Fork 270
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
xusenlin
committed
Nov 7, 2023
1 parent
a81b502
commit eec8154
Showing
10 changed files
with
364 additions
and
11 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
121 changes: 121 additions & 0 deletions
121
streamlit-demo/streamlit_gallery/components/code_interpreter/streamlit_app.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,121 @@ | ||
import os | ||
|
||
import openai | ||
import streamlit as st | ||
|
||
from .utils import CodeKernel, extract_code, execute, postprocess_text | ||
|
||
|
||
@st.cache_resource
def get_kernel():
    """Return the process-wide code-execution kernel.

    Streamlit's ``cache_resource`` ensures a single ``CodeKernel`` instance
    is created and shared across reruns and sessions.
    """
    kernel = CodeKernel()
    return kernel
|
||
|
||
# System prompt (in Chinese) prepended to every request: it primes the model
# ("ChatGLM") as an offline code-interpreter assistant that can run Python,
# iterate on errors, and read user-uploaded files stored under /mnt/data/.
SYSTEM_MESSAGE = [
    {
        "role": "system",
        "content": "你是一位智能AI助手,你叫ChatGLM,你连接着一台电脑,但请注意不能联网。在使用Python解决任务时,你可以运行代码并得到结果,如果运行结果有错误,你需要尽可能对代码进行改进。你可以处理用户上传到电脑上的文件,文件默认存储路径是/mnt/data/。"
    }
]
|
||
|
||
def chat_once(message_placeholder):
    """Run one assistant turn, streaming the reply into *message_placeholder*.

    Streams a chat completion from the backend. When the model finishes with a
    ``function_call``, the code block is extracted from the partial response,
    executed in the shared kernel, and the result is appended as a ``function``
    message before re-querying the model. At most 5 model round-trips are made
    per user turn; a plain ``stop`` ends the turn.

    Args:
        message_placeholder: Streamlit placeholder the streamed markdown is
            rendered into.
    """
    params = dict(
        model="chatglm3",
        messages=SYSTEM_MESSAGE + st.session_state.messages,
        stream=True,
        max_tokens=st.session_state.get("max_tokens", 512),
        temperature=st.session_state.get("temperature", 0.9),
    )
    response = openai.ChatCompletion.create(**params)

    display = ""  # everything shown to the user, accumulated across round-trips
    for _ in range(5):
        full_response = ""  # raw text of the current model response only
        for chunk in response:
            content = chunk.choices[0].delta.get("content", "")
            full_response += content
            display += content
            # "▌" is the streaming cursor while tokens are still arriving.
            message_placeholder.markdown(postprocess_text(display) + "▌")

            if chunk.choices[0].finish_reason == "stop":
                # Final render: drop the streaming cursor now that the turn is done.
                message_placeholder.markdown(postprocess_text(display))
                st.session_state.messages.append(
                    {
                        "role": "assistant",
                        "content": full_response
                    }
                )
                return

            elif chunk.choices[0].finish_reason == "function_call":
                try:
                    code = extract_code(full_response)
                except Exception:
                    # No runnable code block in the response yet — keep
                    # consuming chunks. (Was a bare `except:`, which also
                    # swallowed KeyboardInterrupt/SystemExit.)
                    continue

                with message_placeholder:
                    with st.spinner("Executing code..."):
                        try:
                            res_type, res = execute(code, get_kernel())
                        except Exception as e:
                            st.error(f"Error when executing code: {e}")
                            return

                if res_type == "text":
                    res = postprocess_text(res)
                    display += "\n" + res
                    message_placeholder.markdown(postprocess_text(display) + "▌")
                elif res_type == "image":
                    st.image(res)

                # Record both the model's function call and the tool result so
                # the next round-trip sees the full exchange.
                st.session_state.messages.append(
                    {
                        "role": "assistant",
                        "content": full_response,
                        "function_call": {"name": "interpreter", "arguments": ""},
                    }
                )
                st.session_state.messages.append(
                    {
                        "role": "function",
                        "content": "[Image]" if res_type == "image" else res,  # result returned by the tool call
                    }
                )

                break

        # Re-query with the updated history (includes the function result).
        params["messages"] = st.session_state.messages
        response = openai.ChatCompletion.create(**params)
|
||
|
||
def main():
    """Render the Code Interpreter chat page and process one user turn."""
    st.title("💬 Code Interpreter")

    # Point the OpenAI client at the chat backend; both values are
    # overridable through environment variables.
    openai.api_base = os.getenv("INTERPRETER_CHAT_API_BASE", "http://192.168.20.59:7891/v1")
    openai.api_key = os.getenv("API_KEY", "xxx")

    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the conversation so far. Function (tool) results render on the
    # user side as-is; assistant text is post-processed before display.
    for msg in st.session_state.messages:
        shown_as = "user" if msg["role"] in ("user", "function") else "assistant"
        with st.chat_message(shown_as):
            body = msg["content"]
            st.markdown(body if shown_as == "user" else postprocess_text(body))

    prompt = st.chat_input("What is up?")
    if prompt:
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            placeholder = st.empty()
            chat_once(placeholder)
|
||
|
||
# Script entry point when executed directly (Streamlit pages may also import
# and call main() from the gallery).
if __name__ == "__main__":
    main()
Oops, something went wrong.