Add more functions #364

Open · wants to merge 6 commits into base: master
236 changes: 176 additions & 60 deletions model/bing/new_bing_model.py
@@ -6,25 +6,23 @@
from EdgeGPT import Chatbot, ConversationStyle
from ImageGen import ImageGen
from common import functions
from model.bing.jailbroken_sydney import SydneyBot
import random
import json

user_session = dict()
user_chat_history = dict()
suggestion_session = dict()
# NewBing chat model, via the reverse-engineered web chat API
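# user_session caches the per-user Chatbot instance, user_chat_history stores a
# [conversation_style, transcript] pair per user, and suggestion_session keeps the
# numbered follow-up suggestions last shown to that user (see build_source_text below).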


class BingModel(Model):

style = ConversationStyle.creative
bot: Chatbot = None
cookies: list = None
[Review comment · Contributor] cookies -> cookies_list

cookies_list = None
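# (Sketch of the reviewer's suggested rename, for illustration only; not part of this PR.
# Adopting it would also mean updating __init__ to use the renamed attribute, e.g.:)
#   self.cookies_list = model_conf_val("bing", "cookies")
#   self.bot = Chatbot(cookies=self.cookies_list)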

def __init__(self):
try:
self.cookies = model_conf_val("bing", "cookies")
self.jailbreak = model_conf_val("bing", "jailbreak")
self.bot = SydneyBot(cookies=self.cookies, options={}) if (
self.jailbreak) else Chatbot(cookies=self.cookies)
except Exception as e:
log.warn(e)

@@ -34,79 +32,155 @@ async def handle_answer(final, answer):
try:
reply = self.build_source_attributions(answer, context)
log.info("[NewBing] reply:{}", reply)
await bot.close()
yield True, reply
except Exception as e:
log.warn(answer)
log.warn(e)
await user_session.get(context['from_user_id'], None).reset()
await user_chat_history.get(context['from_user_id'], None).reset()
yield True, answer
else:
try:
yield False, answer
except Exception as e:
log.warn(answer)
log.warn(e)
await user_session.get(context['from_user_id'], None).reset()
await user_chat_history.get(context['from_user_id'], None).reset()
yield True, answer

if not context or not context.get('type') or context.get('type') == 'TEXT':
clear_memory_commands = common_conf_val(
'clear_memory_commands', ['#清除记忆'])
if query in clear_memory_commands:
user_session[context['from_user_id']] = None
user_chat_history[context['from_user_id']] = None
yield True, '记忆已清除'
bot = user_session.get(context['from_user_id'], None)
if not bot:
bot = self.bot

chat_style = ""
chat_history = ""
if user_chat_history.get(context['from_user_id'], None) == None:
if (self.jailbreak):
chars = model_conf_val("bing", "jailbreak_prompt")
chars = chars + "\n\n"
chat_history = ''.join(chars)
user_chat_history[context['from_user_id']] = ['creative', chat_history]
else:
query = self.get_quick_ask_query(query, context)
user_session[context['from_user_id']] = bot
if not chat_history.endswith("\n\n"):
if chat_history.endswith("\n"):
chat_history += "\n"
else:
chat_history += "\n\n"
chat_style = user_chat_history[context['from_user_id']][0]
chat_history = user_chat_history[context['from_user_id']][1]

query = self.get_quick_ask_query(query, context)
bot = await Chatbot.create(cookies=self.cookies)
user_chat_history[context['from_user_id']][1] += f"[user](#message)\n{query}\n\n"
log.info("[NewBing] query={}".format(query))
if self.jailbreak:
async for final, answer in bot.ask_stream(query, conversation_style=self.style, message_id=bot.user_message_id):
async for result in handle_answer(final, answer):
yield result
else:
async for final, answer in bot.ask_stream(query, conversation_style=self.style):
async for result in handle_answer(final, answer):
yield result

def reply(self, query: str, context=None) -> tuple[str, dict]:
if not context or not context.get('type') or context.get('type') == 'TEXT':
clear_memory_commands = common_conf_val(
'clear_memory_commands', ['#清除记忆'])
if query in clear_memory_commands:
user_session[context['from_user_id']] = None
return '记忆已清除'
bot = user_session.get(context['from_user_id'], None)
if (bot == None):
bot = self.bot
else:
query = self.get_quick_ask_query(query, context)
async for final, answer in bot.ask_stream(prompt=query, raw=True, webpage_context=chat_history, conversation_style=chat_style, search_result=True):
async for result in handle_answer(final, answer):
yield result

user_session[context['from_user_id']] = bot
log.info("[NewBing] query={}".format(query))
if (self.jailbreak):
task = bot.ask(query, conversation_style=self.style,
message_id=bot.user_message_id)
else:
task = bot.ask(query, conversation_style=self.style)

answer = asyncio.run(task)
if isinstance(answer, str):
return answer
try:
reply = answer["item"]["messages"][-1]
except Exception as e:
user_session.get(context['from_user_id'], None).reset()
log.warn(answer)
return "本轮对话已超时,已开启新的一轮对话,请重新提问。"
return self.build_source_attributions(answer, context)
def reply(self, query: str, context=None):
if not context or not context.get('type') or context.get('type') == 'TEXT':
return asyncio.run(self.__reply(query, context))
elif context.get('type', None) == 'IMAGE_CREATE':
if functions.contain_chinese(query):
return "ImageGen目前仅支持使用英文关键词生成图片"
return self.create_img(query)

async def __reply(self, query: str, context):
clear_memory_commands = common_conf_val(
'clear_memory_commands', ['#清除记忆'])
if query in clear_memory_commands:
user_chat_history[context['from_user_id']] = None
return '记忆已清除'

# build or restore the per-user chat history
chat_style = ""
chat_history = ""
if user_chat_history.get(context['from_user_id'], None) == None:
if (self.jailbreak):
chars = model_conf_val("bing", "jailbreak_prompt")
chars = chars + "\n\n"
chat_history = ''.join(chars)
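# note: ''.join(chars) is effectively a no-op here, since chars must already be a str
# for the "\n\n" concatenation above to work; it only matters if jailbreak_prompt were
# configured as a list of lines (the config format is not shown in this PR).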
user_chat_history[context['from_user_id']] = ['creative', chat_history]
else:
if not chat_history.endswith("\n\n"):
if chat_history.endswith("\n"):
chat_history += "\n"
else:
chat_history += "\n\n"
chat_style = user_chat_history[context['from_user_id']][0]
chat_history = user_chat_history[context['from_user_id']][1]

query = self.get_quick_ask_query(query, context)
if query == "输入的序号不在建议列表范围中":
return "对不起,您输入的序号不在建议列表范围中(数字1-9均会被认为是建议列表),请重新输入。"
if "[style]已切换至" in query:
return query

log.info("[NewBing] query={}".format(query))
try:
bot = await Chatbot.create(cookies=self.cookies)
except Exception as e:
log.info(e)
return "RemoteProtocolError: Bing Server disconnected without sending a response."
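# (note) this disconnect is commonly seen when the configured Bing cookies are expired
# or invalid, or when the endpoint is unreachable; this is an assumption based on
# typical EdgeGPT failure modes, not something this PR asserts.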
reply_text = ""
reference = ""
suggestion = ""
async def stream_output():
nonlocal chat_history
nonlocal chat_style
nonlocal reply_text
nonlocal reference
nonlocal suggestion
chat_history += f"[user](#message)\n{query}\n\n"
wrote = 0
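# raw=True yields the unparsed websocket payloads, webpage_context injects the
# accumulated chat/jailbreak transcript, and search_result=True includes the hidden
# search-result messages; parameter semantics are inferred from how they are consumed
# below (EdgeGPT 0.3.5 behavior assumed).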
async for final, response in bot.ask_stream(
prompt=query,
raw=True,
webpage_context=chat_history,
conversation_style=chat_style,
search_result=True
):
if not final and response["type"] == 1 and "messages" in response["arguments"][0]:
message = response["arguments"][0]["messages"][0]
if message.get("messageType") == "InternalSearchQuery":
#chat_history += f"[assistant](#search_query)\n{message['hiddenText']}\n\n"
pass
elif message.get("messageType") == "InternalSearchResult":
#chat_history += f"[assistant](#search_results)\n{message['hiddenText']}\n\n"
reference += f"[assistant](#search_results)\n{message['hiddenText']}"
elif message.get("messageType") == None:
if "cursor" in response["arguments"][0]:
chat_history += "[assistant](#message)\n"
wrote = 0
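# contentOrigin == "Apology" means Bing has revoked the partially generated answer
# (the "content retracted" case logged below); stop streaming so the revoked text is
# not appended to the reply or the history.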
if message.get("contentOrigin") == "Apology":
log.info("检测到AI生成内容被撤回...已阻止")
break
else:
chat_history += message["text"][wrote:]
reply_text += message["text"][wrote:]
wrote = len(message["text"])
if "suggestedResponses" in message:
suggestion = list(map(lambda x: x["text"], message["suggestedResponses"]))
chat_history += f"""\n[assistant](#suggestions)
```json
{{"suggestedUserResponses": {suggestion}}}
```\n\n"""
break
if final and not response["item"]["messages"][-1].get("text"):
raise Exception("发送的消息被过滤或者对话超时")

try:
await stream_output()
except Exception as e:
log.info(e)

# update the stored conversation history
user_chat_history[context['from_user_id']][1] = chat_history
await bot.close()
return self.build_source_text(reply_text, reference, suggestion, context)

def create_img(self, query):
try:
log.info("[NewBing] image_query={}".format(query))
@@ -123,17 +197,30 @@ def get_quick_ask_query(self, query, context):
if (len(query) == 1 and query.isdigit() and query != "0"):
suggestion_dict = suggestion_session[context['from_user_id']]
if (suggestion_dict != None):
query = suggestion_dict[int(query)-1]
if (query == None):
try:
query = suggestion_dict[int(query)-1]
if (query == None):
return "输入的序号不在建议列表范围中"
else:
query = "在上面的基础上,"+query
except:
return "输入的序号不在建议列表范围中"
else:
query = "在上面的基础上,"+query
elif(query == "/creative"):
user_chat_history[context['from_user_id']][0] = query[1:]
return "[style]已切换至创造模式"
elif(query == "/balanced"):
user_chat_history[context['from_user_id']][0] = query[1:]
return "[style]已切换至平衡模式"
elif(query == "/precise"):
user_chat_history[context['from_user_id']][0] = query[1:]
return "[style]已切换至精确模式"
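# the /creative, /balanced and /precise commands store the bare style name in
# user_chat_history[...][0]; __reply later passes that value to ask_stream as
# conversation_style.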
return query

def build_source_attributions(self, answer, context):
reference = ""
reply = answer["item"]["messages"][-1]
reply_text = reply["text"]
user_chat_history[context['from_user_id']][1] += f"[assistant](#message)\n{reply_text}\n"
if "sourceAttributions" in reply:
for i, attribution in enumerate(reply["sourceAttributions"]):
display_name = attribution["providerDisplayName"]
@@ -159,7 +246,7 @@ def build_source_attributions(self, answer, context):
throttling_str = ""

if throttling["numUserMessagesInConversation"] == throttling["maxNumUserMessagesInConversation"]:
user_session.get(context['from_user_id'], None).reset()
user_chat_history.get(context['from_user_id'], None).reset()
throttling_str = "(对话轮次已达上限,本次聊天已结束,将开启新的对话)"
else:
throttling_str = f"对话轮次: {throttling['numUserMessagesInConversation']}/{throttling['maxNumUserMessagesInConversation']}\n"
@@ -168,6 +255,35 @@
log.info("[NewBing] reply={}", response)
return response
else:
user_session.get(context['from_user_id'], None).reset()
user_chat_history.get(context['from_user_id'], None).reset()
log.warn("[NewBing] reply={}", answer)
return "对话被接口拒绝,已开启新的一轮对话。"

def build_source_text(self, reply_text, reference, suggestion, context):
if not reply_text.endswith("\n\n"):
if reply_text.endswith("\n"):
reply_text += "\n"
else:
reply_text += "\n\n"

references = ""
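# reference was built in stream_output as "[assistant](#search_results)\n" + hiddenText,
# so index 29 is the first character after that header; [37:-4] strips what is assumed
# to be a "```json ... ```" fence around the web_search_results payload, and the 'W'
# check appears to skip hidden text that is not a JSON block (indices inferred from the
# string layout, not from any EdgeGPT documentation).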
if 'json' in reference and reference[29] != 'W':
reference_dict = json.loads(reference[37:-4])
for i in range(len(reference_dict['web_search_results'])):
r = reference_dict['web_search_results'][i]
title = r['title']
url = r['url']
references += f"{i+1}、[{title}]({url})\n\n"

suggestions = ""
suggestion_dict = dict()
if len(suggestion) > 0:
for i in range(len(suggestion)):
suggestion_dict[i] = suggestion[i]
suggestions += f">{i+1}、{suggestion[i]}\n\n"
suggestions = "=====\n💡你可能想问(输入序号):\n\n" + suggestions
suggestion_session[context['from_user_id']] = suggestion_dict
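# the numbered suggestions are cached per user here; when the user replies with a bare
# digit, get_quick_ask_query looks it up in suggestion_session and expands it back into
# the suggested question.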

response = f"{reply_text}******\n{references}{suggestions}******\n"
log.info("[NewBing] reply={}", response)
return response
2 changes: 1 addition & 1 deletion requirements.txt
@@ -3,7 +3,7 @@ flask
flask_socketio
itchat-uos==1.5.0.dev0
openai
EdgeGPT
EdgeGPT==0.3.5
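# pinned because the model code uses ask_stream(raw=..., webpage_context=..., search_result=...)
# as exposed around EdgeGPT 0.3.5; newer releases may change or remove these parameters.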
requests
discord.py>=2.0.0
wechatpy