From 760ff1840ce3b9c71f2ff36f3a2bb78894946f10 Mon Sep 17 00:00:00 2001
From: binary-husky
Date: Fri, 15 Sep 2023 17:08:23 +0800
Subject: [PATCH] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E4=B8=80=E4=B8=AA=E5=BE=AA?=
 =?UTF-8?q?=E7=8E=AF=E7=9A=84Bug?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 request_llm/bridge_chatgpt.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/request_llm/bridge_chatgpt.py b/request_llm/bridge_chatgpt.py
index 52290d6e76..a1b6ba47d3 100644
--- a/request_llm/bridge_chatgpt.py
+++ b/request_llm/bridge_chatgpt.py
@@ -72,6 +72,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
 
     stream_response = response.iter_lines()
     result = ''
+    json_data = None
     while True:
         try: chunk = next(stream_response).decode()
         except StopIteration:
@@ -90,20 +91,21 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
             delta = json_data["delta"]
             if len(delta) == 0: break
             if "role" in delta: continue
-            if "content" in delta:
+            if "content" in delta: 
                 result += delta["content"]
                 if not console_slience: print(delta["content"], end='')
                 if observe_window is not None: 
                     # 观测窗,把已经获取的数据显示出去
-                    if len(observe_window) >= 1: observe_window[0] += delta["content"]
+                    if len(observe_window) >= 1:
+                        observe_window[0] += delta["content"]
                     # 看门狗,如果超过期限没有喂狗,则终止
-                    if len(observe_window) >= 2:
+                    if len(observe_window) >= 2: 
                         if (time.time()-observe_window[1]) > watch_dog_patience:
                             raise RuntimeError("用户取消了程序。")
             else: raise RuntimeError("意外Json结构:"+delta)
-        if json_data['finish_reason'] == 'content_filter':
+        if json_data and json_data['finish_reason'] == 'content_filter':
             raise RuntimeError("由于提问含不合规内容被Azure过滤。")
-        if json_data['finish_reason'] == 'length':
+        if json_data and json_data['finish_reason'] == 'length':
             raise ConnectionAbortedError("正常结束,但显示Token不足,导致输出不完整,请削减单次输入的文本量。")
     return result
 