From 059d8fea2b4726dc67c7bc66cda2e6ae69a64045 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E4=B8=80=E5=B8=86?=
Date: Wed, 22 Nov 2023 15:28:03 +0800
Subject: [PATCH 1/2] bugfix: fix index out of range error when Azure OpenAI
 responds with an empty chunk at first

---
 pilot/model/proxy/llms/chatgpt.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/pilot/model/proxy/llms/chatgpt.py b/pilot/model/proxy/llms/chatgpt.py
index 1da815bfa..92727689d 100644
--- a/pilot/model/proxy/llms/chatgpt.py
+++ b/pilot/model/proxy/llms/chatgpt.py
@@ -172,6 +172,11 @@ def chatgpt_generate_stream(
         res = client.chat.completions.create(messages=history, **payloads)
         text = ""
         for r in res:
+            # logger.info(str(r))
+            # Azure OpenAI responses may have an empty choices list in the first chunk,
+            # so skip it to avoid an index out of range error
+            if not r.get('choices'):
+                continue
             if r.choices[0].delta.content is not None:
                 content = r.choices[0].delta.content
                 text += content
@@ -186,6 +191,8 @@ def chatgpt_generate_stream(
 
         text = ""
         for r in res:
+            if not r.get('choices'):
+                continue
             if r["choices"][0]["delta"].get("content") is not None:
                 content = r["choices"][0]["delta"]["content"]
                 text += content
@@ -220,6 +227,8 @@ async def async_chatgpt_generate_stream(
         res = await client.chat.completions.create(messages=history, **payloads)
         text = ""
         for r in res:
+            if not r.get('choices'):
+                continue
             if r.choices[0].delta.content is not None:
                 content = r.choices[0].delta.content
                 text += content
@@ -233,6 +242,8 @@ async def async_chatgpt_generate_stream(
 
         text = ""
         async for r in res:
+            if not r.get('choices'):
+                continue
             if r["choices"][0]["delta"].get("content") is not None:
                 content = r["choices"][0]["delta"]["content"]
                 text += content

From 7498613c04da8095ff31be3b1000b9c6a4e4d7ac Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E4=B8=80=E5=B8=86?=
Date: Tue, 28 Nov 2023 11:38:54 +0800
Subject: [PATCH 2/2] use black to reformat code

---
 pilot/model/proxy/llms/chatgpt.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/pilot/model/proxy/llms/chatgpt.py b/pilot/model/proxy/llms/chatgpt.py
index 92727689d..d0adaf606 100644
--- a/pilot/model/proxy/llms/chatgpt.py
+++ b/pilot/model/proxy/llms/chatgpt.py
@@ -175,7 +175,7 @@ def chatgpt_generate_stream(
             # logger.info(str(r))
             # Azure OpenAI responses may have an empty choices list in the first chunk,
             # so skip it to avoid an index out of range error
-            if not r.get('choices'):
+            if not r.get("choices"):
                 continue
             if r.choices[0].delta.content is not None:
                 content = r.choices[0].delta.content
@@ -191,7 +191,7 @@ def chatgpt_generate_stream(
 
         text = ""
         for r in res:
-            if not r.get('choices'):
+            if not r.get("choices"):
                 continue
             if r["choices"][0]["delta"].get("content") is not None:
                 content = r["choices"][0]["delta"]["content"]
@@ -227,7 +227,7 @@ async def async_chatgpt_generate_stream(
         res = await client.chat.completions.create(messages=history, **payloads)
         text = ""
         for r in res:
-            if not r.get('choices'):
+            if not r.get("choices"):
                 continue
             if r.choices[0].delta.content is not None:
                 content = r.choices[0].delta.content
@@ -242,7 +242,7 @@ async def async_chatgpt_generate_stream(
 
         text = ""
         async for r in res:
-            if not r.get('choices'):
+            if not r.get("choices"):
                 continue
             if r["choices"][0]["delta"].get("content") is not None:
                 content = r["choices"][0]["delta"]["content"]
                 text += content
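
For context, here is a minimal standalone sketch (not part of the patch) of the same guard when consuming an Azure OpenAI stream with the openai>=1.0 SDK. The endpoint, API key, and deployment name are placeholders; the point is that a stream can begin with a chunk whose `choices` list is empty (commonly carrying Azure content-filter metadata), so the consumer must skip such chunks before indexing `choices[0]`.

```python
# Hedged sketch: skip empty-`choices` chunks when streaming from Azure OpenAI.
# Assumes openai>=1.0; endpoint, key, and deployment name are hypothetical.
from openai import AzureOpenAI

client = AzureOpenAI(
    azure_endpoint="https://my-resource.openai.azure.com",  # hypothetical
    api_key="my-api-key",                                   # hypothetical
    api_version="2023-07-01-preview",
)

stream = client.chat.completions.create(
    model="my-gpt-35-deployment",  # hypothetical Azure deployment name
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
)

text = ""
for chunk in stream:
    # An initial chunk may arrive with an empty choices list;
    # indexing choices[0] there would raise IndexError.
    if not chunk.choices:
        continue
    delta = chunk.choices[0].delta.content
    if delta is not None:
        text += delta
print(text)
```

The sketch uses the attribute-style access of the >=1.0 client, while the patch also covers the dict-style `r["choices"]` branches of the older SDK; in both cases, checking that `choices` is non-empty before indexing is the minimal guard and avoids wrapping the loop body in a try/except.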