From af2888d3947654668ff01f9c6606fcfa0d0f6381 Mon Sep 17 00:00:00 2001
From: -LAN-
Date: Mon, 23 Dec 2024 17:53:57 +0800
Subject: [PATCH] fix: remove json_schema if response format is disabled. (#12014)

Signed-off-by: -LAN-
---
 api/core/model_runtime/model_providers/openai/llm/llm.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/api/core/model_runtime/model_providers/openai/llm/llm.py b/api/core/model_runtime/model_providers/openai/llm/llm.py
index b73ce8752f13f3..73cd7e3c341881 100644
--- a/api/core/model_runtime/model_providers/openai/llm/llm.py
+++ b/api/core/model_runtime/model_providers/openai/llm/llm.py
@@ -421,7 +421,11 @@ def _generate(
 
         # text completion model
         response = client.completions.create(
-            prompt=prompt_messages[0].content, model=model, stream=stream, **model_parameters, **extra_model_kwargs
+            prompt=prompt_messages[0].content,
+            model=model,
+            stream=stream,
+            **model_parameters,
+            **extra_model_kwargs,
         )
 
         if stream:
@@ -593,6 +597,8 @@ def _chat_generate(
                 model_parameters["response_format"] = {"type": "json_schema", "json_schema": schema}
             else:
                 model_parameters["response_format"] = {"type": response_format}
+        elif "json_schema" in model_parameters:
+            del model_parameters["json_schema"]
 
         extra_model_kwargs = {}
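
For context, below is a minimal sketch (not the actual Dify implementation) of the
response_format handling this patch corrects. It assumes model_parameters is a plain
dict that is later splatted into client.chat.completions.create(**model_parameters);
the helper name normalize_response_format and the simplified schema wrapping are
hypothetical, standing in for the fuller logic in _chat_generate.

    # Hypothetical, simplified stand-in for the patched branch in _chat_generate.
    def normalize_response_format(model_parameters: dict) -> dict:
        response_format = model_parameters.get("response_format")
        if response_format:
            if response_format == "json_schema":
                # Structured-output path: consume the user-supplied schema and
                # wrap it in the shape the OpenAI API expects.
                schema = model_parameters.pop("json_schema", None)
                model_parameters["response_format"] = {
                    "type": "json_schema",
                    "json_schema": schema,
                }
            else:
                model_parameters["response_format"] = {"type": response_format}
        elif "json_schema" in model_parameters:
            # Response format disabled: drop the stale json_schema key so it is
            # not forwarded to the OpenAI client as an unexpected argument.
            del model_parameters["json_schema"]
        return model_parameters

    # Before the patch, a leftover json_schema key survived when response_format
    # was disabled and leaked into the API call; after the patch it is removed.
    print(normalize_response_format({"json_schema": "{...}"}))  # -> {}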