From 141b747653800b7344d06ecc4a66d5d9f3bd8af7 Mon Sep 17 00:00:00 2001
From: wurui
Date: Wed, 11 Sep 2024 10:33:01 +0800
Subject: [PATCH] feat(llm-bridge): Azure OpenAI Provider returns the token
 usage when requested with include_usage (#906)

as described in:

- https://github.com/Azure/azure-sdk-for-net/issues/44237

Azure OpenAI has added support for this feature.
---
 pkg/bridge/ai/service.go | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/pkg/bridge/ai/service.go b/pkg/bridge/ai/service.go
index 7aaac147d..4cef9e8ab 100644
--- a/pkg/bridge/ai/service.go
+++ b/pkg/bridge/ai/service.go
@@ -291,10 +291,10 @@ func (srv *Service) GetChatCompletions(ctx context.Context, req openai.ChatCompl
 				completionUsage = streamRes.Usage.CompletionTokens
 				totalUsage = streamRes.Usage.TotalTokens
 			}
-			if len(streamRes.Choices) == 0 {
-				continue
-			}
-			if tc := streamRes.Choices[0].Delta.ToolCalls; len(tc) > 0 {
+
+			choices := streamRes.Choices
+			if len(choices) > 0 && len(choices[0].Delta.ToolCalls) > 0 {
+				tc := choices[0].Delta.ToolCalls
 				isFunctionCall = true
 				if j == 0 {
 					firstCallSpan.End()
@@ -321,7 +321,7 @@ func (srv *Service) GetChatCompletions(ctx context.Context, req openai.ChatCompl
 					toolCallsMap[index] = item
 				}
 				j++
-			} else if streamRes.Choices[0].FinishReason != openai.FinishReasonToolCalls {
+			} else if !isFunctionCall {
 				_ = writeStreamEvent(w, flusher, streamRes)
 			}
 			if i == 0 && j == 0 && !isFunctionCall {
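
For context, a minimal caller-side sketch of enabling include_usage on a streaming request, assuming the github.com/sashabaranov/go-openai types this service already uses; the helper name `buildStreamingRequest`, the model constant, and the prompt are illustrative and not part of this patch. With include_usage set, the provider's final stream chunk carries Usage and an empty Choices slice, which is why the loop above can no longer `continue` past chunks that have no choices.

```go
package example

import openai "github.com/sashabaranov/go-openai"

// buildStreamingRequest is a hypothetical helper showing how a caller would
// opt in to per-stream token usage reporting.
func buildStreamingRequest(prompt string) openai.ChatCompletionRequest {
	return openai.ChatCompletionRequest{
		Model:  openai.GPT4o, // illustrative model choice
		Stream: true,
		// include_usage asks the provider to append a final chunk that
		// contains Usage and no Choices; the service loop must therefore
		// tolerate usage-only chunks instead of skipping them.
		StreamOptions: &openai.StreamOptions{IncludeUsage: true},
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: prompt},
		},
	}
}
```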