From 705c2b44b5b710e5b15e7b0bab9fe929e22e8f2e Mon Sep 17 00:00:00 2001 From: Stephen Toub Date: Tue, 22 Oct 2024 16:03:05 -0400 Subject: [PATCH] Update M.E.AI.AzureAIInference for its beta2 release - Adapt to breaking changes - Temporarily work around lack of Index on streaming updates - Add streaming usage support - Add an embedding generator --- eng/packages/General.props | 2 +- .../Embeddings/Embedding.cs | 2 + .../AzureAIChatToolJson.cs | 25 +++ .../AzureAIInferenceChatClient.cs | 152 +++++++-------- .../AzureAIInferenceEmbeddingGenerator.cs | 178 ++++++++++++++++++ .../AzureAIInferenceExtensions.cs | 12 +- .../JsonContext.cs | 70 +++++++ ...reAIInferenceChatClientIntegrationTests.cs | 5 - .../AzureAIInferenceChatClientTests.cs | 15 +- ...renceEmbeddingGeneratorIntegrationTests.cs | 13 ++ ...AzureAIInferenceEmbeddingGeneratorTests.cs | 135 +++++++++++++ .../IntegrationTestHelpers.cs | 31 +-- .../ChatClientIntegrationTests.cs | 8 +- 13 files changed, 531 insertions(+), 117 deletions(-) create mode 100644 src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIChatToolJson.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs create mode 100644 src/Libraries/Microsoft.Extensions.AI.AzureAIInference/JsonContext.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorIntegrationTests.cs create mode 100644 test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorTests.cs diff --git a/eng/packages/General.props b/eng/packages/General.props index fbefcb50550..9c54a2351ab 100644 --- a/eng/packages/General.props +++ b/eng/packages/General.props @@ -1,7 +1,7 @@ - + diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/Embedding.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/Embedding.cs index e70469eaed3..19b8feaa182 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/Embedding.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/Embedding.cs @@ -14,6 +14,8 @@ namespace Microsoft.Extensions.AI; #endif [JsonDerivedType(typeof(Embedding), typeDiscriminator: "floats")] [JsonDerivedType(typeof(Embedding), typeDiscriminator: "doubles")] +[JsonDerivedType(typeof(Embedding), typeDiscriminator: "bytes")] +[JsonDerivedType(typeof(Embedding), typeDiscriminator: "sbytes")] public class Embedding { /// Initializes a new instance of the class. diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIChatToolJson.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIChatToolJson.cs new file mode 100644 index 00000000000..77e675c0830 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIChatToolJson.cs @@ -0,0 +1,25 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Microsoft.Extensions.AI; + +/// Used to create the JSON payload for an AzureAI chat tool description. +internal sealed class AzureAIChatToolJson +{ + /// Gets a singleton JSON data for empty parameters. Optimization for the reasonably common case of a parameterless function. 
+ public static BinaryData ZeroFunctionParametersSchema { get; } = new("""{"type":"object","required":[],"properties":{}}"""u8.ToArray()); + + [JsonPropertyName("type")] + public string Type { get; set; } = "object"; + + [JsonPropertyName("required")] + public List Required { get; set; } = []; + + [JsonPropertyName("properties")] + public Dictionary Properties { get; set; } = []; +} diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs index 263830b5ba3..ecc41140b27 100644 --- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs @@ -7,7 +7,6 @@ using System.Runtime.CompilerServices; using System.Text; using System.Text.Json; -using System.Text.Json.Serialization; using System.Threading; using System.Threading.Tasks; using Azure.AI.Inference; @@ -20,8 +19,9 @@ namespace Microsoft.Extensions.AI; /// An for an Azure AI Inference . -public sealed partial class AzureAIInferenceChatClient : IChatClient +public sealed class AzureAIInferenceChatClient : IChatClient { + /// A default schema to use when a parameter lacks a pre-defined schema. private static readonly JsonElement _defaultParameterSchema = JsonDocument.Parse("{}").RootElement; /// The underlying . @@ -77,43 +77,33 @@ public async Task CompleteAsync( List returnMessages = []; // Populate its content from those in the response content. - ChatFinishReason? finishReason = null; - foreach (var choice in response.Choices) + ChatMessage message = new() { - ChatMessage returnMessage = new() - { - RawRepresentation = choice, - Role = ToChatRole(choice.Message.Role), - AdditionalProperties = new() { [nameof(choice.Index)] = choice.Index }, - }; + RawRepresentation = response, + Role = ToChatRole(response.Role), + }; - finishReason ??= ToFinishReason(choice.FinishReason); + if (response.Content is string content) + { + message.Text = content; + } - if (choice.Message.ToolCalls is { Count: > 0 } toolCalls) + if (response.ToolCalls is { Count: > 0 } toolCalls) + { + foreach (var toolCall in toolCalls) { - foreach (var toolCall in toolCalls) + if (toolCall is ChatCompletionsToolCall ftc && !string.IsNullOrWhiteSpace(ftc.Name)) { - if (toolCall is ChatCompletionsFunctionToolCall ftc && !string.IsNullOrWhiteSpace(ftc.Name)) - { - FunctionCallContent callContent = ParseCallContentFromJsonString(ftc.Arguments, toolCall.Id, ftc.Name); - callContent.RawRepresentation = toolCall; + FunctionCallContent callContent = ParseCallContentFromJsonString(ftc.Arguments, toolCall.Id, ftc.Name); + callContent.RawRepresentation = toolCall; - returnMessage.Contents.Add(callContent); - } + message.Contents.Add(callContent); } } - - if (!string.IsNullOrEmpty(choice.Message.Content)) - { - returnMessage.Contents.Add(new TextContent(choice.Message.Content) - { - RawRepresentation = choice.Message - }); - } - - returnMessages.Add(returnMessage); } + returnMessages.Add(message); + UsageDetails? usage = null; if (response.Usage is CompletionsUsage completionsUsage) { @@ -128,11 +118,11 @@ public async Task CompleteAsync( // Wrap the content in a ChatCompletion to return. 
return new ChatCompletion(returnMessages) { - RawRepresentation = response, CompletionId = response.Id, CreatedAt = response.Created, ModelId = response.Model, - FinishReason = finishReason, + FinishReason = ToFinishReason(response.FinishReason), + RawRepresentation = response, Usage = usage, }; } @@ -143,13 +133,13 @@ public async IAsyncEnumerable CompleteStreamingAs { _ = Throw.IfNull(chatMessages); - Dictionary? functionCallInfos = null; + Dictionary? functionCallInfos = null; ChatRole? streamedRole = default; ChatFinishReason? finishReason = default; string? completionId = null; DateTimeOffset? createdAt = null; string? modelId = null; - string? authorName = null; + string lastCallId = string.Empty; // Process each update as it arrives var updates = await _chatCompletionsClient.CompleteStreamingAsync(ToAzureAIOptions(chatMessages, options), cancellationToken).ConfigureAwait(false); @@ -161,12 +151,10 @@ public async IAsyncEnumerable CompleteStreamingAs completionId ??= chatCompletionUpdate.Id; createdAt ??= chatCompletionUpdate.Created; modelId ??= chatCompletionUpdate.Model; - authorName ??= chatCompletionUpdate.AuthorName; // Create the response content object. StreamingChatCompletionUpdate completionUpdate = new() { - AuthorName = authorName, CompletionId = chatCompletionUpdate.Id, CreatedAt = chatCompletionUpdate.Created, FinishReason = finishReason, @@ -182,34 +170,52 @@ public async IAsyncEnumerable CompleteStreamingAs } // Transfer over tool call updates. - if (chatCompletionUpdate.ToolCallUpdate is StreamingFunctionToolCallUpdate toolCallUpdate) + if (chatCompletionUpdate.ToolCallUpdate is { } toolCallUpdate) { + // TODO https://github.com/Azure/azure-sdk-for-net/issues/46830: Azure.AI.Inference + // has removed the Index property from ToolCallUpdate. It's now impossible via the + // exposed APIs to correctly handle multiple parallel tool calls, as the CallId is + // often null for anything other than the first update for a given call, and Index + // isn't available to correlate which updates are for which call. This is a temporary + // workaround to at least make a single tool call work and also make work multiple + // tool calls when their updates aren't interleaved. + if (toolCallUpdate.Id is not null) + { + lastCallId = toolCallUpdate.Id; + } + functionCallInfos ??= []; - if (!functionCallInfos.TryGetValue(toolCallUpdate.ToolCallIndex, out FunctionCallInfo? existing)) + if (!functionCallInfos.TryGetValue(lastCallId, out FunctionCallInfo? existing)) { - functionCallInfos[toolCallUpdate.ToolCallIndex] = existing = new(); + functionCallInfos[lastCallId] = existing = new(); } - existing.CallId ??= toolCallUpdate.Id; - existing.Name ??= toolCallUpdate.Name; - if (toolCallUpdate.ArgumentsUpdate is not null) + existing.Name ??= toolCallUpdate.Function.Name; + if (toolCallUpdate.Function.Arguments is { } arguments) { - _ = (existing.Arguments ??= new()).Append(toolCallUpdate.ArgumentsUpdate); + _ = (existing.Arguments ??= new()).Append(arguments); } } + if (chatCompletionUpdate.Usage is { } usage) + { + completionUpdate.Contents.Add(new UsageContent(new() + { + InputTokenCount = usage.PromptTokens, + OutputTokenCount = usage.CompletionTokens, + TotalTokenCount = usage.TotalTokens, + })); + } + // Now yield the item. yield return completionUpdate; } - // TODO: Add usage as content when it's exposed by Azure.AI.Inference. - // Now that we've received all updates, combine any for function calls into a single item to yield. 
if (functionCallInfos is not null) { var completionUpdate = new StreamingChatCompletionUpdate { - AuthorName = authorName, CompletionId = completionId, CreatedAt = createdAt, FinishReason = finishReason, @@ -224,7 +230,7 @@ public async IAsyncEnumerable CompleteStreamingAs { FunctionCallContent callContent = ParseCallContentFromJsonString( fci.Arguments?.ToString() ?? string.Empty, - fci.CallId!, + entry.Key, fci.Name!); completionUpdate.Contents.Add(callContent); } @@ -243,7 +249,6 @@ void IDisposable.Dispose() /// POCO representing function calling info. Used to concatenation information for a single function call from across multiple streaming updates. private sealed class FunctionCallInfo { - public string? CallId; public string? Name; public StringBuilder? Arguments; } @@ -292,7 +297,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents, // These properties are strongly-typed on ChatOptions but not on ChatCompletionsOptions. if (options.TopK is int topK) { - result.AdditionalProperties["top_k"] = BinaryData.FromObjectAsJson(topK, JsonContext.Default.Options); + result.AdditionalProperties["top_k"] = new BinaryData(JsonSerializer.SerializeToUtf8Bytes(topK, JsonContext.Default.Int32)); } if (options.AdditionalProperties is { } props) @@ -310,7 +315,8 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents, default: if (prop.Value is not null) { - result.AdditionalProperties[prop.Key] = BinaryData.FromObjectAsJson(prop.Value, ToolCallJsonSerializerOptions); + byte[] data = JsonSerializer.SerializeToUtf8Bytes(prop.Value, JsonContext.GetTypeInfo(prop.Value.GetType(), ToolCallJsonSerializerOptions)); + result.AdditionalProperties[prop.Key] = new BinaryData(data); } break; @@ -356,7 +362,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents, } /// Converts an Extensions function to an AzureAI chat tool. - private static ChatCompletionsFunctionToolDefinition ToAzureAIChatTool(AIFunction aiFunction) + private static ChatCompletionsToolDefinition ToAzureAIChatTool(AIFunction aiFunction) { BinaryData resultParameters = AzureAIChatToolJson.ZeroFunctionParametersSchema; @@ -381,28 +387,11 @@ private static ChatCompletionsFunctionToolDefinition ToAzureAIChatTool(AIFunctio JsonSerializer.SerializeToUtf8Bytes(tool, JsonContext.Default.AzureAIChatToolJson)); } - return new() + return new(new FunctionDefinition(aiFunction.Metadata.Name) { - Name = aiFunction.Metadata.Name, Description = aiFunction.Metadata.Description, Parameters = resultParameters, - }; - } - - /// Used to create the JSON payload for an AzureAI chat tool description. - private sealed class AzureAIChatToolJson - { - /// Gets a singleton JSON data for empty parameters. Optimization for the reasonably common case of a parameterless function. - public static BinaryData ZeroFunctionParametersSchema { get; } = new("""{"type":"object","required":[],"properties":{}}"""u8.ToArray()); - - [JsonPropertyName("type")] - public string Type { get; set; } = "object"; - - [JsonPropertyName("required")] - public List Required { get; set; } = []; - - [JsonPropertyName("properties")] - public Dictionary Properties { get; set; } = []; + }); } /// Converts an Extensions chat message enumerable to an AzureAI chat message enumerable. @@ -426,10 +415,9 @@ private IEnumerable ToAzureAIInferenceChatMessages(IEnumerab string? result = resultContent.Result as string; if (result is null && resultContent.Result is not null) { - JsonSerializerOptions options = ToolCallJsonSerializerOptions ?? 
JsonContext.Default.Options; try { - result = JsonSerializer.Serialize(resultContent.Result, options.GetTypeInfo(typeof(object))); + result = JsonSerializer.Serialize(resultContent.Result, JsonContext.GetTypeInfo(typeof(object), ToolCallJsonSerializerOptions)); } catch (NotSupportedException) { @@ -449,20 +437,17 @@ private IEnumerable ToAzureAIInferenceChatMessages(IEnumerab { // TODO: ChatRequestAssistantMessage only enables text content currently. // Update it with other content types when it supports that. - ChatRequestAssistantMessage message = new() - { - Content = input.Text - }; + ChatRequestAssistantMessage message = new(input.Text ?? string.Empty); foreach (var content in input.Contents) { if (content is FunctionCallContent { CallId: not null } callRequest) { - JsonSerializerOptions serializerOptions = ToolCallJsonSerializerOptions ?? JsonContext.Default.Options; - message.ToolCalls.Add(new ChatCompletionsFunctionToolCall( - callRequest.CallId, - callRequest.Name, - JsonSerializer.Serialize(callRequest.Arguments, serializerOptions.GetTypeInfo(typeof(IDictionary))))); + message.ToolCalls.Add(new ChatCompletionsToolCall( + callRequest.CallId, + new FunctionCall( + callRequest.Name, + JsonSerializer.Serialize(callRequest.Arguments, JsonContext.GetTypeInfo(typeof(IDictionary), ToolCallJsonSerializerOptions))))); } } @@ -504,11 +489,4 @@ private static List GetContentParts(IList con private static FunctionCallContent ParseCallContentFromJsonString(string json, string callId, string name) => FunctionCallContent.CreateFromParsedArguments(json, callId, name, argumentParser: static json => JsonSerializer.Deserialize(json, JsonContext.Default.IDictionaryStringObject)!); - - /// Source-generated JSON type information. - [JsonSerializable(typeof(AzureAIChatToolJson))] - [JsonSerializable(typeof(IDictionary))] - [JsonSerializable(typeof(JsonElement))] - [JsonSerializable(typeof(int))] - private sealed partial class JsonContext : JsonSerializerContext; } diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs new file mode 100644 index 00000000000..84198e6b2cc --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs @@ -0,0 +1,178 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Buffers; +using System.Buffers.Binary; +using System.Buffers.Text; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Runtime.InteropServices; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.Inference; +using Microsoft.Shared.Diagnostics; + +#pragma warning disable EA0002 // Use 'System.TimeProvider' to make the code easier to test +#pragma warning disable S3011 // Reflection should not be used to increase accessibility of classes, methods, or fields +#pragma warning disable S109 // Magic numbers should not be used + +namespace Microsoft.Extensions.AI; + +/// An for an Azure.AI.Inference . +public sealed class AzureAIInferenceEmbeddingGenerator : + IEmbeddingGenerator> +{ + /// The underlying . + private readonly EmbeddingsClient _embeddingsClient; + + /// The number of dimensions produced by the generator. + private readonly int? _dimensions; + + /// Initializes a new instance of the class. 
+ /// The underlying client. + /// + /// The id of the model to use. This may also be overridden per request via . + /// Either this parameter or must provide a valid model id. + /// + /// The number of dimensions to generate in each embedding. + public AzureAIInferenceEmbeddingGenerator( + EmbeddingsClient embeddingsClient, string? modelId = null, int? dimensions = null) + { + _ = Throw.IfNull(embeddingsClient); + + if (modelId is not null) + { + _ = Throw.IfNullOrWhitespace(modelId); + } + + if (dimensions is < 1) + { + Throw.ArgumentOutOfRangeException(nameof(dimensions), "Value must be greater than 0."); + } + + _embeddingsClient = embeddingsClient; + _dimensions = dimensions; + + // https://github.com/Azure/azure-sdk-for-net/issues/46278 + // The endpoint isn't currently exposed, so use reflection to get at it, temporarily. Once packages + // implement the abstractions directly rather than providing adapters on top of the public APIs, + // the package can provide such implementations separate from what's exposed in the public API. + var providerUrl = typeof(EmbeddingsClient).GetField("_endpoint", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) + ?.GetValue(embeddingsClient) as Uri; + + Metadata = new("az.ai.inference", providerUrl, modelId, dimensions); + } + + /// + public EmbeddingGeneratorMetadata Metadata { get; } + + /// + public TService? GetService(object? key = null) + where TService : class => + typeof(TService) == typeof(EmbeddingsClient) ? (TService)(object)_embeddingsClient : + this as TService; + + /// + public async Task>> GenerateAsync( + IEnumerable values, EmbeddingGenerationOptions? options = null, CancellationToken cancellationToken = default) + { + var azureAIOptions = ToAzureAIOptions(values, options, EmbeddingEncodingFormat.Base64); + + var embeddings = (await _embeddingsClient.EmbedAsync(azureAIOptions, cancellationToken).ConfigureAwait(false)).Value; + + GeneratedEmbeddings> result = new(embeddings.Data.Select(e => + new Embedding(ParseBase64Floats(e.Embedding)) + { + CreatedAt = DateTimeOffset.UtcNow, + ModelId = embeddings.Model ?? azureAIOptions.Model, + })); + + if (embeddings.Usage is not null) + { + result.Usage = new() + { + InputTokenCount = embeddings.Usage.PromptTokens, + TotalTokenCount = embeddings.Usage.TotalTokens + }; + } + + return result; + } + + /// + void IDisposable.Dispose() + { + // Nothing to dispose. Implementation required for the IEmbeddingGenerator interface. + } + + private static float[] ParseBase64Floats(BinaryData binaryData) + { + ReadOnlySpan base64 = binaryData.ToMemory().Span; + + // Remove quotes around base64 string. + if (base64.Length < 2 || base64[0] != (byte)'"' || base64[base64.Length - 1] != (byte)'"') + { + ThrowInvalidData(); + } + + base64 = base64.Slice(1, base64.Length - 2); + + // Decode base64 string to bytes. 
+ byte[] bytes = ArrayPool.Shared.Rent(Base64.GetMaxDecodedFromUtf8Length(base64.Length)); + OperationStatus status = Base64.DecodeFromUtf8(base64, bytes.AsSpan(), out int bytesConsumed, out int bytesWritten); + if (status != OperationStatus.Done || bytesWritten % sizeof(float) != 0) + { + ThrowInvalidData(); + } + + // Interpret bytes as floats + float[] vector = new float[bytesWritten / sizeof(float)]; + bytes.AsSpan(0, bytesWritten).CopyTo(MemoryMarshal.AsBytes(vector.AsSpan())); + if (!BitConverter.IsLittleEndian) + { + Span ints = MemoryMarshal.Cast(vector.AsSpan()); +#if NET + BinaryPrimitives.ReverseEndianness(ints, ints); +#else + for (int i = 0; i < ints.Length; i++) + { + ints[i] = BinaryPrimitives.ReverseEndianness(ints[i]); + } +#endif + } + + ArrayPool.Shared.Return(bytes); + return vector; + + static void ThrowInvalidData() => + throw new FormatException("The input is not a valid Base64 string of encoded floats."); + } + + /// Converts an extensions options instance to an OpenAI options instance. + private EmbeddingsOptions ToAzureAIOptions(IEnumerable inputs, EmbeddingGenerationOptions? options, EmbeddingEncodingFormat format) + { + EmbeddingsOptions result = new(inputs) + { + Dimensions = _dimensions, + Model = options?.ModelId ?? Metadata.ModelId, + EncodingFormat = format, + }; + + if (options?.AdditionalProperties is { } props) + { + foreach (var prop in props) + { + if (prop.Value is not null) + { + byte[] data = JsonSerializer.SerializeToUtf8Bytes(prop.Value, JsonContext.GetTypeInfo(prop.Value.GetType(), null)); + result.AdditionalProperties[prop.Key] = new BinaryData(data); + } + } + } + + return result; + } +} diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceExtensions.cs index d8ba7616316..05a6c87b33b 100644 --- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceExtensions.cs @@ -12,6 +12,16 @@ public static class AzureAIInferenceExtensions /// The client. /// The id of the model to use. If null, it may be provided per request via . /// An that may be used to converse via the . - public static IChatClient AsChatClient(this ChatCompletionsClient chatCompletionsClient, string? modelId = null) => + public static IChatClient AsChatClient( + this ChatCompletionsClient chatCompletionsClient, string? modelId = null) => new AzureAIInferenceChatClient(chatCompletionsClient, modelId); + + /// Gets an for use with this . + /// The client. + /// The id of the model to use. If null, it may be provided per request via . + /// The number of dimensions to generate in each embedding. + /// An that may be used to generate embeddings via the . + public static IEmbeddingGenerator> AsEmbeddingGenerator( + this EmbeddingsClient embeddingsClient, string? modelId = null, int? dimensions = null) => + new AzureAIInferenceEmbeddingGenerator(embeddingsClient, modelId, dimensions); } diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/JsonContext.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/JsonContext.cs new file mode 100644 index 00000000000..5576cbf134a --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/JsonContext.cs @@ -0,0 +1,70 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text.Json.Serialization.Metadata; + +namespace Microsoft.Extensions.AI; + +/// Source-generated JSON type information. +[JsonSourceGenerationOptions(JsonSerializerDefaults.Web, + UseStringEnumConverter = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + WriteIndented = true)] +[JsonSerializable(typeof(AzureAIChatToolJson))] +[JsonSerializable(typeof(IDictionary))] +[JsonSerializable(typeof(JsonElement))] +[JsonSerializable(typeof(int))] +[JsonSerializable(typeof(long))] +[JsonSerializable(typeof(float))] +[JsonSerializable(typeof(double))] +[JsonSerializable(typeof(bool))] +[JsonSerializable(typeof(float[]))] +[JsonSerializable(typeof(byte[]))] +[JsonSerializable(typeof(sbyte[]))] +internal sealed partial class JsonContext : JsonSerializerContext +{ + /// Gets the singleton used as the default in JSON serialization operations. + private static readonly JsonSerializerOptions _defaultToolJsonOptions = CreateDefaultToolJsonOptions(); + + /// Gets JSON type information for the specified type. + /// + /// This first tries to get the type information from , + /// falling back to if it can't. + /// + public static JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions? firstOptions) => + firstOptions?.TryGetTypeInfo(type, out JsonTypeInfo? info) is true ? + info : + _defaultToolJsonOptions.GetTypeInfo(type); + + /// Creates the default to use for serialization-related operations. + [UnconditionalSuppressMessage("AotAnalysis", "IL3050", Justification = "DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled")] + [UnconditionalSuppressMessage("ReflectionAnalysis", "IL2026", Justification = "DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled")] + private static JsonSerializerOptions CreateDefaultToolJsonOptions() + { + // If reflection-based serialization is enabled by default, use it, as it's the most permissive in terms of what it can serialize, + // and we want to be flexible in terms of what can be put into the various collections in the object model. + // Otherwise, use the source-generated options to enable Native AOT. + + if (JsonSerializer.IsReflectionEnabledByDefault) + { + // Keep in sync with the JsonSourceGenerationOptions on JsonContext below. + JsonSerializerOptions options = new(JsonSerializerDefaults.Web) + { + TypeInfoResolver = new DefaultJsonTypeInfoResolver(), + Converters = { new JsonStringEnumConverter() }, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + WriteIndented = true, + }; + + options.MakeReadOnly(); + return options; + } + + return Default.Options; + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientIntegrationTests.cs index 29aef62fd77..a42f1bd4ddf 100644 --- a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientIntegrationTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientIntegrationTests.cs @@ -2,8 +2,6 @@ // The .NET Foundation licenses this file to you under the MIT license. 
using System; -using System.Threading.Tasks; -using Microsoft.TestUtilities; namespace Microsoft.Extensions.AI; @@ -12,7 +10,4 @@ public class AzureAIInferenceChatClientIntegrationTests : ChatClientIntegrationT protected override IChatClient? CreateChatClient() => IntegrationTestHelpers.GetChatCompletionsClient() ?.AsChatClient(Environment.GetEnvironmentVariable("AZURE_AI_INFERENCE_CHAT_MODEL") ?? "gpt-4o-mini"); - - public override Task CompleteStreamingAsync_UsageDataAvailable() => - throw new SkipTestException("Azure.AI.Inference library doesn't currently surface streaming usage data."); } diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs index 9a860014b8f..4fb5122cc93 100644 --- a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs @@ -203,7 +203,7 @@ public async Task BasicRequestResponse_Streaming() Assert.Equal(createdAt, updates[i].CreatedAt); Assert.Equal("gpt-4o-mini-2024-07-18", updates[i].ModelId); Assert.Equal(ChatRole.Assistant, updates[i].Role); - Assert.Equal(i < 10 ? 1 : 0, updates[i].Contents.Count); + Assert.Equal(i is < 10 or 11 ? 1 : 0, updates[i].Contents.Count); Assert.Equal(i < 10 ? null : ChatFinishReason.Stop, updates[i].FinishReason); } } @@ -322,12 +322,13 @@ public async Task MultipleMessages_NonStreaming() } [Fact] - public async Task NullAssistantText_ContentSkipped_NonStreaming() + public async Task NullAssistantText_ContentEmpty_NonStreaming() { const string Input = """ { "messages": [ { + "content": "", "role": "assistant" }, { @@ -423,6 +424,7 @@ public async Task FunctionCallContent_NonStreaming() "model": "gpt-4o-mini", "tools": [ { + "type": "function", "function": { "name": "GetPersonAge", "description": "Gets the age of the specified person.", @@ -436,8 +438,7 @@ public async Task FunctionCallContent_NonStreaming() } } } - }, - "type": "function" + } } ], "tool_choice": "auto" @@ -534,6 +535,7 @@ public async Task FunctionCallContent_Streaming() "model": "gpt-4o-mini", "tools": [ { + "type": "function", "function": { "name": "GetPersonAge", "description": "Gets the age of the specified person.", @@ -547,8 +549,7 @@ public async Task FunctionCallContent_Streaming() } } } - }, - "type": "function" + } } ], "tool_choice": "auto" @@ -614,6 +615,6 @@ private static IChatClient CreateChatClient(HttpClient httpClient, string modelI new ChatCompletionsClient( new("http://somewhere"), new AzureKeyCredential("key"), - new ChatCompletionsClientOptions { Transport = new HttpClientTransport(httpClient) }) + new AzureAIInferenceClientOptions { Transport = new HttpClientTransport(httpClient) }) .AsChatClient(modelId); } diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorIntegrationTests.cs new file mode 100644 index 00000000000..637c1475747 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorIntegrationTests.cs @@ -0,0 +1,13 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using System; + +namespace Microsoft.Extensions.AI; + +public class AzureAIInferenceEmbeddingGeneratorIntegrationTests : EmbeddingGeneratorIntegrationTests +{ + protected override IEmbeddingGenerator>? CreateEmbeddingGenerator() => + IntegrationTestHelpers.GetEmbeddingsClient() + ?.AsEmbeddingGenerator(Environment.GetEnvironmentVariable("OPENAI_EMBEDDING_MODEL") ?? "text-embedding-3-small"); +} diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorTests.cs new file mode 100644 index 00000000000..abd5f609ed2 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorTests.cs @@ -0,0 +1,135 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Net.Http; +using System.Threading.Tasks; +using Azure; +using Azure.AI.Inference; +using Azure.Core.Pipeline; +using Microsoft.Extensions.Caching.Distributed; +using Microsoft.Extensions.Caching.Memory; +using Xunit; + +#pragma warning disable S103 // Lines should not be too long + +namespace Microsoft.Extensions.AI; + +public class AzureAIInferenceEmbeddingGeneratorTests +{ + [Fact] + public void Ctor_InvalidArgs_Throws() + { + Assert.Throws("embeddingsClient", () => new AzureAIInferenceEmbeddingGenerator(null!)); + + EmbeddingsClient client = new(new("http://somewhere"), new AzureKeyCredential("key")); + Assert.Throws("modelId", () => new AzureAIInferenceEmbeddingGenerator(client, "")); + Assert.Throws("modelId", () => new AzureAIInferenceEmbeddingGenerator(client, " ")); + + using var _ = new AzureAIInferenceEmbeddingGenerator(client); + } + + [Fact] + public void AsEmbeddingGenerator_InvalidArgs_Throws() + { + Assert.Throws("embeddingsClient", () => ((EmbeddingsClient)null!).AsEmbeddingGenerator()); + + EmbeddingsClient client = new(new("http://somewhere"), new AzureKeyCredential("key")); + Assert.Throws("modelId", () => client.AsEmbeddingGenerator(" ")); + + client.AsEmbeddingGenerator(null); + } + + [Fact] + public void AsEmbeddingGenerator_OpenAIClient_ProducesExpectedMetadata() + { + Uri endpoint = new("http://localhost/some/endpoint"); + string model = "amazingModel"; + + EmbeddingsClient client = new(endpoint, new AzureKeyCredential("key")); + + IEmbeddingGenerator> embeddingGenerator = client.AsEmbeddingGenerator(model); + Assert.Equal("az.ai.inference", embeddingGenerator.Metadata.ProviderName); + Assert.Equal(endpoint, embeddingGenerator.Metadata.ProviderUri); + Assert.Equal(model, embeddingGenerator.Metadata.ModelId); + } + + [Fact] + public void GetService_SuccessfullyReturnsUnderlyingClient() + { + var client = new EmbeddingsClient(new("http://somewhere"), new AzureKeyCredential("key")); + var embeddingGenerator = client.AsEmbeddingGenerator("model"); + + Assert.Same(embeddingGenerator, embeddingGenerator.GetService>>()); + Assert.Same(client, embeddingGenerator.GetService()); + + using IEmbeddingGenerator> pipeline = new EmbeddingGeneratorBuilder>() + .UseOpenTelemetry() + .UseDistributedCache(new MemoryDistributedCache(Options.Options.Create(new MemoryDistributedCacheOptions()))) + .Use(embeddingGenerator); + + Assert.NotNull(pipeline.GetService>>()); + Assert.NotNull(pipeline.GetService>>()); + Assert.NotNull(pipeline.GetService>>()); + + Assert.Same(client, pipeline.GetService()); + 
Assert.IsType>>(pipeline.GetService>>()); + } + + [Fact] + public async Task GenerateAsync_ExpectedRequestResponse() + { + const string Input = """ + {"input":["hello, world!","red, white, blue"],"encoding_format":"base64","model":"text-embedding-3-small"} + """; + + const string Output = """ + { + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": "qjH+vMcj07wP1+U7kbwjOv4cwLyL3iy9DkgpvCkBQD0bthW98o6SvMMwmTrQRQa9r7b1uy4tuLzssJs7jZspPe0JG70KJy89ae4fPNLUwjytoHk9BX/1OlXCfTzc07M8JAMIPU7cibsUJiC8pTNGPWUbJztfwW69oNwOPQIQ+rwm60M7oAfOvDMAsTxb+fM77WIaPIverDqcu5S84f+rvFyr8rxqoB686/4cPVnj9ztLHw29mJqaPAhH8Lz/db86qga/PGhnYD1WST28YgWru1AdRTz/db899PIPPBzBE720ie47ujymPbh/Kb0scLs8V1Q7PGIFqzwVMR48xp+UOhNGYTxfwW67CaDvvOeEI7tgc228uQNoPXrLBztd2TI9HRqTvLuVJbytoPm8YVMsOvi6irzweJY7/WpBvI5NKL040ym95ccmPAfj8rxJCZG9bsGYvJkpVzszp7G8wOxcu6/ZN7xXrTo7Q90YvGTtZjz/SgA8RWxVPL/hXjynl8O8ZzGjvHK0Uj0dRVI954QjvaqKfTxmUeS8Abf6O0RhV7tr+R098rnRPAju8DtoiiK95SCmvGV0pjwQMOW9wJPdPPutxDxYivi8NLKvPI3pKj3UDYE9Fg5cvQsyrTz+HEC9uuMmPMEaHbzJ4E8778YXvVDERb2cFBS9tsIsPLU7bT3+R/+8b55WPLhRaTzsgls9Nb2tuhNG4btlzSW9Y7cpvO1iGr0lh0a8u8BkvadJQj24f6k9J51CvbAPdbwCEHq8CicvvIKROr0ESbg7GMvYPE6OCLxS2sG7/WrBPOzbWj3uP1i9TVXKPPJg0rtp7h87TSqLPCmowLxrfdy8XbbwPG06WT33jEo9uxlkvcQN17tAmVy8h72yPEdMFLz4Ewo7BPs2va35eLynScI8WpV2PENW2bwQBSa9lSufu32+wTwl4MU8vohfvRyT07ylCIe8dHHPPPg+ST0Ooag8EsIiO9F7w7ylM0Y7dfgOPADaPLwX7hq7iG8xPDW9Lb1Q8oU98twTPYDUvTomwIQ8akcfvUhXkj3mK6Q8syXxvAMb+DwfMI87bsGYPGUbJ71GHtS8XbbwvFQ+P70f14+7Uq+CPSXgxbvHfFK9icgwPQsEbbwm60O9EpRiPDjTKb3uFJm7p/BCPazDuzxh+iy8Xj2wvBqrl71a7nU9guq5PYNDOb1X2Pk8raD5u+bSpLsMD2u7C9ktPVS6gDzyjhI9vl2gPNO0AT0/vJ68XQTyvMMCWbubYhU9rzK3vLhRaToSlOK6qYIAvQAovrsa1la8CEdwPKOkCT1jEKm8Y7epvOv+HLsoJII704ZBPXbVTDubjVQ8aRnfOvspBr2imYs8MDi2vPFVVDxSrwK9hac2PYverLyxGnO9nqNQvfVLD71UEP+8tDDvurN+8Lzkbqc6tsKsu5WvXTtDKxo72b03PdDshryvXfY81JE/vLYbLL2Fp7Y7JbUGPEQ2GLyagla7fAxDPaVhhrxu7Ne7wzAZPOxXHDx5nUe9s35wPHcOizx1fM26FTGePAsEbbzzQBE9zCQMPW6TWDygucy8zPZLPM2oSjzfmy48EF4lvUttDj3NL4q8WIp4PRoEFzxKFA89uKpou9H3BDvK6009a33cPLq15rzv8VY9AQX8O1gxebzjCqo7EeJjPaA1DrxoZ2C65tIkvS0iOjxln2W8o0sKPMPXGb3Ak908cxhQvR8wDzzN1gq8DnNovMZGFbwUJiA9moJWPBl9VzkVA148TrlHO/nFCL1f7y68xe2VPIROtzvCJRu88YMUvaUzRj1qR5+7e6jFPGyrHL3/SgC9GMtYPJcT27yqMX688YOUO32+QT18iAS9cdeUPFbN+zvlx6a83d6xOzQLL7sZJNi8mSnXOuqan7uqin09CievvPw0hLyuq/c866Udu4T1t7wBXnu7zQFKvE5gyDxhUyw8qzx8vIrTLr0Kq+26TgdJPWmVoDzOiIk8aDwhPVug9Lq6iie9iSEwvOKxqjwMiyy7E59gPepMnjth+iw9ntGQOyDijbw76SW9i96sO7qKJ7ybYhU8R/6Su+GmLLzsgtu7inovPRG3pLwZUpi7YzvoucrAjjwOSKm8uuOmvLbt67wKUu68XCc0vbd0Kz0LXWy8lHmgPAAoPjxRpAS99oHMvOlBoDprUh09teLtOxoEl7z0mRA89tpLvVQQ/zyjdkk9ZZ/lvHLikrw76SW82LI5vXyIBLzVnL06NyGrPPXPzTta7nW8FTEePSVcB73FGFU9SFcSPbzL4rtXrbo84lirvcd8Urw9/yG9+63EvPdhCz2rPPw8PPQjvbXibbuo+0C8oWtLPWVG5juL3qw71Zw9PMUY1Tk3yKu8WWq3vLnYKL25A+i8zH2LvMW/1bxDr1g8Cqvtu3pPRr0FrbU8vVKiO0LSGj1b+fM7Why2ux1FUjwhv0s89lYNPUbFVLzJ4M88t/hpvdpvNj0EzfY7gC29u0HyW7yv2Tc8dSPOvNhZurzrpR28jUIqPM0vijxyDdK8iBYyvZ0fkrxalXa9JeBFPO/GF71dBHK8X8FuPKnY/jpQmQY9S5jNPGBz7TrpQaA87/FWvUHyWzwCEPq78HiWOhfuGr0ltYY9I/iJPamCgLwLBO28jZupu38ivzuIbzG8Cfnuu0dMlLypKQG7BzxyvR5QULwCEHo8k8ehPUXoFjzPvka9MDi2vPsphjwjfMi854QjvcW/VbzO4Yg7Li04vL/h3jsaL9a5iG8xuybrwzz3YYu8Gw8VvVGkBD1UugA99MRPuCjLArzvxhc8XICzPFyrcr0gDU296h7eu8jV0TxNKos8lSufuqT9CD1oDmE8sqGyu2PiaLz6osY5YjBqPBAFJrwIlfG8PlihOBE74zzzQJG8r112vJPHobyrPPw7YawrPb5doLqtzrk7qHcCPVIoQzz5l0i81UM+vFd/eryaVxc9xA3XO/6YgbweJZG7W840PF0Ecj19ZUI8x1GTOtb1vDyDnLg8yxkOvOywGz0kqgg8fTqDvKlUQL3Bnlu992ELvZPHobybCZa82LK5vf2NgzwnnUK8YMzsPKOkiTxDr9g6la/duz3/IbusR/q8lmFcvFbN+zztCRu95nklPVKBwjwEJnY6V9j5PPK50bz6okY7R6UTPPnFiDwCaf
k8N8grO/gTCr1iiWm8AhB6vHHXlLyV3Z08vtZgPMDsXDsck9O7mdBXvRLCojzkbqe8XxpuvDSyLzu0MO87cxhQvd3eMbxtDxo9JKqIvB8CT72zrDC7s37wPHvWhbuXQZs8UlYDu7ef6rzsV5y8IkYLvUo/Tjz+R/88PrGgujSyrzxsBJy8P7yeO7f46byfKpA8cFDVPLygIzsdGpO77LCbvLSJ7rtgzOy7sA91O0hXkrwhO408XKvyvMUYVT2mPsQ8d+DKu9lkuLy+iF89xZSWPJFjpDwIlfE8bC9bPBE7Y7z/+f08W6B0PAc8crhmquO7RvOUPDybJLwlXAe9cuKSvMPXGbxK5s48sZY0O+4UmT1/Ij+8oNyOvPIH07tNKos8yTnPO2RpKDwRO+O7vl2gvKSvB7xGmpW7nD9TPZpXFzyXQRs9InHKurhR6bwb4VS8iiwuO3pPxrxeD3A8CfluO//OPr0MaOq8r112vAwP6zynHgM9T+cHPJuNVLzLRE07EmkjvWHX6rzBGh285G4nPe6Y17sCafm8//n9PJkpVzv9P4K7IWbMPCtlvTxHKVK8JNXHO/uCBblAFZ48xyPTvGaqY7wXlRs9EDDlPHcOizyNQiq9W3W1O7iq6LxwqdQ69MRPvSJGC7n3CIy8HOxSvSjLAryU0p87QJncvEoUjzsi7Qu9U4xAOwn5brzfm668Wu71uu002rw/Y588o6SJPFfY+Tyfg4+8u5WlPMDBnTzVnD08ljadu3sBxbzfm668n4OPO9VDvrz0mZC8kFimPNiyOT134Mo8vquhvDA4Njyjz0i7zVpJu1rudbwmksQ794xKuhN0ITz/zj68Vvu7unBQ1bv8NAS97FecOyxwOzs1ZC68AIG9PKLyCryvtvU8ntEQPBkkWD2xwfO7QfLbOhqIVTykVog7lSufvKOkiTwpqEA9/RFCvKxHejx3tYu74woqPMS0VzoMtuu8ViZ7PL8PH72+L2C81JE/vN3eMTwoywK9z5OHOx4lkTwGBrW8c5QRu4khMDyvBPc8nR8SvdlkuLw0si+9S8aNvCkBwLsXwFo7Od4nPbo8pryp2P68GfkYPKpfvjrsV5w6zuEIvbHB8zxnMSM9C9mtu1nj97zjYym8XFJzPAiVcTyNm6m7X5YvPJ8qED1l+OS8WTx3vGKJ6bt+F0G9jk2oPAR0dzwIR/A8umdlvNLUwjzI1dE7yuvNvBdnW7zdhTI9xkaVPCVcB70Mtus7G7aVPDchK7xuwRi8oDWOu/SZkLxOuUe8c5QRPLBo9Dz/+f07zS+KvNBFBr1n2CO8TKNLO4ZZNbym5US5HsyRvGi1YTwxnDO71vW8PM3WCr3E4he816e7O7QFML2asBa8jZspPSVcBzvjvCi9ZGmoPHV8zbyyobK830KvOgw9q7xzZtG7R6WTPMpnjzxj4mg8mrAWPS+GN7xoZ2C8tsKsOVMIAj1fli89Zc0lO00qCzz+R/87XKvyvLxy4zy52Cg9YjBqvW9F1zybjVS8mwmWvLvA5DymugU9DOQrPJWvXbvT38C8TrnHvLbt67sgiQ49e32GPPTETzv7goW7cKnUOoOcuLpG85S8CoCuO7ef6rkaqxe90tTCPJ8qkDvuuxk8FFFfPK9ddrtAbh08roC4PAnOrztV8D08jemquwR09ziL3iy7xkaVumVG5rygNQ69CfnuPGBzbTyE9Tc9Z9ijPK8yNzxgoa084woqu1F2RLwN76m7hrI0vf7xgLwaXRY6JmeFO68ytzrrpR29XbZwPYI4uzvkFai8qHcCPRCJ5DxKFI+7dHHPPE65xzxvnta8BPs2vWaq4zwrvjy8tDDvvEq7D7076SU9q+N8PAsyLTxb+XM9xZQWPP7ufzxsXZu6BEk4vGXNJbwBXvu8xA3XO8lcEbuuJzk8GEeavGnun7sMPSs9ITsNu1yr8roj+Ik8To6IvKjQgbwIwzG8wqlZvDfIK7xln2W8B+Pyu1HPw7sBjDs9Ba01PGSU57w/Yx867FecPFdUu7w2b6w7X5avvA8l57ypKQE9oGBNPeyC27vGytM828i1PP9KAD2/4V68eZ1HvDHqtDvR94Q6UwgCPLMlcbz+w0C8HwJPu/I1k7yZ/pe8aLXhPHYDDT28oKO8p2wEvdVDvrxh+qy8WDF5vJBYpjpaR3U8vgQhPNItwrsJoG88UaQEu3e1C7yagtY6HOzSOw9+5ryYTBk9q+N8POMKqrwoywI9DLZrPCN8SDxYivi8b3MXPf/OvruvBHc8M6exvA3vKbxz7RA8Fdieu4rTrrwFVDa8Vvu7PF0Ecjs6N6e8BzzyPP/Ovrv2rww9t59qvEoUDz3HUZO7UJkGPRigmbz/+X28qjH+u3jACbxlzaW7DA9rvFLawbwLBO2547yoO1t1NTr1pI68Vs37PAI+Ojx8s8O8xnHUvPg+yTwLBO26ybUQPfUoTTw76SU8i96sPKWMRbwUqt46pj7EPGX4ZL3ILtG8AV77vM0BSjzKZ488CByxvIWnNjyIFrI83CwzPN2FsjzHUZO8rzK3O+iPIbyGCzQ98NGVuxpdlrxhrKs8hQC2vFWXvjsCaXm8oRJMPHyIBLz+HMA8W/nzvHkZCb0pqMC87m0YPCu+vDsM5Ks8VnR8vG0Pmrt0yk48y3KNvKcegzwGMXS9xZQWPDYWrTxxAtQ7IWZMPU4Hybw89CO8/eaCPPMSUTxuk9i8WAY6vGfYozsQMGW8Li24vI+mJzxKFI88HwJPPFru9btRz8O6L9+2u29F1zwC5bq7RGHXvMtyjbr5bIm7V626uxsPlTv1KE29UB3FPMwkDDupggC8SQkRvH4XQT1cJ7Q8nvzPvKsRvTu9+SI8JbUGuiP4iTx460i99JkQPNF7Qz26Dma8u+4kvHO/0LyzfvA8EIlkPUPdmLpmUWS8uxnku8f4E72ruL27BzxyvKeXwz1plSC8gpG6vEQ2mLvtYho91Zy9vLvA5DtnXGK7sZY0uyu+PLwXlZu8GquXvE2uSb0ezBG8wn6au470KD1Abh28YMzsvPQdT7xKP867Xg/wO81aSb0IarK7SY1PO5EKJTsMi6y8cH4VvcXtlbwdGhM8xTsXPQvZLbxgzOw7Pf8hPRsPlbzDMJm8ZGmoPM1aSb0HEbO8PPQjvX5wwDwQXiW9wlDaO7SJ7jxFE9a8FTEePG5omTvPkwc8vtZgux9bzrmwD3W8U2EBPAVUNj0hlIw7comTPAEF/DvKwI68YKGtPJ78Tz1boHQ9sOS1vHiSSTlVG307HsyRPHEwFDxQmQY8CaBvvB0aE70PfuY8+neHvHOUET3ssBu7+tCGPJl3WDx4wAk9d1yMPOqanzwGBjW8ZialPB7MEby1O+07J0RDu4yQq7xpGV88ZXQmPc3WCruRCqU8Xbbwu+0JG7kXGVq8SY1PvKblxDv/oH68r7Z1OynWgDklh0a8E/hfPBCJZL31/Y08sD21vA9+Zjy6DmY82WQ4PAJp+TxHTJQ8JKoIvUBunbwgDc26BzxyvVUb/bz+w8A8Wu51u8guUbyHZLM8I
u0LvJqCVj3nhKO96kwevVDyBb3UDYG79zNLO7KhMj1IgtE83NOzO0f+krw89CM9z5OHuz+OXj2TxyE8wOzcPP91v7zUZgA8DyVnvILqOTzn3aI8j/+mO8xPyzt1UQ48+R4IvQnOrzt1I067QtKau9vINb1+7AE8sA/1uy7UOLzpQSC8dqoNPSnWgDsJoO+8ANo8vfDRlbwefpC89wgMPI1CKrrYsrm78mBSvFFLBb1Pa0a8s1MxPHbVzLw+WCG9kbyjvNt6tLwfMA+8HwLPvGO3qTyyobK8DcFpPInIsLwXGdq7nBSUPGdc4ryTx6G8T+eHPBxolDvIqhK8rqv3u1fY+Tz3M0s9qNCBO/GDlL2N6Sq9XKtyPFMIgrw0Cy+7Y7epPLJzcrz/+X28la/du8MC2bwTn+C5YSXsvDneJzz/SoC8H9ePvHMY0Lx0nw+9lSsfvS3Jujz/SgC94rEqvQwP67zd3rE83NOzPKvj/DyYmpo8h2SzvF8abjye0ZC8vSRivCKfijs/vJ48NAuvvFIoQzzFGFU9dtVMPa2g+TtpGd88Uv2DO3kZiTwA2rw79f2Nu1ugdDx0nw+8di7MvIrTrjz08g+8j6anvGH6LLxQ8oW8LBc8Pf0/Ajxl+OQ8SQkRPYrTrrzyNRM8GquXu9ItQjz1Sw87C9mtuxXYnrwDl7m87Y1ZO2ChrbyhQIy4EsIiPWpHHz0inwo7teJtPJ0fEroHPPK7fp4APV/B7rwwODa8L4Y3OiaSxLsBBfw7RI8XvP5H/zxVlz68n1VPvEBuHbwTzSA8fOEDvV49sDs2b6y8mf6XPMVm1jvjvCg8ETvjPEQ2GLxK5s47Q92YuxOfYLyod4K8EDDlPHAlFj1zGFC8pWGGPE65R7wBMzy8nJjSvLoO5rwwkbU7Eu3hvLOsMDyyobI6YHNtPKs8fLzXp7s6AV57PV49MLsVMR68+4KFPIkhMLxeaG87mXdYulyAMzzQRQY9ljadu3YDDby7GWS7phOFPEJ5mzq6tea6Eu1hPJjzmTz+R388di5MvJn+F7wi7Qs8K768PFnj9zu5MSi8Gl2WvJfomzxHd1O8vw8fvONjqbxuaBk980ARPSNRiTwLMi272Fk6vDGcs7z60Ia8vX1hOzvppbuKLK48jZspvZkpV7pWJns7G7YVPdPfwLyruL08FFHfu7ZprbwT+N84+1TFPGpHn7y9JOI8xe2Vu08SR7zs29o8/RFCPCbAhDzfQi89OpCmvL194boeJZE8kQqlvES6VjrzEtE7eGeKu2kZX71rfdw8D6wmu6Y+xLzJXJE8DnPovJrbVbvkFai8KX0Bvfr7RbuXbNq8Gw+VPRCJ5LyA1D28uQPoPLygo7xENpi8/RHCvEOv2DwRtyS9o0uKPNshNbvmeSU8IyPJvCedQjy7GWQ8Wkf1vGKJ6bztYho8vHLju5cT2zzKZw+88jWTvFb7uznYCzm8" + }, + { + "object": "embedding", + "index": 1, + "embedding": "eyfbu150UDkC6hQ9ip9oPG7jWDw3AOm8DQlcvFiY5Lt3Z6W8BLPPOV0uOz3FlQk8h5AYvH6Aobv0z/E8nOQRvHI8H7rQA+s8F6X9vPplyDzuZ1u8T2cTvAUeoDt0v0Q9/xx5vOhqlT1EgXu8zfQavTK0CDxRxX08v3MIPAY29bzIpFm8bGAzvQkkazxCciu8mjyxvIK0rDx6mzC7Eqg3O8H2rTz9vo482RNiPUYRB7xaQMU80h8hu8kPqrtyPB+8dvxUvfplSD21bJY8oQ8YPZbCEDvxegw9bTJzvYNlEj0h2q+9mw5xPQ5P8TyWwpA7rmvvO2Go27xw2tO6luNqO2pEfTztTwa7KnbRvAbw37vkEU89uKAhPGfvF7u6I8c8DPGGvB1gjzxU2K48+oqDPLCo/zsskoc8PUclvXCUvjzOpQC9qxaKO1iY5LyT9XS9ZNzmvI74Lr03azk93CYTvFJVCTzd+FK8lwgmvcMzPr00q4O9k46FvEx5HbyIqO083xSJvC7PFzy/lOK7HPW+PF2ikDxeAHu9QnIrvSz59rl/UmG8ZNzmu2b4nD3V31Y5aXK9O/2+jrxljUw8y9jkPGuvTTxX5/48u44XPXFFpDwAiEm8lcuVvX6h+zwe7Lm8SUUSPHmkNTu9Eb08cP8OvYgcw7xU2C49Wm4FPeV8H72AA8c7eH/6vBI0Yj3L2GQ8/0G0PHg5ZTvHjAS9fNhAPcE8wzws2By6RWAhvWTcZjz+1uM8H1eKvHdnJT0TWR29KcVrPdu7wrvMQzW9VhW/Ozo09LvFtuM8OlmvPO5GAT3eHY68zTqwvIhiWLs1w1i9sGJqPaurOb0s2Jy8Z++XOwAU9Lggb988vnyNvVfGpLypKBS8IouVO60NBb26r/G6w+0ovbVslrz+kE68MQOjOxdf6DvoRdo8Z4RHPCvhIT3e7009P4Q1PQ0JXDyD8Ty8/ZnTuhu4Lj3X1lG9sVnlvMxDNb3wySY9cUWkPNZKJ73qyP+8rS7fPNhBojwpxes8kt0fPM7rlbwYEE68zoBFvdrExzsMzEu9BflkvF0uu7zNFfW8UyfJPPSJ3LrEBf68+6JYvef/xDpAe7C8f5h2vPqKA7xUTAS9eDllPVK8eL0+GeW7654gPQuGNr3/+x69YajbPAehRTyc5BE8pfQIPMGwGL2QoA87iGJYPYXoN7s4sc69f1JhPdYEkjxgkIa6uxpCvHtMljtYvR88uCzMPBeEo7wm1/U8GBDOvBkHybwyG3i7aeaSvQzMyzy3e2a9xZUJvVSSmTu7SII8x4yEPKAYHTxUTIQ8lcsVO5x5QT3VDRe963llO4K0rLqI1i07DX0xvQv6CznrniA9nL9WPTvl2Tw6WS+8NcPYvEL+VbzZfrK9NDcuO4wBNL0jXVW980PHvNZKJz1Oti09StG8vIZTiDwu8PE8zP0fO9340juv1j890vFgvMFqAz2kHui7PNxUPQehxTzjGlQ9vcunPL+U4jyfrUw8R+NGPHQF2jtSdmO8mYtLvF50ULyT1Bo9ONaJPC1kx7woznC83xQJvUdv8byEXA29keaku6Qe6Ly+fA29kKAPOxLuzLxjxJG9JnCGur58jTws2Jy8CkmmO3pVm7uwqH87Eu7Mu/SJXL0IUis9MFI9vGnmEr1Oti09Z+8XvH1DkbwcaZS8NDcuvT0BkLyPNT89Haakuza607wv5+w81KLGO80VdT3MiUq8J4hbPHHRzrwr4aG8PSJqvJOOBT3t2zC8eBgLvXchkLymOp66y9jkPDdG/jw2ulO983GHPDvl2Tt+Ooy9NwDpOzZ0Pr3xegw7bhGZvEpd57s5YjS9Gk1evIbfMjxBwcW8NnQ+PMlVPzxR6ji9M8zdPImHk7wQsby8u0gCPXtMFr22YxE9Wm4FPaXPzbygGJ093bK9OuYtBTxyXfk8iYeTvNH65byk/Q29QO+FvKbGyLxCcqs9nL/WvPtcQ72XTjs8kt2fuhaNKDxqRH08KX9WPbmXnDtXDDo96G
oVPVw3QL0eeGS8ayOjvAIL7zywQZC9at0NvUMjET1Q8707eTDgvIio7Tv60Jg87kYBOw50LLx7BgE96qclPUXsSz0nQkY5aDUtvQF/RD1bZQC73fjSPHgYCzyPNT+9q315vbMvhjsvodc8tEdbPGcQ8jz8U768cYs5PIwBtL38x5M9PtPPvIex8jzfFIk9vsIivLsaQj2/uZ072y8YvSV5C7uoA9k8JA67PO5nWzvS8eC8av7nuxSWrbybpwE9f5h2vG3sXTmoA1k9sjiLvTBSPbxc8Sq9UpuePB+dHz2/cwg9BWS1vCrqJr2M3Pg86LAqPS/GEj3oRdq8GiyEvACISbuiJ+28FFAYuzBSvTzwDzy8K5uMvE5wmDpd6CW6dkJqPGlyvTwF2Iq9f1JhPSHarzwDdr88JXkLu4ADxzx5pDW7zqUAvdAoJj24wXs8doj/PH46jD2/2vc893fSuyxtTL0YnPg7IWbaPOiwqrxLDk27ZxDyPBpymbwW0z08M/odPTufRL1AVvU849Q+vBGDfD3JDyq6Z6kCPL9OzTz0rpe8FtM9vaDqXLx+W2Y7jHWJPGXT4TwJ3lW9M4bIPPCDkTwoZwE9XH1VOmksqLxLPI08cNrTvCyz4bz+Srm8kiO1vDP6nbvIpNk8MrSIvPe95zoTWR29SYsnPYC9MT2F6De93qm4PCbX9bqqhv47yky6PENE67x/DEw8JdYAvUdvcbywh6W8//ueO8fSmTyjTCi9yky6O/qr3TzvGEE8wqcTPeDmSDyuJVo8ip/ou1HqOLxOtq28y5LPuxk1Cb0Ddr+7c+2EvKQeaL1SVQk8XS47PGTcZjwdpiQ8uFqMO0QaDD1XxqS8mLmLuuSFJDz1xmy8PvgKvJAHf7yC+kE8VapuvetYC7tHCAI8oidtPOiwqjyoSW68xCo5vfzobTzz2HY88/0xPNkT4rty9om8RexLu9SiRrsVaG081gSSO5IjtTsOLpc72sTHPGCQBj0QJRI9BCclPI1sBDzCyO07QHuwvOYthTz4tGK5QHuwvWfvFz2CQNc8PviKPO8YwTuQoA89fjoMPBnBs7zGZ8m8uiPHvMdeRLx+gKE8keaku0wziDzZWfe8I4KQPJ0qpzs4sc47dyEQPEQaDDzVmcE8//uePJcIJjztTwa9ogaTOftcwztU2K48opvCuyz5drzqM1C7iYcTvfDJJjxXxiQ9o0wovO1PBrwqvGa7dSoVPbI4izvnuS88zzGrPH3POzzHXkQ9PSJqOXCUPryW4+o8ELE8PNZKp7z+Sjm8foChPPIGtzyTaUq8JA47vBiceDw3a7m6jWyEOmksKDwH59q5GMo4veALBL0SqDe7IaxvvBD3Ubxn7xc9+dkdPSBOBTxHCAI8mYvLOydCxjw5HB88zTqwvJXs77w9AZA9CxvmvIeQGL2rffm8JXkLPKqGfjyoSe464d1DPPd3UrpO/EK8qxYKvUuCojwhZlq8EPfRPKaAs7xKF9K85i0FvEYRhzyPNT88m6cBvdSiRjxnqQI9uOY2vcBFSLx4OeW7BxUbPCz59rt+W2Y7SWZsPGzUCLzE5KM7sIclvIdr3buoSW47AK0EPImHE7wgToU8IdovO7FZ5bxbzO+8uMF7PGayB7z6ioO8zzErPEcIgrxSm568FJYtvNf7jDyrffm8KaQRPcoGpTwleQu8EWKiPHPthLz44qI8pEOjvWh7QjzpPNU8lcuVPHCUPr3n/8Q8bNQIu0WmNr1Erzs95VfkPCeIW7vT0Aa7656gudH65bxw/w49ZrKHPHsn27sIUiu8mEU2vdUNF7wBf8Q809CGPFtlgDo1fcO85i2FPEcIAjwL+os653OavOu1AL2EN9K8H52fPKzoybuMdYk8T2cTO8lVPzyK5X07iNYtvD74ijzT0IY8RIF7vLLENbyZi8s8KwJ8vAne1TvGZ8k71gSSumJZwTybp4G8656gPG8IFL27SAI9arjSvKVbeDxljcy83fjSuxu4Lr2DZRK9G0TZvLFZ5bxR6ji8NPEYPbI4izyAvTE9riVaPCCUGrw0Ny48f1LhuzIb+DolBTY8UH9ou/4EpLyAvTG9CFIrvCBOBTlkIvy8WJhkvHIXZLkf47Q8GQfJvBpNXr1pcr07c8jJO2nmkrxOcJi8sy8GuzjWibu2Pta8WQO1PFPhs7z7XEO8pEMjvb9OzTz4bs08EWKiu0YyYbzeHQ695D+PPKVbeDzvGEG9B6HFO0uCojws+Xa7JQW2OpRgRbxjCqc8Sw7NPDTxmLwjXVW8sRNQvFPhszzM/Z88rVMavZPUGj06WS+8JpHgO3etursdx369uZccvKplJDws+Xa8fzGHPB1gj7yqZaQ887ecPBNZHbzoi2+7NwDpPMxDtbzfWh49H+O0PO+kaztI2kE8/xz5PImHE73fNWO8T60ovIPxPDvR2Yu8XH3VvMcYr7wfnR+9fUORPIdr3Tyn6wO9nkL8vM2uhTzGIbS66u26vE2/MrxFYKE8iwo5vLSNcLy+wiK9GTUJPK10dLzrniC8qkBpvPxTPrwzQLO8illTvFi9H7yMATS7ayOjO14Ae7z19Cy87dswPKbGyDzujJa93EdtPdsB2LYT5Ue9RhEHPKurubxm+By9+mVIvIy7HrxZj987yOpuvUdv8TvgCwS8TDMIO9xsqLsL+gs8BWS1PFRMBD1yXXm86GoVvK+QqjxRXg46TZHyu2ayhzx7TJa8uKAhPLyFkjsV3MI7niGiPGNQvDxgkIa887ccPUmLJ7yZsIa8KDnBvHgYi7yMR0m82ukCvRuK7junUvO8aeYSPXtt8LqXCKa84kgUPd5jIzxlRze93xQJPNNcMT2v1j889GiCPKRkfbxz7YQ8b06pO8cYL7xg9/U8yQ+qPGlyvbzfNWO8vZ3nPBGD/DtB5gC7yKRZPPTPcbz6q928bleuPI74rrzVDRe9CQORvMmb1Dzv0qs8DBLhu4dr3bta1fQ8aeYSvRD3UTugpMe8CxvmPP9BNDzHjAQ742DpOzXD2Dz4bk28c1T0Onxka7zEBf48uiNHvGayBz1pcj29NcPYvDnu3jz5kwg9WkBFvL58jTx/mHY8wTzDPDZ0Pru/uZ08PQGQPOFRmby4oKE8JktLPIx1iTsppBG9dyGQvHfzT7wzhki44KAzPSOCkDzv0iu8lGBFO2VHNzyKxKM72EEiPYtQzryT9fQ8UDnTPEx5nTzuZ9s8QO8FvG8IlDx7J9s6MUk4O9k4nbx7TBa7G7iuvCzYHDocr6k8/7UJPY2ymTwVIlg8KjC8OvSuFz2iJ+28cCBpvE0qAzw41ok7sgrLvPjiojyG37K6lwimvKcxGTwRHI28y5LPO/mTiDx82MC5VJIZPWkH7TwPusG8YhOsvH1DkbzUx4E8TQXIvO+ka7zKwI+8w+2oPNLxYLzxegy9zEM1PDo0dDxIINc8FdxCO46E2TwPRmw9+ooDvMmb1LwBf0S8CQMRvEXsS7zPvdU80qvLPLfvO7wbuK68iBzDO0cpXL2WndU7dXCqvOTLubytLl88LokCv
Zj/IDw0q4M8G7guvNkTYrq5UQe7vcunvIrEI7xuERm9RexLvAdbsDwLQCE7uVEHPYjWrbuM3Pi8g2WSO3R5L7x4XiC8vKZsu9Sixros+fa8UH/ouxxpFL3wyaa72sRHu2YZ9zuiJ2274o4pOjkcnzyagka7za4FvYrEozwCMCo7cJQ+vfqKAzzJ4em8fNhAPUB7sLylz80833v4vOU2ir1ty4M8UV4OPXQF2jyu30S9EjRivBVo7TwXX2g70ANrvEJyq7wQJRK99jE9O7c10brUxwE9SUUSPS4VLbzBsJg7FHHyPMz9n7latJo8bleuvBpN3jsF+WS8Ye7wO4nNKL0TWZ08iRM+vOn2v7sB8xm9jY3ePJ/zYbkLG+a7ZvicvGxgM73L2OS761iLPKcxmTrX+ww8J0JGu1MnyTtJZuw7pIm4PJbCED29V1K9PFCqPLBBkLxhYka8hXTiPEB7MDzrniA7h5CYvIR9ZzzARcg7TZHyu4sKOb1in9Y7nL9WO6gD2TxSduO8UaQjPQO81Lxw/w69KwL8O4FJ3D2XTju8SE6XPGDWGz0K1VC8YhMsvObCtDyndy49BCclu68cVbxemYu8sGLqOksOzTzj1L47ISBFvLly4Ttk3Oa8RhGHNwzxBj0v5+y7ogaTPA+6QbxiE6w8ubj2PDixzrstZEe9jbKZPPd30rwqMDw8TQXIPFurlTxx0c68jLsePfSJ3LuXTru8yeHpu6Ewcjx5D4a8BvBfvN8Uibs9R6W8lsIQvaEw8rvVUyw8SJQsPebCNDwu8PE8GMo4OxAlkjwJmMA8KaQRvdYlbDwNNxy9ouHXPDffDrxwZv46AK0EPJqCRrpWz6k8/0E0POAs3rxmsoe7zTqwO5mLyzyP7ym7wTzDvFB/aLx5D4a7doj/O67fxDtsO/g7uq9xvMWViTtC/tU7PhnlvIEogjxxRSQ9SJSsPIJA1zyBKAI9ockCPYC9MbxBTXC83xSJvPFVUb1n75c8uiNHOxdf6Drt27A8/FM+vJOvXz3a6QI8UaQjuvqKgzyOhNm831oevF+xYLxjCic8sn6gPDdrOTs3Rv66cP+Ou5785rycBew8J0JGPJOOBbw9Imq8q335O3MOX7xemQs8PtNPPE1L3Tx5dnU4A+EPPLrdsTzfFIm7LJIHPB4yz7zbAdi8FWjtu1h3Cj0oznA8kv55PKgDWbxIINc8xdsePa8cVbzmlHQ8IJSavAgMlrx4XiA8z3dAu2PEET3xm+a75//EvK2Zr7xbqxU8zP2fvOSFJD1xRSS7k44FvPzHkzz5+ne8+tAYvd5jIz1GMuE8yxSAO3KCNDyRuOS8wzO+vObCNDwzQLO7isQjva1TGrz6ioM79GgCPF66Zbx1KpW8qW6pu4RcDTzcJhO9SJQsO5G45LsAiMm8lRErvJqCxjzQbju7w3nTuTclpDywqP88ysCPvAF/xLxfa0u88cChPBjKODyaPLE8k69fvGFiRrvuRgG9ATmvvJEsOr21+EC9KX/WOrmXnDwDAuo8yky6PI1sBDvztxy8PviKPKInbbzbdS276mGQO2Kf1rwn/DC8ZrIHPBRxcj0z+h264d1DPdG0ULxvTqm5bDt4vToTmjuGJcg7tmMRO9YEEr3oJAC9THmdPKn607vcJhM8Zj6yvHR5r7ywYmq83fjSO5mLyzshIEU8EWKiuu9eVjw75dk7fzGHvNl+sjwJJOs8YllBPAtheztz7QQ92lDyvDEDozzEKrk7KnZRvG8pbjsdYI+7yky6OfWAVzzjYGk7NX3DOzrNhDyeIaI8joTZvFcMOryYRba8G7iuu893QDw9RyW7za6FvDUJ7rva6YK9D7rBPD1o/zxCLJa65TaKvHsGAT2g6ly8+tCYu+wqy7xeAHu8vZ1nPBv+QzwfVwo8CMYAvM+91TzKTDq8Ueo4u2uvzTsBf8Q8p+uDvKofDz12tj+8wP+yOlkDtTwYyji6ZdPhPGv14rwqdtE8YPf1vLIKy7yFLs28ouFXvO1PBj15pDU83xQJPdfWUTz8x5O64kgUPBQKA72eIaK6A3a/OyzYnLoYnPg4XMNqPdxsqLsKSaY7pfSIvBoshLupKJS8G0TZOu/SqzzFcE47cvaJPA19Mb14dQC8sVllvJmwhjycBey8cvaJOmSWUbvRtFC8WtX0O2r+57twIGm8yeFpvFuG2rzCyO08PUelPK5rbzouFS29uCxMPQAUdDqtma88wqeTu5gge7zH8/O7l067PJdOO7uKxCO8/xx5vKt9+TztTwa8OhOaO+Q/Dzw33w49CZhAvSubjDydttG8IdovPIADR7stHrI7ATmvvOAs3rzL2OQ69K4XvNccZ7zlV2S8c+0EPfNDxzydKqc6LLPhO8YhtDyJhxM9H1eKOaNMKLtOcBg9HPU+PTsrbzvT0Ia8BG26PB2mpDp7TJa8wP8yPVvM77t0ea86eTBgvFurFT1C/tW7CkkmvKOSPT2aPDG9lGDFPAhSq7u5UYc8l5TQPFh3ijz9vg68lGBFO4/vKTxViZS7eQ8GPTNAs7xmsoe8o0yoPJfaZbwlvyA8IazvO0XsS717TJY8flvmOgHFWbyWnVW8mdFgvJbCkDynDF68" + } + ], + "model": "text-embedding-3-small", + "usage": { + "prompt_tokens": 9, + "total_tokens": 9 + } + } + """; + + using VerbatimHttpHandler handler = new(Input, Output); + using HttpClient httpClient = new(handler); + using IEmbeddingGenerator> generator = new EmbeddingsClient(new("http://somewhere"), new AzureKeyCredential("key"), new() + { + Transport = new HttpClientTransport(httpClient), + }).AsEmbeddingGenerator("text-embedding-3-small"); + + var response = await generator.GenerateAsync([ + "hello, world!", + "red, white, blue", + ]); + Assert.NotNull(response); + Assert.Equal(2, response.Count); + + Assert.NotNull(response.Usage); + Assert.Equal(9, response.Usage.InputTokenCount); + Assert.Equal(9, response.Usage.TotalTokenCount); + + foreach (Embedding e in response) + { + Assert.Equal("text-embedding-3-small", e.ModelId); + Assert.NotNull(e.CreatedAt); + Assert.Equal(1536, e.Vector.Length); + 
Assert.Contains(e.Vector.ToArray(), f => !f.Equals(0)); + } + } +} diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs index 4c4086e1157..e1a2076a6c7 100644 --- a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs +++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs @@ -10,22 +10,23 @@ namespace Microsoft.Extensions.AI; /// Shared utility methods for integration tests. internal static class IntegrationTestHelpers { - /// Gets an to use for testing, or null if the associated tests should be disabled. - public static ChatCompletionsClient? GetChatCompletionsClient() - { - string? apiKey = - Environment.GetEnvironmentVariable("AZURE_AI_INFERENCE_APIKEY") ?? - Environment.GetEnvironmentVariable("OPENAI_API_KEY"); + private static readonly string? _apiKey = + Environment.GetEnvironmentVariable("AZURE_AI_INFERENCE_APIKEY") ?? + Environment.GetEnvironmentVariable("OPENAI_API_KEY"); - if (apiKey is not null) - { - string? endpoint = - Environment.GetEnvironmentVariable("AZURE_AI_INFERENCE_ENDPOINT") ?? - "https://api.openai.com/v1"; + private static readonly string _endpoint = + Environment.GetEnvironmentVariable("AZURE_AI_INFERENCE_ENDPOINT") ?? + "https://api.openai.com/v1"; - return new(new Uri(endpoint), new AzureKeyCredential(apiKey)); - } + /// Gets an to use for testing, or null if the associated tests should be disabled. + public static ChatCompletionsClient? GetChatCompletionsClient() => + _apiKey is string apiKey ? + new ChatCompletionsClient(new Uri(_endpoint), new AzureKeyCredential(apiKey)) : + null; - return null; - } + /// Gets an to use for testing, or null if the associated tests should be disabled. + public static EmbeddingsClient? GetEmbeddingsClient() => + _apiKey is string apiKey ? + new EmbeddingsClient(new Uri(_endpoint), new AzureKeyCredential(apiKey)) : + null; } diff --git a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs index 3f5ce32fc37..0863e31db37 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs @@ -110,7 +110,13 @@ public virtual async Task CompleteStreamingAsync_UsageDataAvailable() { SkipIfNotEnabled(); - var response = _chatClient.CompleteStreamingAsync("Explain in 10 words how AI works"); + var response = _chatClient.CompleteStreamingAsync("Explain in 10 words how AI works", new() + { + AdditionalProperties = new() + { + ["stream_options"] = new Dictionary { ["include_usage"] = true, }, + }, + }); List chunks = []; await foreach (var chunk in response)
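
A minimal consumption sketch for the surface area this patch touches, following the patterns in the tests above. It is not part of the patch itself: the endpoint URI is a placeholder, the model names ("text-embedding-3-small", "gpt-4o-mini") and the AZURE_AI_INFERENCE_APIKEY variable are taken from the tests, and the Dictionary<string, object> type argument for "stream_options" is an assumption about the OpenAI-compatible opt-in used in the updated integration test.

// Sketch only; endpoint and key handling are illustrative, not defined by this change.
using System;
using System.Collections.Generic;
using Azure;
using Azure.AI.Inference;
using Microsoft.Extensions.AI;

var endpoint = new Uri("https://example.models.ai.azure.com"); // hypothetical endpoint
var credential = new AzureKeyCredential(
    Environment.GetEnvironmentVariable("AZURE_AI_INFERENCE_APIKEY")!);

// New in this release: EmbeddingsClient can be wrapped as an IEmbeddingGenerator.
IEmbeddingGenerator<string, Embedding<float>> generator =
    new EmbeddingsClient(endpoint, credential)
        .AsEmbeddingGenerator("text-embedding-3-small");

var embeddings = await generator.GenerateAsync(["hello, world!", "red, white, blue"]);
Console.WriteLine($"{embeddings.Count} vectors, {embeddings.Usage?.TotalTokenCount} tokens");

// Streaming chat now surfaces usage when the service reports it; per the updated
// integration test, OpenAI-compatible endpoints opt in via stream_options.
IChatClient chatClient =
    new ChatCompletionsClient(endpoint, credential)
        .AsChatClient("gpt-4o-mini");

await foreach (var update in chatClient.CompleteStreamingAsync(
    "Explain in 10 words how AI works",
    new()
    {
        AdditionalProperties = new()
        {
            ["stream_options"] = new Dictionary<string, object> { ["include_usage"] = true },
        },
    }))
{
    Console.Write(update.Text);
}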