diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
index 1bb4dc4e5fb..72ddb13b2ac 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
@@ -30,6 +30,18 @@ public sealed class OllamaChatClient : IChatClient
/// <summary>The <see cref="HttpClient"/> to use for sending requests.</summary>
private readonly HttpClient _httpClient;
+ /// <summary>Initializes a new instance of the <see cref="OllamaChatClient"/> class.</summary>
+ /// <param name="endpoint">The endpoint URI where Ollama is hosted.</param>
+ /// <param name="modelId">
+ /// The id of the model to use. This may also be overridden per request via <see cref="ChatOptions.ModelId"/>.
+ /// Either this parameter or <see cref="ChatOptions.ModelId"/> must provide a valid model id.
+ /// </param>
+ /// <param name="httpClient">An <see cref="HttpClient"/> instance to use for HTTP operations.</param>
+ public OllamaChatClient(string endpoint, string? modelId = null, HttpClient? httpClient = null)
+ : this(new Uri(Throw.IfNull(endpoint)), modelId, httpClient)
+ {
+ }
+
/// <summary>Initializes a new instance of the <see cref="OllamaChatClient"/> class.</summary>
/// <param name="endpoint">The endpoint URI where Ollama is hosted.</param>
/// <param name="modelId">
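Not part of the diff itself: a minimal usage sketch of the new string-based overload, assuming the Microsoft.Extensions.AI namespace and the CompleteAsync/ChatOptions shapes that the updated tests below already use; the endpoint, model id, and prompt values are the same illustrative ones those tests use.

using Microsoft.Extensions.AI;

// The string overload parses the endpoint and delegates to the Uri-based
// constructor shown above, so the two forms are interchangeable.
using IChatClient client = new OllamaChatClient("http://localhost:11434", "llama3.1");

// Same request shape as in the tests below; prompt and option values are illustrative.
var response = await client.CompleteAsync("hello", new ChatOptions { MaxOutputTokens = 10 });
Console.WriteLine(response);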
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs
index 22fa54391cc..3e281173c8b 100644
--- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs
@@ -22,14 +22,14 @@ public class OllamaChatClientTests
[Fact]
public void Ctor_InvalidArgs_Throws()
{
- Assert.Throws("endpoint", () => new OllamaChatClient(null!));
- Assert.Throws("modelId", () => new OllamaChatClient(new("http://localhost"), " "));
+ Assert.Throws("endpoint", () => new OllamaChatClient((Uri)null!));
+ Assert.Throws("modelId", () => new OllamaChatClient("http://localhost", " "));
}
[Fact]
public void GetService_SuccessfullyReturnsUnderlyingClient()
{
- using OllamaChatClient client = new(new("http://localhost"));
+ using OllamaChatClient client = new("http://localhost");
Assert.Same(client, client.GetService<IChatClient>());
Assert.Same(client, client.GetService<OllamaChatClient>());
@@ -94,7 +94,7 @@ public async Task BasicRequestResponse_NonStreaming()
using VerbatimHttpHandler handler = new(Input, Output);
using HttpClient httpClient = new(handler);
- using OllamaChatClient client = new(new("http://localhost:11434"), "llama3.1", httpClient);
+ using OllamaChatClient client = new("http://localhost:11434", "llama3.1", httpClient);
var response = await client.CompleteAsync("hello", new()
{
MaxOutputTokens = 10,
@@ -152,7 +152,7 @@ public async Task BasicRequestResponse_Streaming()
using VerbatimHttpHandler handler = new(Input, Output);
using HttpClient httpClient = new(handler);
- using IChatClient client = new OllamaChatClient(new("http://localhost:11434"), "llama3.1", httpClient);
+ using IChatClient client = new OllamaChatClient("http://localhost:11434", "llama3.1", httpClient);
List<StreamingChatCompletionUpdate> updates = [];
await foreach (var update in client.CompleteStreamingAsync("hello", new()
@@ -238,7 +238,7 @@ public async Task MultipleMessages_NonStreaming()
using VerbatimHttpHandler handler = new(Input, Output);
using HttpClient httpClient = new(handler);
- using IChatClient client = new OllamaChatClient(new("http://localhost:11434"), httpClient: httpClient);
+ using IChatClient client = new OllamaChatClient("http://localhost:11434", httpClient: httpClient);
List<ChatMessage> messages =
[
@@ -342,7 +342,7 @@ public async Task FunctionCallContent_NonStreaming()
using VerbatimHttpHandler handler = new(Input, Output);
using HttpClient httpClient = new(handler) { Timeout = Timeout.InfiniteTimeSpan };
- using IChatClient client = new OllamaChatClient(new("http://localhost:11434"), "llama3.1", httpClient)
+ using IChatClient client = new OllamaChatClient("http://localhost:11434", "llama3.1", httpClient)
{
ToolCallJsonSerializerOptions = TestJsonSerializerContext.Default.Options,
};
@@ -434,7 +434,7 @@ public async Task FunctionResultContent_NonStreaming()
using VerbatimHttpHandler handler = new(Input, Output);
using HttpClient httpClient = new(handler) { Timeout = Timeout.InfiniteTimeSpan };
- using IChatClient client = new OllamaChatClient(new("http://localhost:11434"), "llama3.1", httpClient)
+ using IChatClient client = new OllamaChatClient("http://localhost:11434", "llama3.1", httpClient)
{
ToolCallJsonSerializerOptions = TestJsonSerializerContext.Default.Options,
};
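Also not part of the diff: a hedged sketch of the argument validation that the reworked Ctor_InvalidArgs_Throws test covers, assuming the string overload mirrors the Uri-based constructor's checks (Throw.IfNull on the endpoint, rejection of a whitespace model id); the parameter names match the assertions in that test.

using Microsoft.Extensions.AI;

// A null endpoint string hits Throw.IfNull in the new overload and surfaces
// as ArgumentNullException with ParamName == "endpoint".
try
{
    using var client = new OllamaChatClient((string)null!);
}
catch (ArgumentNullException ex)
{
    Console.WriteLine(ex.ParamName); // endpoint
}

// A whitespace model id is rejected with ArgumentException, ParamName == "modelId",
// the same behavior the updated test asserts for the string overload.
try
{
    using var client = new OllamaChatClient("http://localhost", " ");
}
catch (ArgumentException ex)
{
    Console.WriteLine(ex.ParamName); // modelId
}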