[.Net] fix #3014 by adding local model function call in dotnet website #3044

Merged: 3 commits, Jun 30, 2024
@@ -1,68 +1,3 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Example13_OpenAIAgent_JsonMode.cs

using System.Text.Json;
using System.Text.Json.Serialization;
using AutoGen.Core;
using AutoGen.OpenAI;
using AutoGen.OpenAI.Extension;
using Azure.AI.OpenAI;
using FluentAssertions;

namespace AutoGen.BasicSample;

public class Example13_OpenAIAgent_JsonMode
{
public static async Task RunAsync()
{
#region create_agent
var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(deployName: "gpt-35-turbo"); // json mode only works with 0125 and later model.
var apiKey = config.ApiKey;
var endPoint = new Uri(config.Endpoint);

var openAIClient = new OpenAIClient(endPoint, new Azure.AzureKeyCredential(apiKey));
var openAIClientAgent = new OpenAIChatAgent(
openAIClient: openAIClient,
name: "assistant",
modelName: config.DeploymentName,
systemMessage: "You are a helpful assistant designed to output JSON.",
seed: 0, // explicitly set a seed to enable deterministic output
responseFormat: ChatCompletionsResponseFormat.JsonObject) // set response format to JSON object to enable JSON mode
.RegisterMessageConnector()
.RegisterPrintMessage();
#endregion create_agent

#region chat_with_agent
var reply = await openAIClientAgent.SendAsync("My name is John, I am 25 years old, and I live in Seattle.");

var person = JsonSerializer.Deserialize<Person>(reply.GetContent());
Console.WriteLine($"Name: {person.Name}");
Console.WriteLine($"Age: {person.Age}");

if (!string.IsNullOrEmpty(person.Address))
{
Console.WriteLine($"Address: {person.Address}");
}

Console.WriteLine("Done.");
#endregion chat_with_agent

person.Name.Should().Be("John");
person.Age.Should().Be(25);
person.Address.Should().BeNullOrEmpty();
}
}

#region person_class
public class Person
{
[JsonPropertyName("name")]
public string Name { get; set; }

[JsonPropertyName("age")]
public int Age { get; set; }

[JsonPropertyName("address")]
public string Address { get; set; }
}
#endregion person_class
// this example has been moved to https://github.com/microsoft/autogen/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs
@@ -1,62 +1,3 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Example16_OpenAIChatAgent_ConnectToThirdPartyBackend.cs
#region using_statement
using AutoGen.Core;
using AutoGen.OpenAI;
using AutoGen.OpenAI.Extension;
using Azure.AI.OpenAI;
using Azure.Core.Pipeline;
#endregion using_statement

namespace AutoGen.BasicSample;

#region CustomHttpClientHandler
public sealed class CustomHttpClientHandler : HttpClientHandler
{
private string _modelServiceUrl;

public CustomHttpClientHandler(string modelServiceUrl)
{
_modelServiceUrl = modelServiceUrl;
}

protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
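// Redirect every outgoing request to the local model server, preserving the original path and query string.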
request.RequestUri = new Uri($"{_modelServiceUrl}{request.RequestUri.PathAndQuery}");

return base.SendAsync(request, cancellationToken);
}
}
#endregion CustomHttpClientHandler

public class Example16_OpenAIChatAgent_ConnectToThirdPartyBackend
{
public static async Task RunAsync()
{
#region create_agent
using var client = new HttpClient(new CustomHttpClientHandler("http://localhost:11434"));
var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview)
{
Transport = new HttpClientTransport(client),
};

// api-key is not required for local server
// so you can use any string here
var openAIClient = new OpenAIClient("api-key", option);
var model = "llama3";

var agent = new OpenAIChatAgent(
openAIClient: openAIClient,
name: "assistant",
modelName: model,
systemMessage: "You are a helpful assistant designed to output JSON.",
seed: 0)
.RegisterMessageConnector()
.RegisterPrintMessage();
#endregion create_agent

#region send_message
await agent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?");
#endregion send_message
}
}
// this example has been moved to https://github.com/microsoft/autogen/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs
@@ -8,6 +8,7 @@

namespace AutoGen.OpenAI.Sample;

#region Function
public partial class Function
{
[Function]
@@ -16,25 +17,37 @@ public async Task<string> GetWeatherAsync(string city)
return await Task.FromResult("The weather in " + city + " is 72 degrees and sunny.");
}
}
#endregion Function

public class Tool_Call_With_Ollama_And_LiteLLM
{
public static async Task RunAsync()
{
// Before running this code, make sure you have
// - Ollama:
// - Install dolphincoder:latest in Ollama
// - Ollama running on http://localhost:11434
// - LiteLLM
// - Install LiteLLM
// - Start LiteLLM with the following command:
// - litellm --model ollama_chat/dolphincoder --port 4000

#region Create_tools
var functions = new Function();
var functionMiddleware = new FunctionCallMiddleware(
functions: [functions.GetWeatherAsyncFunctionContract],
functionMap: new Dictionary<string, Func<string, Task<string>>>
{
{ functions.GetWeatherAsyncFunctionContract.Name!, functions.GetWeatherAsyncWrapper },
});
#endregion Create_tools
#region Create_Agent
var liteLLMUrl = "http://localhost:4000";
using var httpClient = new HttpClient(new CustomHttpClientHandler(liteLLMUrl));
var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview)
{
Transport = new HttpClientTransport(httpClient),
};

// api-key is not required for local server
// so you can use any string here
@@ -43,7 +56,7 @@ public static async Task RunAsync()
var agent = new OpenAIChatAgent(
openAIClient: openAIClient,
name: "assistant",
modelName: "placeholder",
modelName: "dolphincoder:latest",
systemMessage: "You are a helpful AI assistant")
.RegisterMessageConnector()
.RegisterMiddleware(functionMiddleware)
67 changes: 67 additions & 0 deletions dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs
@@ -0,0 +1,67 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Use_Json_Mode.cs

using System.Text.Json;
using System.Text.Json.Serialization;
using AutoGen.Core;
using AutoGen.OpenAI;
using AutoGen.OpenAI.Extension;
using Azure.AI.OpenAI;
using FluentAssertions;

namespace AutoGen.BasicSample;

public class Use_Json_Mode
{
public static async Task RunAsync()
{
#region create_agent
var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable.");
var model = "gpt-3.5-turbo";

var openAIClient = new OpenAIClient(apiKey);
var openAIClientAgent = new OpenAIChatAgent(
openAIClient: openAIClient,
name: "assistant",
modelName: model,
systemMessage: "You are a helpful assistant designed to output JSON.",
seed: 0, // explicitly set a seed to enable deterministic output
responseFormat: ChatCompletionsResponseFormat.JsonObject) // set response format to JSON object to enable JSON mode
.RegisterMessageConnector()
.RegisterPrintMessage();
#endregion create_agent

#region chat_with_agent
var reply = await openAIClientAgent.SendAsync("My name is John, I am 25 years old, and I live in Seattle.");

var person = JsonSerializer.Deserialize<Person>(reply.GetContent());
Console.WriteLine($"Name: {person.Name}");
Console.WriteLine($"Age: {person.Age}");

if (!string.IsNullOrEmpty(person.Address))
{
Console.WriteLine($"Address: {person.Address}");
}

Console.WriteLine("Done.");
#endregion chat_with_agent

person.Name.Should().Be("John");
person.Age.Should().Be(25);
person.Address.Should().BeNullOrEmpty();
}
}

#region person_class
public class Person
{
[JsonPropertyName("name")]
public string Name { get; set; }

[JsonPropertyName("age")]
public int Age { get; set; }

[JsonPropertyName("address")]
public string Address { get; set; }
}
#endregion person_class
@@ -8,6 +8,7 @@
</PropertyGroup>

<ItemGroup>
<ProjectReference Include="..\..\sample\AutoGen.OpenAI.Sample\AutoGen.OpenAI.Sample.csproj" />
<ProjectReference Include="..\..\src\AutoGen.SourceGenerator\AutoGen.SourceGenerator.csproj" OutputItemType="Analyzer" ReferenceOutputAssembly="false" />
<ProjectReference Include="..\..\src\AutoGen\AutoGen.csproj" />
<ProjectReference Include="..\AutoGen.Tests\AutoGen.Tests.csproj" />
5 changes: 0 additions & 5 deletions dotnet/test/AutoGen.Tests/BasicSampleTest.cs
@@ -37,11 +37,6 @@ public async Task AgentFunctionCallTestAsync()
await Example03_Agent_FunctionCall.RunAsync();
}

[ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")]
public async Task OpenAIAgent_JsonMode()
{
await Example13_OpenAIAgent_JsonMode.RunAsync();
}

[ApiKeyFact("MISTRAL_API_KEY")]
public async Task MistralClientAgent_TokenCount()
93 changes: 93 additions & 0 deletions dotnet/website/articles/Function-call-with-ollama-and-litellm.md
@@ -0,0 +1,93 @@
This example shows how to use function calls with local LLM models, using [Ollama](https://ollama.com/) as the local model provider and the [LiteLLM](https://docs.litellm.ai/docs/) proxy server to expose an OpenAI-compatible API.

[![](https://img.shields.io/badge/Open%20on%20Github-grey?logo=github)](https://github.com/microsoft/autogen/blob/main/dotnet/sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs)

To run this example, the following prerequisites are required:
- Install [Ollama](https://ollama.com/) and [LiteLLM](https://docs.litellm.ai/docs/) on your local machine.
- A local model that supports function calling; in this example, `dolphincoder:latest` is used.

## Install Ollama and pull `dolphincoder:latest` model
First, install Ollama by following the instructions on the [Ollama website](https://ollama.com/).

After installing Ollama, pull the `dolphincoder:latest` model by running the following command:
```bash
ollama pull dolphincoder:latest
```

## Install LiteLLM and start the proxy server

You can install LiteLLM by following the instructions on the [LiteLLM website](https://docs.litellm.ai/docs/).
```bash
pip install 'litellm[proxy]'
```

Then, start the proxy server by running the following command:

```bash
litellm --model ollama_chat/dolphincoder --port 4000
```

This starts an OpenAI-compatible proxy server at `http://localhost:4000`. You can verify that the server is running by checking for the following output in the terminal:

```bash
#------------------------------------------------------------#
# #
# 'The worst thing about this product is...' #
# https://github.com/BerriAI/litellm/issues/new #
# #
#------------------------------------------------------------#

INFO: Application startup complete.
INFO: Uvicorn running on http://0.0.0.0:4000 (Press CTRL+C to quit)
```
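
If you prefer to confirm connectivity from code, a minimal sketch such as the one below can be used. This is not part of the sample; it assumes the proxy exposes the standard OpenAI-compatible `/v1/models` listing route on port 4000:

```csharp
// Quick connectivity check against the LiteLLM proxy (illustrative sketch only).
using var http = new HttpClient();
var models = await http.GetStringAsync("http://localhost:4000/v1/models");
Console.WriteLine(models); // the configured model should appear in the returned list
```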

## Install AutoGen and AutoGen.SourceGenerator
In your project, install the AutoGen and AutoGen.SourceGenerator packages using the following commands:

```bash
dotnet add package AutoGen
dotnet add package AutoGen.SourceGenerator
```

The `AutoGen.SourceGenerator` package automatically generates a type-safe `FunctionContract` for each annotated method, so you don't have to define them manually. For more information, please check out [Create type-safe function](Create-type-safe-function-call.md).
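
As a rough, illustrative sketch of what this looks like (the member names follow the `<MethodName>FunctionContract` / `<MethodName>Wrapper` convention used later in this example, but the snippet is not the generator's exact output):

```csharp
using System.Threading.Tasks;
using AutoGen.Core;

public partial class Function
{
    /// <summary>
    /// Get the weather for a city.
    /// </summary>
    /// <param name="city">The city name.</param>
    [Function]
    public async Task<string> GetWeatherAsync(string city)
    {
        return await Task.FromResult($"The weather in {city} is 72 degrees and sunny.");
    }
}

// In another partial declaration of Function, the source generator emits members such as:
//   GetWeatherAsyncFunctionContract : a FunctionContract built from the signature and XML docs
//   GetWeatherAsyncWrapper          : a Func<string, Task<string>> that parses the JSON arguments
//                                     and invokes GetWeatherAsync
```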

In your project file, also enable XML documentation generation by setting the `GenerateDocumentationFile` property to `true`; the source generator reads the structured XML comments to produce the function descriptions:

```xml
<PropertyGroup>
<!-- This enables structural xml document support -->
<GenerateDocumentationFile>true</GenerateDocumentationFile>
</PropertyGroup>
```

## Define `GetWeatherAsync` function and create @AutoGen.Core.FunctionCallMiddleware

Create a `public partial` class to host the methods you want to expose to AutoGen agents. Each method must be a `public` instance method whose return type is `Task<string>`. After the methods are defined, mark them with the `AutoGen.Core.FunctionAttribute` attribute.

[!code-csharp[Define GetWeatherAsync function](../../sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs?name=Function)]

Then create a @AutoGen.Core.FunctionCallMiddleware and add the `GetWeatherAsync` function to it. The middleware passes the `FunctionContract` to the agent when the agent generates a response, and invokes the corresponding function when a `ToolCallMessage` is received.

[!code-csharp[Create FunctionCallMiddleware](../../sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs?name=Create_tools)]

## Create @AutoGen.OpenAI.OpenAIChatAgent with the `GetWeatherAsync` tool and chat with it

Because the LiteLLM proxy server is OpenAI-compatible, we can use @AutoGen.OpenAI.OpenAIChatAgent to connect to it as a third-party OpenAI API provider. The agent is also registered with a @AutoGen.Core.FunctionCallMiddleware that contains the `GetWeatherAsync` tool, so the agent can call it when generating a response.

[!code-csharp[Create an agent with tools](../../sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs?name=Create_Agent)]
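
After the agent is created, the sample sends a question that should trigger the weather tool. A minimal sketch of that call (the exact prompt text here is illustrative):

```csharp
// The FunctionCallMiddleware intercepts the model's tool call, invokes GetWeatherAsync,
// and the reply surfaces as an AggregateMessage containing both the ToolCallMessage
// and the ToolCallResultMessage shown below.
var reply = await agent.SendAsync("what is the weather in new york?");
```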

The reply from the agent will be similar to the following:
```bash
AggregateMessage from assistant
--------------------
ToolCallMessage:
ToolCallMessage from assistant
--------------------
- GetWeatherAsync: {"city": "new york"}
--------------------

ToolCallResultMessage:
ToolCallResultMessage from assistant
--------------------
- GetWeatherAsync: The weather in new york is 72 degrees and sunny.
--------------------
```