[.Net] rename Autogen.Ollama to AutoGen.Ollama and add more test cases to AutoGen.Ollama (microsoft#2772)

* update test

* add llava test

* add more tests

* rm Autogen.Ollama

* add AutoGen.ollama

* update

* rename to temp

* remove ollama

* rename

* update

* rename

* rename

* update
LittleLittleCloud authored May 23, 2024
1 parent 1539427 commit e03fb13
Showing 28 changed files with 735 additions and 295 deletions.
7 changes: 6 additions & 1 deletion dotnet/.editorconfig
@@ -141,7 +141,7 @@ csharp_preserve_single_line_statements = true
csharp_preserve_single_line_blocks = true

# Code block
csharp_prefer_braces = false:none
csharp_prefer_braces = true:warning

# Using statements
csharp_using_directive_placement = outside_namespace:error
@@ -173,6 +173,11 @@ dotnet_diagnostic.CS1573.severity = none
# disable CS1570: XML comment has badly formed XML
dotnet_diagnostic.CS1570.severity = none

dotnet_diagnostic.IDE0035.severity = warning # Remove unreachable code
dotnet_diagnostic.IDE0161.severity = warning # Use file-scoped namespace

csharp_style_var_elsewhere = true:suggestion # Prefer 'var' everywhere

# disable check for generated code
[*.generated.cs]
generated_code = true
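
For context, the tightened .editorconfig rules above (braces now required via csharp_prefer_braces = true:warning, file-scoped namespaces via IDE0161, and 'var' preferred elsewhere) push toward code shaped like the following minimal sketch; it is illustrative only and not part of this commit:

// Illustrative sketch of code that satisfies the updated rules (not from this commit).
using System.Collections.Generic;

namespace AutoGen.Example;   // file-scoped namespace (IDE0161)

public class RuleDemo
{
    public int CountPositives(IEnumerable<int> values)
    {
        var count = 0;                // 'var' preferred elsewhere
        foreach (var value in values)
        {
            if (value > 0)
            {
                count++;              // braces even for single-line bodies
            }
        }

        return count;
    }
}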
29 changes: 15 additions & 14 deletions dotnet/AutoGen.sln
@@ -35,12 +35,13 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Mistral.Tests", "te
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.SemanticKernel.Tests", "test\AutoGen.SemanticKernel.Tests\AutoGen.SemanticKernel.Tests.csproj", "{1DFABC4A-8458-4875-8DCB-59F3802DAC65}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AutoGen.OpenAI.Tests", "test\AutoGen.OpenAI.Tests\AutoGen.OpenAI.Tests.csproj", "{D36A85F9-C172-487D-8192-6BFE5D05B4A7}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AutoGen.DotnetInteractive.Tests", "test\AutoGen.DotnetInteractive.Tests\AutoGen.DotnetInteractive.Tests.csproj", "{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.OpenAI.Tests", "test\AutoGen.OpenAI.Tests\AutoGen.OpenAI.Tests.csproj", "{D36A85F9-C172-487D-8192-6BFE5D05B4A7}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Autogen.Ollama", "src\Autogen.Ollama\Autogen.Ollama.csproj", "{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.DotnetInteractive.Tests", "test\AutoGen.DotnetInteractive.Tests\AutoGen.DotnetInteractive.Tests.csproj", "{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Autogen.Ollama.Tests", "test\Autogen.Ollama.Tests\Autogen.Ollama.Tests.csproj", "{C24FDE63-952D-4F8E-A807-AF31D43AD675}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Ollama", "src\AutoGen.Ollama\AutoGen.Ollama.csproj", "{9F9E6DED-3D92-4970-909A-70FC11F1A665}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Ollama.Tests", "test\AutoGen.Ollama.Tests\AutoGen.Ollama.Tests.csproj", "{03E31CAA-3728-48D3-B936-9F11CF6C18FE}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -96,14 +97,6 @@ Global
{15441693-3659-4868-B6C1-B106F52FF3BA}.Debug|Any CPU.Build.0 = Debug|Any CPU
{15441693-3659-4868-B6C1-B106F52FF3BA}.Release|Any CPU.ActiveCfg = Release|Any CPU
{15441693-3659-4868-B6C1-B106F52FF3BA}.Release|Any CPU.Build.0 = Release|Any CPU
{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1}.Release|Any CPU.Build.0 = Release|Any CPU
{C24FDE63-952D-4F8E-A807-AF31D43AD675}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C24FDE63-952D-4F8E-A807-AF31D43AD675}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C24FDE63-952D-4F8E-A807-AF31D43AD675}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C24FDE63-952D-4F8E-A807-AF31D43AD675}.Release|Any CPU.Build.0 = Release|Any CPU
{1DFABC4A-8458-4875-8DCB-59F3802DAC65}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{1DFABC4A-8458-4875-8DCB-59F3802DAC65}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1DFABC4A-8458-4875-8DCB-59F3802DAC65}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -116,6 +109,14 @@ Global
{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}.Debug|Any CPU.Build.0 = Debug|Any CPU
{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}.Release|Any CPU.ActiveCfg = Release|Any CPU
{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}.Release|Any CPU.Build.0 = Release|Any CPU
{9F9E6DED-3D92-4970-909A-70FC11F1A665}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9F9E6DED-3D92-4970-909A-70FC11F1A665}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9F9E6DED-3D92-4970-909A-70FC11F1A665}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9F9E6DED-3D92-4970-909A-70FC11F1A665}.Release|Any CPU.Build.0 = Release|Any CPU
{03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -133,11 +134,11 @@ Global
{63445BB7-DBB9-4AEF-9D6F-98BBE75EE1EC} = {18BF8DD7-0585-48BF-8F97-AD333080CE06}
{6585D1A4-3D97-4D76-A688-1933B61AEB19} = {18BF8DD7-0585-48BF-8F97-AD333080CE06}
{15441693-3659-4868-B6C1-B106F52FF3BA} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1} = {18BF8DD7-0585-48BF-8F97-AD333080CE06}
{C24FDE63-952D-4F8E-A807-AF31D43AD675} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{1DFABC4A-8458-4875-8DCB-59F3802DAC65} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{D36A85F9-C172-487D-8192-6BFE5D05B4A7} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{9F9E6DED-3D92-4970-909A-70FC11F1A665} = {18BF8DD7-0585-48BF-8F97-AD333080CE06}
{03E31CAA-3728-48D3-B936-9F11CF6C18FE} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {93384647-528D-46C8-922C-8DB36A382F0B}
2 changes: 2 additions & 0 deletions dotnet/src/AutoGen.Core/Message/ImageMessage.cs
@@ -49,7 +49,9 @@ public ImageMessage(Role role, BinaryData data, string? from = null)
public string BuildDataUri()
{
if (this.Data is null)
{
throw new NullReferenceException($"{nameof(Data)}");
}

return $"data:{this.Data.MediaType};base64,{Convert.ToBase64String(this.Data.ToArray())}";
}
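
The added braces above guard the null check in BuildDataUri, which turns an ImageMessage's BinaryData into a base64 data URI. A hedged usage sketch follows; the BinaryData.FromBytes(byte[], string) media-type overload and Role.User are assumptions not shown in this diff:

// Illustrative only: build a data URI from raw image bytes via ImageMessage.
using System;
using System.IO;
using AutoGen.Core;

public static class ImageMessageDemo
{
    public static string ToDataUri(string path)
    {
        byte[] bytes = File.ReadAllBytes(path);
        // Media-type-aware FromBytes overload assumed (System.Memory.Data 8.x).
        var image = new ImageMessage(Role.User, BinaryData.FromBytes(bytes, "image/png"));
        return image.BuildDataUri(); // e.g. "data:image/png;base64,iVBORw0KGgo..."
    }
}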
2 changes: 1 addition & 1 deletion dotnet/src/AutoGen.Mistral/DTOs/ChatMessage.cs
@@ -13,7 +13,7 @@ public class ChatMessage
/// </summary>
/// <param name="role">role.</param>
/// <param name="content">content.</param>
public ChatMessage(RoleEnum? role = default(RoleEnum?), string? content = null)
public ChatMessage(RoleEnum? role = default, string? content = null)
{
this.Role = role;
this.Content = content;
@@ -13,15 +13,14 @@
using System.Threading.Tasks;
using AutoGen.Core;

namespace Autogen.Ollama;
namespace AutoGen.Ollama;

/// <summary>
/// An agent that can interact with ollama models.
/// </summary>
public class OllamaAgent : IStreamingAgent
{
private readonly HttpClient _httpClient;
public string Name { get; }
private readonly string _modelName;
private readonly string _systemMessage;
private readonly OllamaReplyOptions? _replyOptions;
@@ -36,13 +35,14 @@ public OllamaAgent(HttpClient httpClient, string name, string modelName,
_systemMessage = systemMessage;
_replyOptions = replyOptions;
}

public async Task<IMessage> GenerateReplyAsync(
IEnumerable<IMessage> messages, GenerateReplyOptions? options = null, CancellationToken cancellation = default)
{
ChatRequest request = await BuildChatRequest(messages, options);
request.Stream = false;
using (HttpResponseMessage? response = await _httpClient
.SendAsync(BuildRequestMessage(request), HttpCompletionOption.ResponseContentRead, cancellation))
var httpRequest = BuildRequest(request);
using (HttpResponseMessage? response = await _httpClient.SendAsync(httpRequest, HttpCompletionOption.ResponseContentRead, cancellation))
{
response.EnsureSuccessStatusCode();
Stream? streamResponse = await response.Content.ReadAsStreamAsync();
@@ -52,14 +52,15 @@ public async Task<IMessage> GenerateReplyAsync(
return output;
}
}

public async IAsyncEnumerable<IStreamingMessage> GenerateStreamingReplyAsync(
IEnumerable<IMessage> messages,
GenerateReplyOptions? options = null,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ChatRequest request = await BuildChatRequest(messages, options);
request.Stream = true;
HttpRequestMessage message = BuildRequestMessage(request);
HttpRequestMessage message = BuildRequest(request);
using (HttpResponseMessage? response = await _httpClient.SendAsync(message, HttpCompletionOption.ResponseHeadersRead, cancellationToken))
{
response.EnsureSuccessStatusCode();
@@ -69,22 +70,28 @@ public async IAsyncEnumerable<IStreamingMessage> GenerateStreamingReplyAsync(
while (!reader.EndOfStream && !cancellationToken.IsCancellationRequested)
{
string? line = await reader.ReadLineAsync();
if (string.IsNullOrWhiteSpace(line)) continue;
if (string.IsNullOrWhiteSpace(line))
{
continue;
}

ChatResponseUpdate? update = JsonSerializer.Deserialize<ChatResponseUpdate>(line);
if (update != null)
if (update is { Done: false })
{
yield return new MessageEnvelope<ChatResponseUpdate>(update, from: Name);
}
else
{
var finalUpdate = JsonSerializer.Deserialize<ChatResponse>(line) ?? throw new Exception("Failed to deserialize response");

if (update is { Done: false }) continue;

ChatResponse? chatMessage = JsonSerializer.Deserialize<ChatResponse>(line);
if (chatMessage == null) continue;
yield return new MessageEnvelope<ChatResponse>(chatMessage, from: Name);
yield return new MessageEnvelope<ChatResponse>(finalUpdate, from: Name);
}
}
}
}

public string Name { get; }

private async Task<ChatRequest> BuildChatRequest(IEnumerable<IMessage> messages, GenerateReplyOptions? options)
{
var request = new ChatRequest
@@ -152,65 +159,27 @@ private void BuildChatRequestOptions(OllamaReplyOptions replyOptions, ChatReques
}
private async Task<List<Message>> BuildChatHistory(IEnumerable<IMessage> messages)
{
if (!messages.Any(m => m.IsSystemMessage()))
var history = messages.Select(m => m switch
{
var systemMessage = new TextMessage(Role.System, _systemMessage, from: Name);
messages = new[] { systemMessage }.Concat(messages);
}
IMessage<Message> chatMessage => chatMessage.Content,
_ => throw new ArgumentException("Invalid message type")
});

var collection = new List<Message>();
foreach (IMessage? message in messages)
// if there's no system message in the history, add one to the beginning
if (!history.Any(m => m.Role == "system"))
{
Message item;
switch (message)
{
case TextMessage tm:
item = new Message { Role = tm.Role.ToString(), Value = tm.Content };
break;
case ImageMessage im:
string base64Image = await ImageUrlToBase64(im.Url!);
item = new Message { Role = im.Role.ToString(), Images = [base64Image] };
break;
case MultiModalMessage mm:
var textsGroupedByRole = mm.Content.OfType<TextMessage>().GroupBy(tm => tm.Role)
.ToDictionary(g => g.Key, g => string.Join(Environment.NewLine, g.Select(tm => tm.Content)));

string content = string.Join($"{Environment.NewLine}", textsGroupedByRole
.Select(g => $"{g.Key}{Environment.NewLine}:{g.Value}"));

IEnumerable<Task<string>> imagesConversionTasks = mm.Content
.OfType<ImageMessage>()
.Select(async im => await ImageUrlToBase64(im.Url!));

string[]? imagesBase64 = await Task.WhenAll(imagesConversionTasks);
item = new Message { Role = mm.Role.ToString(), Value = content, Images = imagesBase64 };
break;
default:
throw new NotSupportedException();
}

collection.Add(item);
history = new[] { new Message() { Role = "system", Value = _systemMessage } }.Concat(history);
}

return collection;
return history.ToList();
}
private static HttpRequestMessage BuildRequestMessage(ChatRequest request)

private static HttpRequestMessage BuildRequest(ChatRequest request)
{
string serialized = JsonSerializer.Serialize(request);
return new HttpRequestMessage(HttpMethod.Post, OllamaConsts.ChatCompletionEndpoint)
{
Content = new StringContent(serialized, Encoding.UTF8, OllamaConsts.JsonMediaType)
};
}
private async Task<string> ImageUrlToBase64(string imageUrl)
{
if (string.IsNullOrWhiteSpace(imageUrl))
{
throw new ArgumentException("required parameter", nameof(imageUrl));
}
byte[] imageBytes = await _httpClient.GetByteArrayAsync(imageUrl);
return imageBytes != null
? Convert.ToBase64String(imageBytes)
: throw new InvalidOperationException("no image byte array");
}
}
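
Taken together, the renamed OllamaAgent builds a ChatRequest from the chat history, posts it to Ollama's chat endpoint, and either returns a single ChatResponse or streams ChatResponseUpdate envelopes until done. A hedged usage sketch (the local endpoint, model name, and the optional systemMessage/replyOptions defaults are assumptions, not part of this diff):

// Illustrative only: wiring the renamed OllamaAgent against a local Ollama server.
using System;
using System.Net.Http;
using System.Threading.Tasks;
using AutoGen.Core;
using AutoGen.Ollama;

public static class OllamaAgentDemo
{
    public static async Task RunAsync()
    {
        // Default Ollama port assumed; adjust to your deployment.
        using var httpClient = new HttpClient { BaseAddress = new Uri("http://localhost:11434") };
        var agent = new OllamaAgent(httpClient, name: "assistant", modelName: "llama3");

        // Non-streaming: one ChatResponse wrapped in a MessageEnvelope.
        IMessage reply = await agent.GenerateReplyAsync(
            new IMessage[] { new TextMessage(Role.User, "Why is the sky blue?") });
        Console.WriteLine(reply);

        // Streaming: ChatResponseUpdate envelopes while Done == false, then a final ChatResponse.
        await foreach (var update in agent.GenerateStreamingReplyAsync(
            new IMessage[] { new TextMessage(Role.User, "Tell me a joke.") }))
        {
            Console.WriteLine(update);
        }
    }
}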
@@ -2,6 +2,7 @@

<PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework>
<RootNamespace>AutoGen.Ollama</RootNamespace>
<GenerateDocumentationFile>True</GenerateDocumentationFile>
</PropertyGroup>

@@ -1,11 +1,10 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// ChatRequest.cs

using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace Autogen.Ollama;
namespace AutoGen.Ollama;

public class ChatRequest
{
@@ -19,7 +18,7 @@ public class ChatRequest
/// the messages of the chat, this can be used to keep a chat memory
/// </summary>
[JsonPropertyName("messages")]
public IList<Message> Messages { get; set; } = Array.Empty<Message>();
public IList<Message> Messages { get; set; } = [];

/// <summary>
/// the format to return a response in. Currently, the only accepted value is json
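
For reference, ChatRequest is the body that BuildRequest above posts to Ollama's /api/chat endpoint. A hedged construction sketch — the Model property and the exact JSON property names beyond "messages" are assumptions; Stream is set by OllamaAgent in the diff above:

// Illustrative only: a request body roughly matching Ollama's /api/chat contract.
using System.Collections.Generic;
using System.Text.Json;
using AutoGen.Ollama;

var request = new ChatRequest
{
    Model = "llama3",   // property assumed; not shown in the collapsed diff
    Stream = false,
    Messages = new List<Message>
    {
        new Message("system", "You are a helpful AI assistant"),
        new Message("user", "Why is the sky blue?"),
    },
};

string body = JsonSerializer.Serialize(request);
// Roughly: {"model":"llama3","messages":[{"role":"system",...},{"role":"user",...}],"stream":false}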
@@ -3,7 +3,7 @@

using System.Text.Json.Serialization;

namespace Autogen.Ollama;
namespace AutoGen.Ollama;

public class ChatResponse : ChatResponseUpdate
{
21 changes: 21 additions & 0 deletions dotnet/src/AutoGen.Ollama/DTOs/ChatResponseUpdate.cs
@@ -0,0 +1,21 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// ChatResponseUpdate.cs

using System.Text.Json.Serialization;

namespace AutoGen.Ollama;

public class ChatResponseUpdate
{
[JsonPropertyName("model")]
public string Model { get; set; } = string.Empty;

[JsonPropertyName("created_at")]
public string CreatedAt { get; set; } = string.Empty;

[JsonPropertyName("message")]
public Message? Message { get; set; }

[JsonPropertyName("done")]
public bool Done { get; set; }
}
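
Each line of Ollama's streaming chat response deserializes into this new ChatResponseUpdate; OllamaAgent re-parses the final line (done: true) as a ChatResponse. A small hedged sketch — the sample JSON follows Ollama's documented stream format, not this diff:

// Illustrative only: one streamed line mapped onto ChatResponseUpdate.
using System;
using System.Text.Json;
using AutoGen.Ollama;

var line = "{\"model\":\"llama3\",\"created_at\":\"2024-05-23T00:00:00Z\"," +
           "\"message\":{\"role\":\"assistant\",\"content\":\"Hi\"},\"done\":false}";

ChatResponseUpdate? update = JsonSerializer.Deserialize<ChatResponseUpdate>(line);
// update.Message carries the assistant delta; Done flips to true on the final line.
Console.WriteLine($"{update?.Model} (done: {update?.Done})");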
@@ -4,25 +4,20 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace Autogen.Ollama;
namespace AutoGen.Ollama;

public class ChatResponseUpdate
public class Message
{
[JsonPropertyName("model")]
public string Model { get; set; } = string.Empty;

[JsonPropertyName("created_at")]
public string CreatedAt { get; set; } = string.Empty;

[JsonPropertyName("message")]
public Message? Message { get; set; }
public Message()
{
}

[JsonPropertyName("done")]
public bool Done { get; set; }
}
public Message(string role, string value)
{
Role = role;
Value = value;
}

public class Message
{
/// <summary>
/// the role of the message, either system, user or assistant
/// </summary>
@@ -3,7 +3,7 @@

using System.Text.Json.Serialization;

namespace Autogen.Ollama;
namespace AutoGen.Ollama;

//https://github.com/ollama/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values
public class ModelReplyOptions
@@ -3,12 +3,12 @@

using AutoGen.Core;

namespace Autogen.Ollama;
namespace AutoGen.Ollama;

public enum FormatType
{
None,
Json
Json,
}

public class OllamaReplyOptions : GenerateReplyOptions
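
OllamaReplyOptions extends GenerateReplyOptions so Ollama-specific settings (such as the JSON response format above) can ride along with a normal reply call. A hedged sketch — the Format property name is an assumption, since the class body is collapsed in this diff:

// Illustrative only: Format is assumed; the rest of OllamaReplyOptions is not shown here.
using AutoGen.Ollama;

var options = new OllamaReplyOptions
{
    Format = FormatType.Json, // ask the model to reply with JSON
};
// Passed like any GenerateReplyOptions, e.g.:
// var reply = await agent.GenerateReplyAsync(messages, options);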
@@ -4,7 +4,7 @@
using System.Threading;
using System.Threading.Tasks;

namespace Autogen.Ollama;
namespace AutoGen.Ollama;

public interface ITextEmbeddingService
{