diff --git a/.devcontainer/csollamaphi3.5/devcontainer.json b/.devcontainer/csollamaphi3.5/devcontainer.json
index 521f5059..1f0be5f6 100644
--- a/.devcontainer/csollamaphi3.5/devcontainer.json
+++ b/.devcontainer/csollamaphi3.5/devcontainer.json
@@ -1,49 +1,44 @@
{
- "name": "Ollama with Phi-3.5 for C#",
- "image": "mcr.microsoft.com/devcontainers/dotnet:9.0",
- "features": {
- "ghcr.io/devcontainers/features/docker-in-docker:2": {},
- "ghcr.io/devcontainers/features/github-cli:1": {},
- "ghcr.io/devcontainers/features/common-utils:2": {},
- "ghcr.io/devcontainers/features/dotnet:2": {
- "version": "none",
- "dotnetRuntimeVersions": "8.0",
- "aspNetCoreRuntimeVersions": "8.0"
- },
- "ghcr.io/prulloac/devcontainer-features/ollama:1": {
- "pull": "phi3"
- },
- "sshd": "latest"
+ "name": "Ollama with Phi-3.5 for C#",
+ "image": "mcr.microsoft.com/devcontainers/dotnet:9.0",
+ "features": {
+ "ghcr.io/devcontainers/features/docker-in-docker:2": {},
+ "ghcr.io/devcontainers/features/github-cli:1": {},
+ "ghcr.io/devcontainers/features/common-utils:2": {},
+ "ghcr.io/prulloac/devcontainer-features/ollama:1": {
+ "pull": "phi3.5"
},
- "customizations": {
- "vscode": {
- "extensions": [
- "ms-vscode.vscode-node-azure-pack",
- "github.vscode-github-actions",
- "ms-dotnettools.csdevkit",
- "ms-dotnettools.vscode-dotnet-runtime",
- "github.copilot",
- "ms-azuretools.vscode-docker"
- ]
- }
- },
- "forwardPorts": [
- 32000,
- 32001
- ],
- "postCreateCommand": "sudo dotnet workload update",
- "postStartCommand": "ollama pull phi3.5",
- "remoteUser": "vscode",
- "hostRequirements": {
- "memory": "8gb",
- "cpus": 4
+ "sshd": "latest"
+ },
+ "customizations": {
+ "vscode": {
+ "extensions": [
+ "ms-vscode.vscode-node-azure-pack",
+ "github.vscode-github-actions",
+ "ms-dotnettools.csdevkit",
+ "ms-dotnettools.vscode-dotnet-runtime",
+ "github.copilot",
+ "ms-azuretools.vscode-docker"
+ ]
+ }
+ },
+ "forwardPorts": [
+ 32000,
+ 32001
+ ],
+ "postCreateCommand": "sudo dotnet workload update",
+ "postStartCommand": "ollama pull all-minilm",
+ "remoteUser": "vscode",
+ "hostRequirements": {
+ "memory": "8gb",
+ "cpus": 4
+ },
+ "portsAttributes": {
+ "32001": {
+ "label": "Back End"
},
- "portsAttributes": {
- "32001": {
- "label": "Back End"
- },
- "32000": {
- "label": "Front End"
- }
+ "32000": {
+ "label": "Front End"
}
+ }
}
diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Program.cs b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Program.cs
index d6990f3e..89cdd292 100644
--- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Program.cs
+++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Program.cs
@@ -21,20 +21,19 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
-#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052
+#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052, SKEXP0070
+
using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
+var ollamaEndpoint = "http://localhost:11434";
+var modelIdChat = "phi3.5";
+
// Create kernel with a custom http address
-var builder = Kernel.CreateBuilder();
-builder.AddOpenAIChatCompletion(
- modelId: "phi3.5",
- endpoint: new Uri("http://localhost:11434"),
- apiKey: "apikey");
-var kernel = builder.Build();
+var kernel = Kernel.CreateBuilder()
+ .AddOllamaChatCompletion(modelId: modelIdChat, endpoint: new Uri(ollamaEndpoint))
+ .Build();
-// 14 - define prompt execution settings
var settings = new OpenAIPromptExecutionSettings
{
MaxTokens = 100,
diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj
index 4bf4ff65..650c0d4d 100644
--- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj
+++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj
@@ -1,14 +1,12 @@
-
Exe
- net8.0
+ net9.0
enable
enable
-
-
+
+
-
-
+
\ No newline at end of file
diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Program.cs b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Program.cs
index e731003d..c409674c 100644
--- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Program.cs
+++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Program.cs
@@ -21,26 +21,28 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
-#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052
+#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052, SKEXP0070
+
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using System.Text;
+
+var ollamaEndpoint = "http://localhost:11434";
+var modelIdChat = "phi3.5";
// Create kernel with a custom http address
-var builder = Kernel.CreateBuilder();
-builder.AddOpenAIChatCompletion(
- modelId: "phi3.5",
- endpoint: new Uri("http://localhost:11434"),
- apiKey: "apikey");
-var kernel = builder.Build();
+var kernel = Kernel.CreateBuilder()
+ .AddOllamaChatCompletion(modelId: modelIdChat, endpoint: new Uri(ollamaEndpoint))
+ .Build();
+
var chat = kernel.GetRequiredService<IChatCompletionService>();
var history = new ChatHistory();
-history.AddSystemMessage("You are a useful chatbot. If you don't know an answer, say 'I don't know!'. Always reply in a funny ways. Use emojis if possible.");
+history.AddSystemMessage("You always respond in 1 sentence in a funny way. Use emojis if possible.");
while (true)
{
- Console.Write("Q:");
+ Console.Write("Q: ");
var userQ = Console.ReadLine();
if (string.IsNullOrEmpty(userQ))
{
@@ -48,7 +50,14 @@
}
history.AddUserMessage(userQ);
- var result = await chat.GetChatMessageContentsAsync(history);
- Console.WriteLine(result[^1].Content);
- history.Add(result[^1]);
+ Console.Write($"{modelIdChat}: ");
+ var response = chat.GetStreamingChatMessageContentsAsync(history);
+ var assistantResponse = new StringBuilder();
+ await foreach (var message in response)
+ {
+ Console.Write(message.ToString());
+ assistantResponse.Append(message.ToString());
+ }
+ history.AddAssistantMessage(assistantResponse.ToString());
+ Console.WriteLine();
}
diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj
index aac9987d..3cb5f53c 100644
--- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj
+++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj
@@ -1,15 +1,13 @@
-
Exe
- net8.0
+ net9.0
enable
enable
506e8050-acbd-476d-ab7d-bbebc8238bfa
-
-
-
-
-
-
+
+
+
+
+
\ No newline at end of file
diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs
index e3df105c..4eff8b4b 100644
--- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs
+++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs
@@ -23,87 +23,60 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
-#pragma warning disable SKEXP0001
-#pragma warning disable SKEXP0003
-#pragma warning disable SKEXP0010
-#pragma warning disable SKEXP0011
-#pragma warning disable SKEXP0050
-#pragma warning disable SKEXP0052
+#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052, SKEXP0070
-using Microsoft.Extensions.DependencyInjection;
+using Microsoft.KernelMemory;
+using Microsoft.KernelMemory.AI.Ollama;
using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
-using Microsoft.SemanticKernel.Embeddings;
-using Microsoft.SemanticKernel.Memory;
-using Microsoft.SemanticKernel.Plugins.Memory;
+var ollamaEndpoint = "http://localhost:11434";
+var modelIdChat = "phi3.5";
+var modelIdEmbeddings = "all-minilm";
+
+// questions
var question = "What is Bruno's favourite super hero?";
+
Console.WriteLine($"This program will answer the following question: {question}");
-Console.WriteLine("1st approach will be to ask the question directly to the Phi-3 model.");
+Console.WriteLine($"1st approach will be to ask the question directly to the {modelIdChat} model.");
Console.WriteLine("2nd approach will be to add facts to a semantic memory and ask the question again");
Console.WriteLine("");
// Create a chat completion service
-var builder = Kernel.CreateBuilder();
-builder.AddOpenAIChatCompletion(
- modelId: "phi3.5",
- endpoint: new Uri("http://localhost:11434"),
- apiKey: "apikey");
-builder.AddLocalTextEmbeddingGeneration();
-Kernel kernel = builder.Build();
-
-Console.WriteLine($"Phi-3 response (no memory).");
+Kernel kernel = Kernel.CreateBuilder()
+ .AddOllamaChatCompletion(modelId: modelIdChat, endpoint: new Uri(ollamaEndpoint))
+ .Build();
var response = kernel.InvokePromptStreamingAsync(question);
await foreach (var result in response)
{
- Console.Write(result);
+ Console.Write(result.ToString());
}
// separator
Console.WriteLine("");
Console.WriteLine("==============");
Console.WriteLine("");
+Console.WriteLine($"{modelIdChat} response (using semantic memory).");
-// get the embeddings generator service
-var embeddingGenerator = kernel.Services.GetRequiredService<ITextEmbeddingGenerationService>();
-var memory = new SemanticTextMemory(new VolatileMemoryStore(), embeddingGenerator);
-
-// add facts to the collection
-const string MemoryCollectionName = "fanFacts";
-
-await memory.SaveInformationAsync(MemoryCollectionName, id: "info1", text: "Gisela's favourite super hero is Batman");
-await memory.SaveInformationAsync(MemoryCollectionName, id: "info2", text: "The last super hero movie watched by Gisela was Guardians of the Galaxy Vol 3");
-await memory.SaveInformationAsync(MemoryCollectionName, id: "info3", text: "Bruno's favourite super hero is Invincible");
-await memory.SaveInformationAsync(MemoryCollectionName, id: "info4", text: "The last super hero movie watched by Bruno was Aquaman II");
-await memory.SaveInformationAsync(MemoryCollectionName, id: "info5", text: "Bruno don't like the super hero movie: Eternals");
-
-TextMemoryPlugin memoryPlugin = new(memory);
-
-// Import the text memory plugin into the Kernel.
-kernel.ImportPluginFromObject(memoryPlugin);
-
-OpenAIPromptExecutionSettings settings = new()
+var configOllamaKernelMemory = new OllamaConfig
{
- ToolCallBehavior = null,
+ Endpoint = ollamaEndpoint,
+ TextModel = new OllamaModelConfig(modelIdChat),
+ EmbeddingModel = new OllamaModelConfig(modelIdEmbeddings, 2048)
};
+var memory = new KernelMemoryBuilder()
+ .WithOllamaTextGeneration(configOllamaKernelMemory)
+ .WithOllamaTextEmbeddingGeneration(configOllamaKernelMemory)
+ .Build();
-var prompt = @"
- Question: {{$input}}
- Answer the question using the memory content: {{Recall}}";
+await memory.ImportTextAsync("Gisela's favourite super hero is Batman");
+await memory.ImportTextAsync("The last super hero movie watched by Gisela was Guardians of the Galaxy Vol 3");
+await memory.ImportTextAsync("Bruno's favourite super hero is Invincible");
+await memory.ImportTextAsync("The last super hero movie watched by Bruno was Deadpool and Wolverine");
+await memory.ImportTextAsync("Bruno don't like the super hero movie: Eternals");
-var arguments = new KernelArguments(settings)
+var answer = memory.AskStreamingAsync(question);
+await foreach (var result in answer)
{
- { "input", question },
- { "collection", MemoryCollectionName }
-};
-
-Console.WriteLine($"Phi-3 response (using semantic memory).");
-
-response = kernel.InvokePromptStreamingAsync(prompt, arguments);
-await foreach (var result in response)
-{
- Console.Write(result);
-}
-
-Console.WriteLine($"");
\ No newline at end of file
+ Console.Write(result.ToString());
+}
\ No newline at end of file
diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj
index efb32fc4..53b69c1a 100644
--- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj
+++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj
@@ -1,17 +1,20 @@
-
Exe
- net8.0
+ net9.0
sample03
enable
enable
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Program.cs b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Program.cs
index 6575ba44..68591c9e 100644
--- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Program.cs
+++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Program.cs
@@ -1,6 +1,8 @@
// Copyright (c) 2024
// Author : Bruno Capuano
// Change Log :
+// - Sample console application to use a local model hosted in ollama and semantic memory for search
+//
// The MIT License (MIT)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
@@ -21,97 +23,73 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
-#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052
-using Microsoft.Extensions.Configuration;
-using Microsoft.Extensions.DependencyInjection;
-using Microsoft.Extensions.Hosting;
-using Microsoft.Extensions.Logging;
-using Microsoft.Extensions.Options;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
-
-using OpenTelemetry;
-using OpenTelemetry.Metrics;
-using OpenTelemetry.Resources;
-using OpenTelemetry.Trace;
-using System.Text;
+#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052, SKEXP0070
-// Define endpoints for telemetry and Phi-3
-var otlpEndPoint = "http://localhost:4317";
-var phi3EndPoint = "http://localhost:11434";
+using Microsoft.KernelMemory;
+using Microsoft.KernelMemory.AI.Ollama;
+using Microsoft.SemanticKernel;
-// Create kernel with a custom http address
-var builder = Kernel.CreateBuilder();
-builder.AddOpenAIChatCompletion(
- modelId: "phi3.5",
- endpoint: new Uri(phi3EndPoint),
- apiKey: "apikey");
-ConfigureOpenTelemetry(builder, otlpEndPoint);
-var kernel = builder.Build();
+var ollamaEndpoint = "http://localhost:11434";
+var modelIdChat = "phi3.5";
+var modelIdEmbeddings = "all-minilm";
-var chat = kernel.GetRequiredService<IChatCompletionService>();
-var history = new ChatHistory();
-history.AddSystemMessage("You are a useful chatbot. If you don't know an answer, say 'I don't know!'. Always reply in a funny ways. Use emojis if possible.");
+// questions
+var questionEnglish = "What is Bruno's favourite super hero?";
+var questionSpanish = "Cual es el SuperHeroe favorito de Bruno?";
+var questionFrench = "Quel est le super-héros préféré de Bruno?";
+var question = questionEnglish;
-while (true)
-{
- Console.Write("Q: ");
- var userQ = Console.ReadLine();
- if (string.IsNullOrEmpty(userQ))
- {
- break;
- }
- history.AddUserMessage(userQ);
- kernel.Services.GetRequiredService<ILogger<Program>>().LogInformation($"User Question: {userQ}");
+// intro
+SpectreConsoleOutput.DisplayTitle(modelIdChat);
+SpectreConsoleOutput.DisplayTitleH2($"This program will answer the following question:");
+SpectreConsoleOutput.DisplayTitleH2(question);
+SpectreConsoleOutput.DisplayTitleH3($"1st approach will be to ask the question directly to the {modelIdChat} model.");
+SpectreConsoleOutput.DisplayTitleH3("2nd approach will be to add facts to a semantic memory and ask the question again");
+Console.WriteLine("");
- Console.Write($"Phi-3: ");
- StringBuilder sb = new();
- var result = chat.GetStreamingChatMessageContentsAsync(history);
- await foreach (var item in result)
- {
- sb.Append(item);
- Console.Write(item);
- }
- Console.WriteLine("");
- history.AddAssistantMessage(sb.ToString());
+SpectreConsoleOutput.DisplayTitleH2($"{modelIdChat} response (no memory).");
- // logging message
- kernel.Services.GetRequiredService<ILogger<Program>>().LogInformation($"Phi-3 Response: {sb.ToString()}");
-}
+// Create a kernel with Azure OpenAI chat completion
+var builder = Kernel.CreateBuilder().AddOllamaChatCompletion(
+ modelId: modelIdChat,
+ endpoint: new Uri(ollamaEndpoint));
-static IKernelBuilder ConfigureOpenTelemetry(IKernelBuilder builder, string otlpEndPoint)
+Kernel kernel = builder.Build();
+var response = kernel.InvokePromptStreamingAsync(question);
+await foreach (var result in response)
{
- builder.Services.AddLogging(logging =>
- {
- //logging.AddSimpleConsole(options => options.TimestampFormat = "hh:mm:ss ");
- logging.SetMinimumLevel(LogLevel.Debug);
+ SpectreConsoleOutput.WriteGreen(result.ToString());
+}
- //logging.AddConsole();
- logging.Configure(options =>
- {
- options.ActivityTrackingOptions = ActivityTrackingOptions.SpanId;
- });
- });
+// separator
+Console.WriteLine("");
+SpectreConsoleOutput.DisplaySeparator();
+Console.WriteLine("Press Enter to continue");
+Console.ReadLine();
+SpectreConsoleOutput.DisplayTitleH2($"{modelIdChat} response (using semantic memory).");
- builder.Services.AddOpenTelemetry()
- .ConfigureResource(c => c.AddService("Sample04"))
- .WithMetrics(metrics =>
- {
- metrics.AddHttpClientInstrumentation()
- .AddRuntimeInstrumentation();
- })
- .WithTracing(tracing =>
- {
- tracing.AddHttpClientInstrumentation();
- });
+var configOllamaKernelMemory = new OllamaConfig
+{
+ Endpoint = ollamaEndpoint,
+ TextModel = new OllamaModelConfig(modelIdChat),
+ EmbeddingModel = new OllamaModelConfig(modelIdEmbeddings, 2048)
+};
+var memory = new KernelMemoryBuilder()
+ .WithOllamaTextGeneration(configOllamaKernelMemory)
+ .WithOllamaTextEmbeddingGeneration(configOllamaKernelMemory)
+ .Build();
- var useOtlpExporter = !string.IsNullOrWhiteSpace(otlpEndPoint);
- if (useOtlpExporter)
- {
- builder.Services.AddOpenTelemetry().UseOtlpExporter();
- }
+await memory.ImportTextAsync("Gisela's favourite super hero is Batman");
+await memory.ImportTextAsync("The last super hero movie watched by Gisela was Guardians of the Galaxy Vol 3");
+await memory.ImportTextAsync("Bruno's favourite super hero is Invincible");
+await memory.ImportTextAsync("The last super hero movie watched by Bruno was Deadpool and Wolverine");
+await memory.ImportTextAsync("Bruno don't like the super hero movie: Eternals");
- return builder;
+var answer = memory.AskStreamingAsync(question);
+await foreach (var result in answer)
+{
+ SpectreConsoleOutput.WriteGreen(result.ToString());
}
+
+Console.WriteLine($"");
\ No newline at end of file
diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj
index ffe88500..e902b4fe 100644
--- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj
+++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj
@@ -1,24 +1,21 @@
-
Exe
- net8.0
+ net9.0
enable
enable
506e8050-acbd-476d-ab7d-bbebc8238bfa
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/SpectreConsoleOutput.cs b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/SpectreConsoleOutput.cs
new file mode 100644
index 00000000..38226119
--- /dev/null
+++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/SpectreConsoleOutput.cs
@@ -0,0 +1,128 @@
+// Copyright (c) 2024
+// Author : Bruno Capuano
+// Change Log :
+//
+// The MIT License (MIT)
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052
+using Spectre.Console;
+
+public static class SpectreConsoleOutput
+{
+ public static void DisplayTitle(string modelId = "")
+ {
+ var title = $"{modelId} RAG";
+
+ AnsiConsole.Write(new FigletText(title).Centered().Color(Color.Purple));
+ }
+
+ public static void DisplayTitleH2(string subtitle)
+ {
+ AnsiConsole.MarkupLine($"[bold][blue]=== {subtitle} ===[/][/]");
+ AnsiConsole.MarkupLine($"");
+ }
+
+ public static void DisplayTitleH3(string subtitle)
+ {
+ AnsiConsole.MarkupLine($"[bold]>> {subtitle}[/]");
+ AnsiConsole.MarkupLine($"");
+ }
+
+ public static void DisplaySeparator()
+ {
+ AnsiConsole.MarkupLine($"");
+ AnsiConsole.MarkupLine($"[bold][blue]==============[/][/]");
+ AnsiConsole.MarkupLine($"");
+ }
+
+ public static void WriteGreen(string message)
+ {
+ try
+ {
+ AnsiConsole.Markup($"[green]{message}[/]");
+ }
+ catch
+ {
+ AnsiConsole.Write($"{message}");
+ }
+ }
+
+ public static void DisplayQuestion(string question)
+ {
+ AnsiConsole.MarkupLine($"[bold][blue]>>Q: {question}[/][/]");
+ AnsiConsole.MarkupLine($"");
+ }
+ public static void DisplayAnswerStart(string answerPrefix)
+ {
+ AnsiConsole.Markup($"[bold][blue]>> {answerPrefix}:[/][/]");
+ }
+
+ public static void DisplayFilePath(string prefix, string filePath)
+ {
+ var path = new TextPath(filePath);
+
+ AnsiConsole.Markup($"[bold][blue]>> {prefix}: [/][/]");
+ AnsiConsole.Write(path);
+ AnsiConsole.MarkupLine($"");
+ }
+
+ public static void DisplaySubtitle(string prefix, string content)
+ {
+ AnsiConsole.Markup($"[bold][blue]>> {prefix}: [/][/]");
+ AnsiConsole.WriteLine(content);
+ AnsiConsole.MarkupLine($"");
+ }
+
+ public static int AskForNumber(string question)
+ {
+ var number = AnsiConsole.Ask<int>(@$"[green]{question}[/]");
+ return number;
+ }
+
+ public static string AskForString(string question)
+ {
+ var response = AnsiConsole.Ask<string>(@$"[green]{question}[/]");
+ return response;
+ }
+
+ public static List<string> SelectScenarios()
+ {
+ // Ask for the user's favorite fruits
+ var scenarios = AnsiConsole.Prompt(
+ new MultiSelectionPrompt<string>()
+ .Title("Select the [green]Phi 3 Vision scenarios[/] to run?")
+ .PageSize(10)
+ .Required(true)
+ .MoreChoicesText("[grey](Move up and down to reveal more scenarios)[/]")
+ .InstructionsText(
+ "[grey](Press [blue]<space>[/] to toggle a scenario, " +
+ "[green]<enter>[/] to accept)[/]")
+ .AddChoiceGroup("Select an image to be analyzed", new[]
+ {"foggyday.png","foggydaysmall.png","petsmusic.png","ultrarunningmug.png",
+ })
+ .AddChoices( new[] {
+ "Type the image path to be analyzed",
+ "Type a question"
+ })
+ );
+ return scenarios;
+ }
+}
\ No newline at end of file