From 578f2398e9430baaaba972e21495427214a6f9f7 Mon Sep 17 00:00:00 2001 From: Bruno Capuano Date: Mon, 27 Jan 2025 11:35:14 -0500 Subject: [PATCH 1/4] Refactor for Ollama integration and UI enhancements Updated devcontainer configuration to pull models and improved formatting. Refactored Program.cs to replace OpenAI with Ollama chat completion and enhanced chat history management. Updated project files to reference newer Semantic Kernel packages and added memory management components. Introduced SpectreConsoleOutput.cs for better console message formatting and improved overall code structure for readability. --- .../csollamaphi3.5/devcontainer.json | 86 +++++------ .../src/Sample01/Program.cs | 17 +-- .../src/Sample01/Sample01.csproj | 19 +-- .../src/Sample02/Program.cs | 35 +++-- .../src/Sample02/Sample02.csproj | 3 +- .../src/Sample03/Program.cs | 89 ++++------- .../src/Sample03/Sample03.csproj | 10 +- .../src/Sample04/Program.cs | 138 ++++++++---------- .../src/Sample04/Sample04.csproj | 20 +-- .../src/Sample04/SpectreConsoleOutput.cs | 128 ++++++++++++++++ 10 files changed, 320 insertions(+), 225 deletions(-) create mode 100644 md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/SpectreConsoleOutput.cs diff --git a/.devcontainer/csollamaphi3.5/devcontainer.json b/.devcontainer/csollamaphi3.5/devcontainer.json index 521f5059..bd3af422 100644 --- a/.devcontainer/csollamaphi3.5/devcontainer.json +++ b/.devcontainer/csollamaphi3.5/devcontainer.json @@ -1,49 +1,49 @@ { - "name": "Ollama with Phi-3.5 for C#", - "image": "mcr.microsoft.com/devcontainers/dotnet:9.0", - "features": { - "ghcr.io/devcontainers/features/docker-in-docker:2": {}, - "ghcr.io/devcontainers/features/github-cli:1": {}, - "ghcr.io/devcontainers/features/common-utils:2": {}, - "ghcr.io/devcontainers/features/dotnet:2": { - "version": "none", - "dotnetRuntimeVersions": "8.0", - "aspNetCoreRuntimeVersions": "8.0" - }, - "ghcr.io/prulloac/devcontainer-features/ollama:1": { - "pull": "phi3" - }, - "sshd": 
"latest" + "name": "Ollama with Phi-3.5 for C#", + "image": "mcr.microsoft.com/devcontainers/dotnet:9.0", + "features": { + "ghcr.io/devcontainers/features/docker-in-docker:2": {}, + "ghcr.io/devcontainers/features/github-cli:1": {}, + "ghcr.io/devcontainers/features/common-utils:2": {}, + "ghcr.io/devcontainers/features/dotnet:2": { + "version": "none", + "dotnetRuntimeVersions": "8.0", + "aspNetCoreRuntimeVersions": "8.0" }, - "customizations": { - "vscode": { - "extensions": [ - "ms-vscode.vscode-node-azure-pack", - "github.vscode-github-actions", - "ms-dotnettools.csdevkit", - "ms-dotnettools.vscode-dotnet-runtime", - "github.copilot", - "ms-azuretools.vscode-docker" - ] - } + "ghcr.io/prulloac/devcontainer-features/ollama:1": { + "pull": "phi3" }, - "forwardPorts": [ - 32000, - 32001 - ], - "postCreateCommand": "sudo dotnet workload update", - "postStartCommand": "ollama pull phi3.5", - "remoteUser": "vscode", - "hostRequirements": { - "memory": "8gb", - "cpus": 4 + "sshd": "latest" + }, + "customizations": { + "vscode": { + "extensions": [ + "ms-vscode.vscode-node-azure-pack", + "github.vscode-github-actions", + "ms-dotnettools.csdevkit", + "ms-dotnettools.vscode-dotnet-runtime", + "github.copilot", + "ms-azuretools.vscode-docker" + ] + } + }, + "forwardPorts": [ + 32000, + 32001 + ], + "postCreateCommand": "sudo dotnet workload update", + "postStartCommand": "ollama pull phi3.5 & ollama pull all-minilm", + "remoteUser": "vscode", + "hostRequirements": { + "memory": "8gb", + "cpus": 4 + }, + "portsAttributes": { + "32001": { + "label": "Back End" }, - "portsAttributes": { - "32001": { - "label": "Back End" - }, - "32000": { - "label": "Front End" - } + "32000": { + "label": "Front End" } + } } diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Program.cs b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Program.cs index d6990f3e..89cdd292 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Program.cs +++ 
b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Program.cs @@ -21,20 +21,19 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. -#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052 +#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052, SKEXP0070 + using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; +var ollamaEndpoint = "http://localhost:11434"; +var modelIdChat = "phi3.5"; + // Create kernel with a custom http address -var builder = Kernel.CreateBuilder(); -builder.AddOpenAIChatCompletion( - modelId: "phi3.5", - endpoint: new Uri("http://localhost:11434"), - apiKey: "apikey"); -var kernel = builder.Build(); +var kernel = Kernel.CreateBuilder() + .AddOllamaChatCompletion(modelId: modelIdChat, endpoint: new Uri(ollamaEndpoint)) + .Build(); -// 14 - define prompt execution settings var settings = new OpenAIPromptExecutionSettings { MaxTokens = 100, diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj index 4bf4ff65..4d85f901 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj @@ -1,14 +1,15 @@  - - Exe - net8.0 - enable - enable - + + Exe + net8.0 + enable + enable + - - - + + + + diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Program.cs b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Program.cs index e731003d..c409674c 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Program.cs +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Program.cs @@ -21,26 +21,28 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
-#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052 +#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052, SKEXP0070 + using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using System.Text; + +var ollamaEndpoint = "http://localhost:11434"; +var modelIdChat = "phi3.5"; // Create kernel with a custom http address -var builder = Kernel.CreateBuilder(); -builder.AddOpenAIChatCompletion( - modelId: "phi3.5", - endpoint: new Uri("http://localhost:11434"), - apiKey: "apikey"); -var kernel = builder.Build(); +var kernel = Kernel.CreateBuilder() + .AddOllamaChatCompletion(modelId: modelIdChat, endpoint: new Uri(ollamaEndpoint)) + .Build(); + var chat = kernel.GetRequiredService(); var history = new ChatHistory(); -history.AddSystemMessage("You are a useful chatbot. If you don't know an answer, say 'I don't know!'. Always reply in a funny ways. Use emojis if possible."); +history.AddSystemMessage("You always respond in 1 sentence in a funny way. 
Use emojis if possible."); while (true) { - Console.Write("Q:"); + Console.Write("Q: "); var userQ = Console.ReadLine(); if (string.IsNullOrEmpty(userQ)) { @@ -48,7 +50,14 @@ } history.AddUserMessage(userQ); - var result = await chat.GetChatMessageContentsAsync(history); - Console.WriteLine(result[^1].Content); - history.Add(result[^1]); + Console.Write($"{modelIdChat}: "); + var response = chat.GetStreamingChatMessageContentsAsync(history); + var assistantResponse = new StringBuilder(); + await foreach (var message in response) + { + Console.Write(message.ToString()); + assistantResponse.Append(message.ToString()); + } + history.AddAssistantMessage(assistantResponse.ToString()); + Console.WriteLine(); } diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj index aac9987d..ca6809ce 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj @@ -9,7 +9,8 @@ - + + diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs index e3df105c..06ab20fd 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs @@ -23,87 +23,60 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
-#pragma warning disable SKEXP0001 -#pragma warning disable SKEXP0003 -#pragma warning disable SKEXP0010 -#pragma warning disable SKEXP0011 -#pragma warning disable SKEXP0050 -#pragma warning disable SKEXP0052 +#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052, SKEXP0070 -using Microsoft.Extensions.DependencyInjection; +using Microsoft.KernelMemory; +using Microsoft.KernelMemory.AI.Ollama; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Embeddings; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Plugins.Memory; +var ollamaEndpoint = "http://localhost:11434"; +var modelIdChat = "phi3.5"; +var modelIdEmbeddings = "all-minilm"; + +// questions var question = "What is Bruno's favourite super hero?"; + Console.WriteLine($"This program will answer the following question: {question}"); Console.WriteLine("1st approach will be to ask the question directly to the Phi-3 model."); Console.WriteLine("2nd approach will be to add facts to a semantic memory and ask the question again"); Console.WriteLine(""); // Create a chat completion service -var builder = Kernel.CreateBuilder(); -builder.AddOpenAIChatCompletion( - modelId: "phi3.5", - endpoint: new Uri("http://localhost:11434"), - apiKey: "apikey"); -builder.AddLocalTextEmbeddingGeneration(); -Kernel kernel = builder.Build(); - -Console.WriteLine($"Phi-3 response (no memory)."); +Kernel kernel = Kernel.CreateBuilder() + .AddOllamaChatCompletion(modelId: modelIdChat, endpoint: new Uri(ollamaEndpoint)) + .Build(); var response = kernel.InvokePromptStreamingAsync(question); await foreach (var result in response) { - Console.Write(result); + Console.Write(result.ToString()); } // separator Console.WriteLine(""); Console.WriteLine("=============="); Console.WriteLine(""); +Console.WriteLine($"Phi-3 response (using semantic memory)."); -// get the embeddings generator service -var embeddingGenerator = 
kernel.Services.GetRequiredService(); -var memory = new SemanticTextMemory(new VolatileMemoryStore(), embeddingGenerator); - -// add facts to the collection -const string MemoryCollectionName = "fanFacts"; - -await memory.SaveInformationAsync(MemoryCollectionName, id: "info1", text: "Gisela's favourite super hero is Batman"); -await memory.SaveInformationAsync(MemoryCollectionName, id: "info2", text: "The last super hero movie watched by Gisela was Guardians of the Galaxy Vol 3"); -await memory.SaveInformationAsync(MemoryCollectionName, id: "info3", text: "Bruno's favourite super hero is Invincible"); -await memory.SaveInformationAsync(MemoryCollectionName, id: "info4", text: "The last super hero movie watched by Bruno was Aquaman II"); -await memory.SaveInformationAsync(MemoryCollectionName, id: "info5", text: "Bruno don't like the super hero movie: Eternals"); - -TextMemoryPlugin memoryPlugin = new(memory); - -// Import the text memory plugin into the Kernel. -kernel.ImportPluginFromObject(memoryPlugin); - -OpenAIPromptExecutionSettings settings = new() +var configOllamaKernelMemory = new OllamaConfig { - ToolCallBehavior = null, + Endpoint = ollamaEndpoint, + TextModel = new OllamaModelConfig(modelIdChat), + EmbeddingModel = new OllamaModelConfig(modelIdEmbeddings, 2048) }; +var memory = new KernelMemoryBuilder() + .WithOllamaTextGeneration(configOllamaKernelMemory) + .WithOllamaTextEmbeddingGeneration(configOllamaKernelMemory) + .Build(); -var prompt = @" - Question: {{$input}} - Answer the question using the memory content: {{Recall}}"; +await memory.ImportTextAsync("Gisela's favourite super hero is Batman"); +await memory.ImportTextAsync("The last super hero movie watched by Gisela was Guardians of the Galaxy Vol 3"); +await memory.ImportTextAsync("Bruno's favourite super hero is Invincible"); +await memory.ImportTextAsync("The last super hero movie watched by Bruno was Deadpool and Wolverine"); +await memory.ImportTextAsync("Bruno don't like the super hero 
movie: Eternals"); -var arguments = new KernelArguments(settings) +var answer = memory.AskStreamingAsync(question); +await foreach (var result in answer) { - { "input", question }, - { "collection", MemoryCollectionName } -}; - -Console.WriteLine($"Phi-3 response (using semantic memory)."); - -response = kernel.InvokePromptStreamingAsync(prompt, arguments); -await foreach (var result in response) -{ - Console.Write(result); -} - -Console.WriteLine($""); \ No newline at end of file + Console.Write(result.ToString()); +} \ No newline at end of file diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj index efb32fc4..0b2d2e98 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj @@ -9,9 +9,15 @@ - - + + + + + + + + diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Program.cs b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Program.cs index 6575ba44..68591c9e 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Program.cs +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Program.cs @@ -1,6 +1,8 @@ // Copyright (c) 2024 // Author : Bruno Capuano // Change Log : +// - Sample console application to use a local model hosted in ollama and semantic memory for search +// // The MIT License (MIT) // // Permission is hereby granted, free of charge, to any person obtaining a copy @@ -21,97 +23,73 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
-#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052 -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -using OpenTelemetry; -using OpenTelemetry.Metrics; -using OpenTelemetry.Resources; -using OpenTelemetry.Trace; -using System.Text; +#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052, SKEXP0070 -// Define endpoints for telemetry and Phi-3 -var otlpEndPoint = "http://localhost:4317"; -var phi3EndPoint = "http://localhost:11434"; +using Microsoft.KernelMemory; +using Microsoft.KernelMemory.AI.Ollama; +using Microsoft.SemanticKernel; -// Create kernel with a custom http address -var builder = Kernel.CreateBuilder(); -builder.AddOpenAIChatCompletion( - modelId: "phi3.5", - endpoint: new Uri(phi3EndPoint), - apiKey: "apikey"); -ConfigureOpenTelemetry(builder, otlpEndPoint); -var kernel = builder.Build(); +var ollamaEndpoint = "http://localhost:11434"; +var modelIdChat = "phi3.5"; +var modelIdEmbeddings = "all-minilm"; -var chat = kernel.GetRequiredService(); -var history = new ChatHistory(); -history.AddSystemMessage("You are a useful chatbot. If you don't know an answer, say 'I don't know!'. Always reply in a funny ways. 
Use emojis if possible."); +// questions +var questionEnglish = "What is Bruno's favourite super hero?"; +var questionSpanish = "Cual es el SuperHeroe favorito de Bruno?"; +var questionFrench = "Quel est le super-héros préféré de Bruno?"; +var question = questionEnglish; -while (true) -{ - Console.Write("Q: "); - var userQ = Console.ReadLine(); - if (string.IsNullOrEmpty(userQ)) - { - break; - } - history.AddUserMessage(userQ); - kernel.Services.GetRequiredService>().LogInformation($"User Question: {userQ}"); +// intro +SpectreConsoleOutput.DisplayTitle(modelIdChat); +SpectreConsoleOutput.DisplayTitleH2($"This program will answer the following question:"); +SpectreConsoleOutput.DisplayTitleH2(question); +SpectreConsoleOutput.DisplayTitleH3("1st approach will be to ask the question directly to the Phi-3 model."); +SpectreConsoleOutput.DisplayTitleH3("2nd approach will be to add facts to a semantic memory and ask the question again"); +Console.WriteLine(""); - Console.Write($"Phi-3: "); - StringBuilder sb = new(); - var result = chat.GetStreamingChatMessageContentsAsync(history); - await foreach (var item in result) - { - sb.Append(item); - Console.Write(item); - } - Console.WriteLine(""); - history.AddAssistantMessage(sb.ToString()); +SpectreConsoleOutput.DisplayTitleH2($"{modelIdChat} response (no memory)."); - // logging message - kernel.Services.GetRequiredService>().LogInformation($"Phi-3 Response: {sb.ToString()}"); -} +// Create a kernel with Azure OpenAI chat completion +var builder = Kernel.CreateBuilder().AddOllamaChatCompletion( + modelId: modelIdChat, + endpoint: new Uri(ollamaEndpoint)); -static IKernelBuilder ConfigureOpenTelemetry(IKernelBuilder builder, string otlpEndPoint) +Kernel kernel = builder.Build(); +var response = kernel.InvokePromptStreamingAsync(question); +await foreach (var result in response) { - builder.Services.AddLogging(logging => - { - //logging.AddSimpleConsole(options => options.TimestampFormat = "hh:mm:ss "); - 
logging.SetMinimumLevel(LogLevel.Debug); + SpectreConsoleOutput.WriteGreen(result.ToString()); +} - //logging.AddConsole(); - logging.Configure(options => - { - options.ActivityTrackingOptions = ActivityTrackingOptions.SpanId; - }); - }); +// separator +Console.WriteLine(""); +SpectreConsoleOutput.DisplaySeparator(); +Console.WriteLine("Press Enter to continue"); +Console.ReadLine(); +SpectreConsoleOutput.DisplayTitleH2($"{modelIdChat} response (using semantic memory)."); - builder.Services.AddOpenTelemetry() - .ConfigureResource(c => c.AddService("Sample04")) - .WithMetrics(metrics => - { - metrics.AddHttpClientInstrumentation() - .AddRuntimeInstrumentation(); - }) - .WithTracing(tracing => - { - tracing.AddHttpClientInstrumentation(); - }); +var configOllamaKernelMemory = new OllamaConfig +{ + Endpoint = ollamaEndpoint, + TextModel = new OllamaModelConfig(modelIdChat), + EmbeddingModel = new OllamaModelConfig(modelIdEmbeddings, 2048) +}; +var memory = new KernelMemoryBuilder() + .WithOllamaTextGeneration(configOllamaKernelMemory) + .WithOllamaTextEmbeddingGeneration(configOllamaKernelMemory) + .Build(); - var useOtlpExporter = !string.IsNullOrWhiteSpace(otlpEndPoint); - if (useOtlpExporter) - { - builder.Services.AddOpenTelemetry().UseOtlpExporter(); - } +await memory.ImportTextAsync("Gisela's favourite super hero is Batman"); +await memory.ImportTextAsync("The last super hero movie watched by Gisela was Guardians of the Galaxy Vol 3"); +await memory.ImportTextAsync("Bruno's favourite super hero is Invincible"); +await memory.ImportTextAsync("The last super hero movie watched by Bruno was Deadpool and Wolverine"); +await memory.ImportTextAsync("Bruno don't like the super hero movie: Eternals"); - return builder; +var answer = memory.AskStreamingAsync(question); +await foreach (var result in answer) +{ + SpectreConsoleOutput.WriteGreen(result.ToString()); } + +Console.WriteLine($""); \ No newline at end of file diff --git 
a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj index ffe88500..a7dcb339 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj @@ -9,16 +9,16 @@ - - - - - - - - - - + + + + + + + + + + diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/SpectreConsoleOutput.cs b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/SpectreConsoleOutput.cs new file mode 100644 index 00000000..38226119 --- /dev/null +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/SpectreConsoleOutput.cs @@ -0,0 +1,128 @@ +// Copyright (c) 2024 +// Author : Bruno Capuano +// Change Log : +// +// The MIT License (MIT) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+ +#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052 +using Spectre.Console; + +public static class SpectreConsoleOutput +{ + public static void DisplayTitle(string modelId = "") + { + var title = $"{modelId} RAG"; + + AnsiConsole.Write(new FigletText(title).Centered().Color(Color.Purple)); + } + + public static void DisplayTitleH2(string subtitle) + { + AnsiConsole.MarkupLine($"[bold][blue]=== {subtitle} ===[/][/]"); + AnsiConsole.MarkupLine($""); + } + + public static void DisplayTitleH3(string subtitle) + { + AnsiConsole.MarkupLine($"[bold]>> {subtitle}[/]"); + AnsiConsole.MarkupLine($""); + } + + public static void DisplaySeparator() + { + AnsiConsole.MarkupLine($""); + AnsiConsole.MarkupLine($"[bold][blue]==============[/][/]"); + AnsiConsole.MarkupLine($""); + } + + public static void WriteGreen(string message) + { + try + { + AnsiConsole.Markup($"[green]{message}[/]"); + } + catch + { + AnsiConsole.Write($"{message}"); + } + } + + public static void DisplayQuestion(string question) + { + AnsiConsole.MarkupLine($"[bold][blue]>>Q: {question}[/][/]"); + AnsiConsole.MarkupLine($""); + } + public static void DisplayAnswerStart(string answerPrefix) + { + AnsiConsole.Markup($"[bold][blue]>> {answerPrefix}:[/][/]"); + } + + public static void DisplayFilePath(string prefix, string filePath) + { + var path = new TextPath(filePath); + + AnsiConsole.Markup($"[bold][blue]>> {prefix}: [/][/]"); + AnsiConsole.Write(path); + AnsiConsole.MarkupLine($""); + } + + public static void DisplaySubtitle(string prefix, string content) + { + AnsiConsole.Markup($"[bold][blue]>> {prefix}: [/][/]"); + AnsiConsole.WriteLine(content); + AnsiConsole.MarkupLine($""); + } + + public static int AskForNumber(string question) + { + var number = AnsiConsole.Ask(@$"[green]{question}[/]"); + return number; + } + + public static string AskForString(string question) + { + var response = AnsiConsole.Ask(@$"[green]{question}[/]"); + return response; + } + + 
public static List SelectScenarios() + { + // Ask the user to select which scenarios to run + var scenarios = AnsiConsole.Prompt( + new MultiSelectionPrompt() + .Title("Select the [green]Phi 3 Vision scenarios[/] to run?") + .PageSize(10) + .Required(true) + .MoreChoicesText("[grey](Move up and down to reveal more scenarios)[/]") + .InstructionsText( + "[grey](Press [blue][/] to toggle a scenario, " + + "[green][/] to accept)[/]") + .AddChoiceGroup("Select an image to be analyzed", new[] + {"foggyday.png","foggydaysmall.png","petsmusic.png","ultrarunningmug.png", + }) + .AddChoices( new[] { + "Type the image path to be analyzed", + "Type a question" + }) + ); + return scenarios; + } +} \ No newline at end of file From afdb50b4ca43cefdc420c88c7cb11a49259faeb5 Mon Sep 17 00:00:00 2001 From: Bruno Capuano Date: Mon, 27 Jan 2025 11:58:25 -0500 Subject: [PATCH 2/4] Update devcontainer configuration for Ollama to use Phi-3.5 --- .devcontainer/csollamaphi3.5/devcontainer.json | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/.devcontainer/csollamaphi3.5/devcontainer.json b/.devcontainer/csollamaphi3.5/devcontainer.json index bd3af422..1f0be5f6 100644 --- a/.devcontainer/csollamaphi3.5/devcontainer.json +++ b/.devcontainer/csollamaphi3.5/devcontainer.json @@ -5,13 +5,8 @@ "ghcr.io/devcontainers/features/docker-in-docker:2": {}, "ghcr.io/devcontainers/features/github-cli:1": {}, "ghcr.io/devcontainers/features/common-utils:2": {}, - "ghcr.io/devcontainers/features/dotnet:2": { - "version": "none", - "dotnetRuntimeVersions": "8.0", - "aspNetCoreRuntimeVersions": "8.0" - }, "ghcr.io/prulloac/devcontainer-features/ollama:1": { - "pull": "phi3" + "pull": "phi3.5" }, "sshd": "latest" }, @@ -32,7 +27,7 @@ 32001 ], "postCreateCommand": "sudo dotnet workload update", - "postStartCommand": "ollama pull phi3.5 & ollama pull all-minilm", + "postStartCommand": "ollama pull all-minilm", "remoteUser": "vscode", "hostRequirements": { "memory": "8gb", From
21ae93c42bd740f1182fff0cc6967c073a46ffa5 Mon Sep 17 00:00:00 2001 From: Bruno Capuano Date: Mon, 27 Jan 2025 12:10:02 -0500 Subject: [PATCH 3/4] Update project files to target .NET 9.0 Refactor Sample01.csproj, Sample02.csproj, Sample03.csproj, and Sample04.csproj to target .NET 9.0 instead of .NET 8.0. Reformat ItemGroup sections for improved readability while retaining existing package references. Preserve UserSecretsId property in Sample02.csproj and Sample04.csproj. --- .../src/Sample01/Sample01.csproj | 25 +++++++-------- .../src/Sample02/Sample02.csproj | 15 ++++----- .../src/Sample03/Sample03.csproj | 29 ++++++++--------- .../src/Sample04/Sample04.csproj | 31 +++++++++---------- 4 files changed, 44 insertions(+), 56 deletions(-) diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj index 4d85f901..650c0d4d 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Sample01.csproj @@ -1,15 +1,12 @@  - - - Exe - net8.0 - enable - enable - - - - - - - - + + Exe + net9.0 + enable + enable + + + + + + \ No newline at end of file diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj index ca6809ce..3cb5f53c 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Sample02.csproj @@ -1,16 +1,13 @@  - Exe - net8.0 + net9.0 enable enable 506e8050-acbd-476d-ab7d-bbebc8238bfa - - - - - - - + + + + + \ No newline at end of file diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj index 0b2d2e98..53b69c1a 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj @@ -1,23 +1,20 @@  - Exe - 
net8.0 + net9.0 sample03 enable enable - - - - - - - - - - - - - - + + + + + + + + + + + + \ No newline at end of file diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj index a7dcb339..e902b4fe 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Sample04.csproj @@ -1,24 +1,21 @@  - Exe - net8.0 + net9.0 enable enable 506e8050-acbd-476d-ab7d-bbebc8238bfa - - - - - - - - - - - - - - - + + + + + + + + + + + + + \ No newline at end of file From 9e8dab522d41a33605fdc468644afdbed3e084cd Mon Sep 17 00:00:00 2001 From: Bruno Capuano Date: Mon, 27 Jan 2025 12:13:34 -0500 Subject: [PATCH 4/4] Update model references in console output messages Changed `modelIdEmbeddings` to `modelIdChat` in console output to accurately reflect the model being used. Updated messages to ensure consistency and clarity regarding the model identifier throughout the program. 
--- md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs index 06ab20fd..4eff8b4b 100644 --- a/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs +++ b/md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs @@ -37,7 +37,7 @@ var question = "What is Bruno's favourite super hero?"; Console.WriteLine($"This program will answer the following question: {question}"); -Console.WriteLine("1st approach will be to ask the question directly to the Phi-3 model."); +Console.WriteLine($"1st approach will be to ask the question directly to the {modelIdChat} model."); Console.WriteLine("2nd approach will be to add facts to a semantic memory and ask the question again"); Console.WriteLine(""); @@ -55,7 +55,7 @@ Console.WriteLine(""); Console.WriteLine("=============="); Console.WriteLine(""); -Console.WriteLine($"Phi-3 response (using semantic memory)."); +Console.WriteLine($"{modelIdChat} response (using semantic memory)."); var configOllamaKernelMemory = new OllamaConfig {