diff --git a/LLama.Examples/LLama.Examples.csproj b/LLama.Examples/LLama.Examples.csproj
index a8abe3ae..865a6321 100644
--- a/LLama.Examples/LLama.Examples.csproj
+++ b/LLama.Examples/LLama.Examples.csproj
@@ -27,6 +27,7 @@
+
diff --git a/LLama.Examples/NewVersion/SemanticKernelMemorySkill.cs b/LLama.Examples/NewVersion/SemanticKernelMemorySkill.cs
index df22d9eb..ef599cdf 100644
--- a/LLama.Examples/NewVersion/SemanticKernelMemorySkill.cs
+++ b/LLama.Examples/NewVersion/SemanticKernelMemorySkill.cs
@@ -22,6 +22,7 @@ namespace LLama.Examples.NewVersion
         public static async Task Run()
         {
+            var loggerFactory = ConsoleLogger.LoggerFactory;
             Console.WriteLine("Example from: https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/KernelSyntaxExamples/Example15_MemorySkill.cs");
             Console.Write("Please input your model path: ");
             var modelPath = Console.ReadLine();
@@ -30,16 +31,18 @@ namespace LLama.Examples.NewVersion
             var parameters = new ModelParams(modelPath)
             {
                 Seed = RandomNumberGenerator.GetInt32(int.MaxValue),
+                EmbeddingMode = true,
+                GpuLayerCount = 50
             };
             using var model = LLamaWeights.LoadFromFile(parameters);
             using var context = model.CreateContext(parameters);
-            var ex = new InteractiveExecutor(context);
-            var ex2 = new StatelessExecutor(model, parameters);
+            //var ex = new InteractiveExecutor(context);
+            var ex = new InstructExecutor(context);
 
             var builder = new KernelBuilder();
-
+            builder.WithLoggerFactory(loggerFactory);
             var embedding = new LLamaEmbedder(context);
-            builder.WithAIService<IChatCompletion>("local-llama", new LLamaSharpChatCompletion(ex), true);
+            //builder.WithAIService<IChatCompletion>("local-llama", new LLamaSharpChatCompletion(ex), true);
             builder.WithAIService<ITextCompletion>("local-llama-text", new LLamaSharpTextCompletion(ex), true);
             builder.WithAIService<ITextEmbeddingGeneration>("local-llama-embed", new LLamaSharpEmbeddingGeneration(embedding), true);
             builder.WithMemoryStorage(new VolatileMemoryStore());
@@ -71,12 +74,20 @@ namespace LLama.Examples.NewVersion
 
             // ========= Test memory remember =========
             Console.WriteLine("========= Example: Recalling a Memory =========");
-            var answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info1", null);
+            var answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info1", loggerFactory);
             Console.WriteLine("Memory associated with 'info1': {0}", answer);
-            /*
-            Output:
-            "Memory associated with 'info1': My name is Andrea
-            */
+
+            answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info2", loggerFactory);
+            Console.WriteLine("Memory associated with 'info2': {0}", answer);
+
+            answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info3", loggerFactory);
+            Console.WriteLine("Memory associated with 'info3': {0}", answer);
+
+            answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info4", loggerFactory);
+            Console.WriteLine("Memory associated with 'info4': {0}", answer);
+
+            answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info5", loggerFactory);
+            Console.WriteLine("Memory associated with 'info5': {0}", answer);
 
             // ========= Test memory recall =========
             Console.WriteLine("========= Example: Recalling an Idea =========");
@@ -155,7 +166,7 @@
 Answer: My name is Andrea and my family is from New York. I work as a tourist operator.
 */
-            await memorySkill.RemoveAsync(MemoryCollectionName, "info1", null);
+            await memorySkill.RemoveAsync(MemoryCollectionName, "info1", loggerFactory);
 
             result = await kernel.RunAsync(aboutMeOracle, new("Tell me a bit about myself"));
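Review note: the hunks above thread ConsoleLogger.LoggerFactory (added in RepoUtils.cs below) through both the kernel builder and every TextMemorySkill call. For readers who want the wiring in isolation, here is a minimal standalone sketch. It is not part of the patch: the model path and the "aboutMe" collection name are placeholders, and the using directives are my best guess at the Semantic Kernel preview packages this example targets; it would also need to live inside LLama.Examples, since ConsoleLogger is internal.

using LLama;
using LLama.Common;
using LLama.Examples;
using LLamaSharp.SemanticKernel.TextCompletion;
using LLamaSharp.SemanticKernel.TextEmbedding;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.AI.Embeddings;
using Microsoft.SemanticKernel.AI.TextCompletion;
using Microsoft.SemanticKernel.Memory;
using Microsoft.SemanticKernel.Skills.Core;

// EmbeddingMode must be enabled because the same weights serve both
// text completion and embedding generation in this example.
var parameters = new ModelParams("<model path>") { EmbeddingMode = true, GpuLayerCount = 50 };
using var model = LLamaWeights.LoadFromFile(parameters);
using var context = model.CreateContext(parameters);

var builder = new KernelBuilder();
builder.WithLoggerFactory(ConsoleLogger.LoggerFactory);   // kernel-side logging
builder.WithAIService<ITextCompletion>("local-llama-text", new LLamaSharpTextCompletion(new InstructExecutor(context)), true);
builder.WithAIService<ITextEmbeddingGeneration>("local-llama-embed", new LLamaSharpEmbeddingGeneration(new LLamaEmbedder(context)), true);
builder.WithMemoryStorage(new VolatileMemoryStore());
var kernel = builder.Build();

// Save one fact, then read it back by key; the logger factory is also
// passed per call so the skill logs through the same console sink.
await kernel.Memory.SaveInformationAsync("aboutMe", "My name is Andrea", "info1");
var memorySkill = new TextMemorySkill(kernel.Memory);
var answer = await memorySkill.RetrieveAsync("aboutMe", "info1", ConsoleLogger.LoggerFactory);

The per-call loggerFactory argument is what the diff changes from null, so recall and removal now log through the same console sink as the kernel itself.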
diff --git a/LLama.Examples/RepoUtils.cs b/LLama.Examples/RepoUtils.cs
new file mode 100644
index 00000000..8e728339
--- /dev/null
+++ b/LLama.Examples/RepoUtils.cs
@@ -0,0 +1,40 @@
+using Microsoft.Extensions.Logging;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace LLama.Examples
+{
+    /// <summary>
+    /// Basic logger printing to console
+    /// </summary>
+    internal static class ConsoleLogger
+    {
+        internal static ILogger Logger => LoggerFactory.CreateLogger<object>();
+
+        internal static ILoggerFactory LoggerFactory => s_loggerFactory.Value;
+
+        private static readonly Lazy<ILoggerFactory> s_loggerFactory = new(LogBuilder);
+
+        private static ILoggerFactory LogBuilder()
+        {
+            return Microsoft.Extensions.Logging.LoggerFactory.Create(builder =>
+            {
+                builder.SetMinimumLevel(LogLevel.Warning);
+
+                builder.AddFilter("Microsoft", LogLevel.Trace);
+                builder.AddFilter("Microsoft", LogLevel.Debug);
+                builder.AddFilter("Microsoft", LogLevel.Information);
+                builder.AddFilter("Microsoft", LogLevel.Warning);
+                builder.AddFilter("Microsoft", LogLevel.Error);
+
+                builder.AddFilter("Microsoft", LogLevel.Warning);
+                builder.AddFilter("System", LogLevel.Warning);
+
+                builder.AddConsole();
+            });
+        }
+    }
+}
diff --git a/LLama.SemanticKernel/TextEmbedding/LLamaSharpEmbeddingGeneration.cs b/LLama.SemanticKernel/TextEmbedding/LLamaSharpEmbeddingGeneration.cs
index f2bd2886..02cc09d0 100644
--- a/LLama.SemanticKernel/TextEmbedding/LLamaSharpEmbeddingGeneration.cs
+++ b/LLama.SemanticKernel/TextEmbedding/LLamaSharpEmbeddingGeneration.cs
@@ -15,7 +15,6 @@ public sealed class LLamaSharpEmbeddingGeneration : ITextEmbeddingGeneration
     /// <inheritdoc/>
     public async Task<IList<ReadOnlyMemory<float>>> GenerateEmbeddingsAsync(IList<string> data, CancellationToken cancellationToken = default)
    {
-        var result = data.Select(text => new ReadOnlyMemory<float>(_embedder.GetEmbeddings(text))).ToList();
-        return await Task.FromResult(result).ConfigureAwait(false);
+        return data.Select(text => new ReadOnlyMemory<float>(_embedder.GetEmbeddings(text))).ToList();
     }
 }
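Two small review notes on the last two files. In RepoUtils.cs, successive AddFilter calls for the same category do not stack: when several rules match a category, the last one registered wins, so the net effect of the repeated "Microsoft" filters is simply Warning. In LLamaSharpEmbeddingGeneration, the method keeps the async modifier but no longer awaits anything, so the compiler will emit warning CS1998; dropping async and returning Task.FromResult(...) would be the warning-free equivalent. A minimal sketch of exercising both new pieces from inside the LLama.Examples project (the model path is a placeholder, and GetEmbeddings assumes a context created with EmbeddingMode = true, as the example above now sets):

using LLama;
using LLama.Common;
using LLama.Examples;
using LLamaSharp.SemanticKernel.TextEmbedding;
using Microsoft.Extensions.Logging;

// ConsoleLogger is internal, so this must run inside LLama.Examples.
var logger = ConsoleLogger.Logger;

var parameters = new ModelParams("<model path>") { EmbeddingMode = true };
using var model = LLamaWeights.LoadFromFile(parameters);
using var context = model.CreateContext(parameters);

var generation = new LLamaSharpEmbeddingGeneration(new LLamaEmbedder(context));
var embeddings = await generation.GenerateEmbeddingsAsync(new[] { "hello", "world" });

// With the console sink filtered to Warning, LogWarning is the lowest
// level guaranteed to reach the screen.
logger.LogWarning("Got {Count} embeddings of dimension {Dim}", embeddings.Count, embeddings[0].Length);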