Merge pull request #612 from xbotter/deps/sk-1.6.2

Update Semantic Kernel & Kernel Memory Package
This commit is contained in:
Rinne 2024-03-19 09:34:28 +08:00 committed by GitHub
commit e3ecc318ff
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 259 additions and 33 deletions

View File

@@ -10,14 +10,14 @@
<IncludeBuiltInRuntimes>true</IncludeBuiltInRuntimes> <IncludeBuiltInRuntimes>true</IncludeBuiltInRuntimes>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks> <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<LangVersion>12</LangVersion> <LangVersion>12</LangVersion>
<NoWarn>1701;1702;8604;SKEXP0001;SKEXP0052;SKEXP0003</NoWarn> <NoWarn>1701;1702;8604;SKEXP0001;SKEXP0050;SKEXP0052;SKEXP0003</NoWarn>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="8.0.0" /> <PackageReference Include="Microsoft.Extensions.Logging.Console" Version="8.0.0" />
<PackageReference Include="Microsoft.KernelMemory.Core" Version="0.29.240219.2" /> <PackageReference Include="Microsoft.KernelMemory.Core" Version="0.34.240313.1" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.5.0" /> <PackageReference Include="Microsoft.SemanticKernel" Version="1.6.2" />
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Memory" Version="1.1.0-alpha" /> <PackageReference Include="Microsoft.SemanticKernel.Plugins.Memory" Version="1.6.2-alpha" />
<PackageReference Include="Spectre.Console" Version="0.48.0" /> <PackageReference Include="Spectre.Console" Version="0.48.0" />
</ItemGroup> </ItemGroup>

View File

@@ -27,7 +27,7 @@
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="Microsoft.KernelMemory.Abstractions" Version="0.26.240104.1" /> <PackageReference Include="Microsoft.KernelMemory.Abstractions" Version="0.34.240313.1" />
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>

View File

@@ -16,7 +16,7 @@ namespace LLamaSharp.SemanticKernel.ChatCompletion;
/// </summary> /// </summary>
public sealed class LLamaSharpChatCompletion : IChatCompletionService public sealed class LLamaSharpChatCompletion : IChatCompletionService
{ {
private readonly StatelessExecutor _model; private readonly ILLamaExecutor _model;
private ChatRequestSettings defaultRequestSettings; private ChatRequestSettings defaultRequestSettings;
private readonly IHistoryTransform historyTransform; private readonly IHistoryTransform historyTransform;
private readonly ITextStreamTransform outputTransform; private readonly ITextStreamTransform outputTransform;
@@ -36,7 +36,7 @@ public sealed class LLamaSharpChatCompletion : IChatCompletionService
}; };
} }
public LLamaSharpChatCompletion(StatelessExecutor model, public LLamaSharpChatCompletion(ILLamaExecutor model,
ChatRequestSettings? defaultRequestSettings = default, ChatRequestSettings? defaultRequestSettings = default,
IHistoryTransform? historyTransform = null, IHistoryTransform? historyTransform = null,
ITextStreamTransform? outputTransform = null) ITextStreamTransform? outputTransform = null)

View File

@@ -10,39 +10,42 @@
<ImplicitUsings>enable</ImplicitUsings> <ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable> <Nullable>enable</Nullable>
<Version>0.8.0</Version> <Version>0.8.0</Version>
<Authors>Tim Miller, Xbotter</Authors> <Authors>Tim Miller, Xbotter</Authors>
<Company>SciSharp STACK</Company> <Company>SciSharp STACK</Company>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild> <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
<Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright> <Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
<RepositoryUrl>https://github.com/SciSharp/LLamaSharp</RepositoryUrl> <RepositoryUrl>https://github.com/SciSharp/LLamaSharp</RepositoryUrl>
<RepositoryType>git</RepositoryType> <RepositoryType>git</RepositoryType>
<PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&amp;v=4</PackageIconUrl> <PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&amp;v=4</PackageIconUrl>
<PackageTags>LLama, LLM, GPT, ChatGPT, semantic-kernel, SciSharp</PackageTags> <PackageTags>LLama, LLM, GPT, ChatGPT, semantic-kernel, SciSharp</PackageTags>
<Description> <Description>
The integration of LLamaSharp and Microsoft semantic-kernel. The integration of LLamaSharp and Microsoft semantic-kernel.
</Description> </Description>
<PackageReleaseNotes> <PackageReleaseNotes>
Support integration with semantic-kernel Support integration with semantic-kernel
</PackageReleaseNotes> </PackageReleaseNotes>
<PackageLicenseExpression>MIT</PackageLicenseExpression> <PackageLicenseExpression>MIT</PackageLicenseExpression>
<PackageOutputPath>packages</PackageOutputPath> <PackageOutputPath>packages</PackageOutputPath>
<Platforms>AnyCPU;x64;Arm64</Platforms> <Platforms>AnyCPU;x64;Arm64</Platforms>
<PackageId>LLamaSharp.semantic-kernel</PackageId> <PackageId>LLamaSharp.semantic-kernel</PackageId>
<Configurations>Debug;Release;GPU</Configurations> <Configurations>Debug;Release;GPU</Configurations>
<NoWarn>SKEXP0001,SKEXP0052</NoWarn> <NoWarn>SKEXP0001,SKEXP0052</NoWarn>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="Microsoft.SemanticKernel.Abstractions" Version="1.4.0" /> <PackageReference Include="Microsoft.SemanticKernel.Abstractions" Version="1.6.2" />
</ItemGroup>
<ItemGroup Condition="'$(TargetFramework)' == 'netstandard2.0'">
<PackageReference Include="System.Memory" Version="4.5.5" PrivateAssets="all" />
</ItemGroup> </ItemGroup>
<ItemGroup Condition="'$(TargetFramework)' == 'netstandard2.0'">
<PackageReference Include="System.Memory" Version="4.5.5" PrivateAssets="all" />
</ItemGroup>
<ItemGroup> <ItemGroup>
<ProjectReference Include="..\LLama\LLamaSharp.csproj" /> <ProjectReference Include="..\LLama\LLamaSharp.csproj" />
</ItemGroup> </ItemGroup>
<ItemGroup>
<InternalsVisibleTo Include="Llama.Unittest"/>
</ItemGroup>
</Project> </Project>

View File

@@ -14,6 +14,7 @@
<ItemGroup> <ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.9.0" /> <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.9.0" />
<PackageReference Include="Moq" Version="4.20.70" />
<PackageReference Include="System.Linq.Async" Version="6.0.1" /> <PackageReference Include="System.Linq.Async" Version="6.0.1" />
<PackageReference Include="xunit" Version="2.7.0" /> <PackageReference Include="xunit" Version="2.7.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.7"> <PackageReference Include="xunit.runner.visualstudio" Version="2.5.7">

View File

@@ -0,0 +1,50 @@
using Xunit;
using LLama;
using LLama.Abstractions;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Services;
using System;
using System.IO;
using System.Runtime.CompilerServices;
using System.Text;
using static LLama.LLamaTransforms;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Threading;
using LLamaSharp.SemanticKernel.ChatCompletion;
namespace LLamaSharp.SemanticKernel.Tests
{
    /// <summary>
    /// Unit tests for the <see cref="ExtensionMethods"/> conversion helpers that bridge
    /// Semantic Kernel request/history types to their LLamaSharp equivalents.
    /// </summary>
    public class ExtensionMethodsTests
    {
        [Fact]
        public void ToLLamaSharpChatHistory_StateUnderTest_ExpectedBehavior()
        {
            // Arrange: an empty SK chat history, converted with case-insensitive role matching.
            var skHistory = new Microsoft.SemanticKernel.ChatCompletion.ChatHistory();

            // Act
            var converted = ExtensionMethods.ToLLamaSharpChatHistory(skHistory, true);

            // Assert: conversion must produce a history object even for empty input.
            Assert.NotNull(converted);
        }

        [Fact]
        public void ToLLamaSharpInferenceParams_StateUnderTest_ExpectedBehavior()
        {
            // Arrange: default request settings exercise the default-mapping path.
            var settings = new ChatRequestSettings();

            // Act
            var inferenceParams = ExtensionMethods.ToLLamaSharpInferenceParams(settings);

            // Assert: defaults must still map to a non-null inference parameter set.
            Assert.NotNull(inferenceParams);
        }
    }
}

View File

@@ -0,0 +1,85 @@
using Xunit;
using Moq;
using LLama;
using LLama.Abstractions;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Services;
using System;
using System.IO;
using System.Runtime.CompilerServices;
using System.Text;
using static LLama.LLamaTransforms;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Threading;
namespace LLamaSharp.SemanticKernel.ChatCompletion.Tests
{
    /// <summary>
    /// Unit tests for <see cref="LLamaSharpChatCompletion"/> driven by a mocked
    /// <see cref="ILLamaExecutor"/>, so no model weights are required.
    /// </summary>
    public class LLamaSharpChatCompletionTests
    {
        // Mocked executor injected into every system-under-test instance.
        private readonly Mock<ILLamaExecutor> mockStatelessExecutor;

        public LLamaSharpChatCompletionTests()
        {
            mockStatelessExecutor = new Mock<ILLamaExecutor>();
        }

        // Builds the system under test with default transforms/settings (all null).
        private LLamaSharpChatCompletion CreateLLamaSharpChatCompletion()
            => new LLamaSharpChatCompletion(mockStatelessExecutor.Object, null, null, null);

        [Fact]
        public async Task GetChatMessageContentsAsync_StateUnderTest_ExpectedBehavior()
        {
            // Arrange: the executor streams back a single token for any prompt.
            mockStatelessExecutor
                .Setup(e => e.InferAsync(It.IsAny<string>(), It.IsAny<IInferenceParams>(), It.IsAny<CancellationToken>()))
                .Returns(new List<string> { "test" }.ToAsyncEnumerable());
            var sut = CreateLLamaSharpChatCompletion();
            var history = new ChatHistory();
            PromptExecutionSettings? settings = null;
            Kernel? kernel = null;

            // Act
            var contents = await sut.GetChatMessageContentsAsync(history, settings, kernel, CancellationToken.None);

            // Assert: the streamed token must surface as at least one chat message.
            Assert.True(contents.Count > 0);
        }

        [Fact]
        public async Task GetStreamingChatMessageContentsAsync_StateUnderTest_ExpectedBehavior()
        {
            // Arrange: same single-token stream as the non-streaming test.
            mockStatelessExecutor
                .Setup(e => e.InferAsync(It.IsAny<string>(), It.IsAny<IInferenceParams>(), It.IsAny<CancellationToken>()))
                .Returns(new List<string> { "test" }.ToAsyncEnumerable());
            var sut = CreateLLamaSharpChatCompletion();
            var history = new ChatHistory();
            PromptExecutionSettings? settings = null;
            Kernel? kernel = null;

            // Act + Assert: every streamed chunk must be non-null.
            await foreach (var chunk in sut.GetStreamingChatMessageContentsAsync(history, settings, kernel, CancellationToken.None))
            {
                Assert.NotNull(chunk);
            }
        }
    }
}

View File

@@ -0,0 +1,87 @@
using Xunit;
using Moq;
using LLama;
using LLama.Abstractions;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Services;
using System;
using System.IO;
using System.Runtime.CompilerServices;
using System.Text;
using static LLama.LLamaTransforms;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Threading;
namespace LLamaSharp.SemanticKernel.TextCompletion.Tests
{
    /// <summary>
    /// Unit tests for <see cref="LLamaSharpTextCompletion"/> using a strict mocked
    /// <see cref="ILLamaExecutor"/>; strict mode plus VerifyAll on dispose ensures
    /// every configured executor call is actually made.
    /// </summary>
    public class LLamaSharpTextCompletionTests : IDisposable
    {
        // Strict repository: any un-configured call on a created mock throws.
        private readonly MockRepository mockRepository;
        private readonly Mock<ILLamaExecutor> mockExecutor;

        public LLamaSharpTextCompletionTests()
        {
            mockRepository = new MockRepository(MockBehavior.Strict);
            mockExecutor = mockRepository.Create<ILLamaExecutor>();
        }

        // Runs after each test: fails the test if a configured setup was never invoked.
        public void Dispose()
        {
            mockRepository.VerifyAll();
        }

        // Builds the system under test around the mocked executor.
        private LLamaSharpTextCompletion CreateLLamaSharpTextCompletion()
            => new LLamaSharpTextCompletion(mockExecutor.Object);

        [Fact]
        public async Task GetTextContentsAsync_StateUnderTest_ExpectedBehavior()
        {
            // Arrange: executor yields one token for any prompt/params combination.
            mockExecutor
                .Setup(e => e.InferAsync(It.IsAny<string>(), It.IsAny<IInferenceParams>(), It.IsAny<CancellationToken>()))
                .Returns(new List<string> { "test" }.ToAsyncEnumerable());
            var sut = CreateLLamaSharpTextCompletion();
            PromptExecutionSettings? settings = null;
            Kernel? kernel = null;

            // Act
            var contents = await sut.GetTextContentsAsync("Test", settings, kernel, CancellationToken.None);

            // Assert: the streamed token must surface as at least one text content.
            Assert.True(contents.Count > 0);
        }

        [Fact]
        public async Task GetStreamingTextContentsAsync_StateUnderTest_ExpectedBehavior()
        {
            // Arrange: same single-token stream as the non-streaming test.
            mockExecutor
                .Setup(e => e.InferAsync(It.IsAny<string>(), It.IsAny<IInferenceParams>(), It.IsAny<CancellationToken>()))
                .Returns(new List<string> { "test" }.ToAsyncEnumerable());
            var sut = CreateLLamaSharpTextCompletion();
            PromptExecutionSettings? settings = null;
            Kernel? kernel = null;

            // Act + Assert: every streamed chunk must be non-null.
            await foreach (var chunk in sut.GetStreamingTextContentsAsync("Test", settings, kernel, CancellationToken.None))
            {
                Assert.NotNull(chunk);
            }
        }
    }
}