Merge pull request #612 from xbotter/deps/sk-1.6.2

Update Semantic Kernel & Kernel Memory Package

Commit e3ecc318ff
@@ -10,14 +10,14 @@
     <IncludeBuiltInRuntimes>true</IncludeBuiltInRuntimes>
     <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
     <LangVersion>12</LangVersion>
-    <NoWarn>1701;1702;8604;SKEXP0001;SKEXP0052;SKEXP0003</NoWarn>
+    <NoWarn>1701;1702;8604;SKEXP0001;SKEXP0050;SKEXP0052;SKEXP0003</NoWarn>
   </PropertyGroup>
 
   <ItemGroup>
     <PackageReference Include="Microsoft.Extensions.Logging.Console" Version="8.0.0" />
-    <PackageReference Include="Microsoft.KernelMemory.Core" Version="0.29.240219.2" />
-    <PackageReference Include="Microsoft.SemanticKernel" Version="1.5.0" />
-    <PackageReference Include="Microsoft.SemanticKernel.Plugins.Memory" Version="1.1.0-alpha" />
+    <PackageReference Include="Microsoft.KernelMemory.Core" Version="0.34.240313.1" />
+    <PackageReference Include="Microsoft.SemanticKernel" Version="1.6.2" />
+    <PackageReference Include="Microsoft.SemanticKernel.Plugins.Memory" Version="1.6.2-alpha" />
     <PackageReference Include="Spectre.Console" Version="0.48.0" />
   </ItemGroup>
 
@@ -27,7 +27,7 @@
   </PropertyGroup>
 
   <ItemGroup>
-    <PackageReference Include="Microsoft.KernelMemory.Abstractions" Version="0.26.240104.1" />
+    <PackageReference Include="Microsoft.KernelMemory.Abstractions" Version="0.34.240313.1" />
   </ItemGroup>
 
   <ItemGroup>
@@ -16,7 +16,7 @@ namespace LLamaSharp.SemanticKernel.ChatCompletion;
 /// </summary>
 public sealed class LLamaSharpChatCompletion : IChatCompletionService
 {
-    private readonly StatelessExecutor _model;
+    private readonly ILLamaExecutor _model;
     private ChatRequestSettings defaultRequestSettings;
     private readonly IHistoryTransform historyTransform;
     private readonly ITextStreamTransform outputTransform;
@@ -36,7 +36,7 @@ public sealed class LLamaSharpChatCompletion : IChatCompletionService
         };
     }
 
-    public LLamaSharpChatCompletion(StatelessExecutor model,
+    public LLamaSharpChatCompletion(ILLamaExecutor model,
        ChatRequestSettings? defaultRequestSettings = default,
        IHistoryTransform? historyTransform = null,
        ITextStreamTransform? outputTransform = null)
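
Note: the constructor now accepts the ILLamaExecutor interface rather than the concrete StatelessExecutor, so any executor implementation (or a test double, as in the new Moq-based tests below) can be injected. A minimal usage sketch, assuming the LLamaSharp model-loading APIs of this release (ModelParams, LLamaWeights.LoadFromFile, StatelessExecutor) and a hypothetical model path:

using LLama;
using LLama.Abstractions;
using LLama.Common;
using LLamaSharp.SemanticKernel.ChatCompletion;

// Hypothetical GGUF path; replace with a real model file on disk.
var parameters = new ModelParams("path/to/model.gguf");
using var weights = LLamaWeights.LoadFromFile(parameters);

// StatelessExecutor implements ILLamaExecutor, so existing call sites keep working;
// any other ILLamaExecutor (e.g. a Moq mock in unit tests) now satisfies the signature too.
ILLamaExecutor executor = new StatelessExecutor(weights, parameters);
var chatCompletion = new LLamaSharpChatCompletion(executor);
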
@@ -10,39 +10,42 @@
     <ImplicitUsings>enable</ImplicitUsings>
     <Nullable>enable</Nullable>
 
-    <Version>0.8.0</Version>
-    <Authors>Tim Miller, Xbotter</Authors>
-    <Company>SciSharp STACK</Company>
-    <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
-    <Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
-    <RepositoryUrl>https://github.com/SciSharp/LLamaSharp</RepositoryUrl>
-    <RepositoryType>git</RepositoryType>
-    <PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&v=4</PackageIconUrl>
-    <PackageTags>LLama, LLM, GPT, ChatGPT, semantic-kernel, SciSharp</PackageTags>
-    <Description>
-      The integration of LLamaSharp and Microsoft semantic-kernel.
-    </Description>
-    <PackageReleaseNotes>
-      Support integration with semantic-kernel
-    </PackageReleaseNotes>
-    <PackageLicenseExpression>MIT</PackageLicenseExpression>
-    <PackageOutputPath>packages</PackageOutputPath>
-    <Platforms>AnyCPU;x64;Arm64</Platforms>
-    <PackageId>LLamaSharp.semantic-kernel</PackageId>
-    <Configurations>Debug;Release;GPU</Configurations>
-    <NoWarn>SKEXP0001,SKEXP0052</NoWarn>
+    <Version>0.8.0</Version>
+    <Authors>Tim Miller, Xbotter</Authors>
+    <Company>SciSharp STACK</Company>
+    <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
+    <Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
+    <RepositoryUrl>https://github.com/SciSharp/LLamaSharp</RepositoryUrl>
+    <RepositoryType>git</RepositoryType>
+    <PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&v=4</PackageIconUrl>
+    <PackageTags>LLama, LLM, GPT, ChatGPT, semantic-kernel, SciSharp</PackageTags>
+    <Description>
+      The integration of LLamaSharp and Microsoft semantic-kernel.
+    </Description>
+    <PackageReleaseNotes>
+      Support integration with semantic-kernel
+    </PackageReleaseNotes>
+    <PackageLicenseExpression>MIT</PackageLicenseExpression>
+    <PackageOutputPath>packages</PackageOutputPath>
+    <Platforms>AnyCPU;x64;Arm64</Platforms>
+    <PackageId>LLamaSharp.semantic-kernel</PackageId>
+    <Configurations>Debug;Release;GPU</Configurations>
+    <NoWarn>SKEXP0001,SKEXP0052</NoWarn>
   </PropertyGroup>
 
   <ItemGroup>
-    <PackageReference Include="Microsoft.SemanticKernel.Abstractions" Version="1.4.0" />
+    <PackageReference Include="Microsoft.SemanticKernel.Abstractions" Version="1.6.2" />
   </ItemGroup>
 
-  <ItemGroup Condition="'$(TargetFramework)' == 'netstandard2.0'">
-    <PackageReference Include="System.Memory" Version="4.5.5" PrivateAssets="all" />
-  </ItemGroup>
+  <ItemGroup Condition="'$(TargetFramework)' == 'netstandard2.0'">
+    <PackageReference Include="System.Memory" Version="4.5.5" PrivateAssets="all" />
+  </ItemGroup>
 
   <ItemGroup>
     <ProjectReference Include="..\LLama\LLamaSharp.csproj" />
   </ItemGroup>
+  <ItemGroup>
+    <InternalsVisibleTo Include="Llama.Unittest"/>
+  </ItemGroup>
 
 </Project>
@@ -14,6 +14,7 @@
 
   <ItemGroup>
     <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.9.0" />
+    <PackageReference Include="Moq" Version="4.20.70" />
     <PackageReference Include="System.Linq.Async" Version="6.0.1" />
     <PackageReference Include="xunit" Version="2.7.0" />
     <PackageReference Include="xunit.runner.visualstudio" Version="2.5.7">
@@ -0,0 +1,50 @@
+using Xunit;
+using LLama;
+using LLama.Abstractions;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Services;
+using System;
+using System.IO;
+using System.Runtime.CompilerServices;
+using System.Text;
+using static LLama.LLamaTransforms;
+using System.Threading.Tasks;
+using System.Collections.Generic;
+using System.Threading;
+using LLamaSharp.SemanticKernel.ChatCompletion;
+
+namespace LLamaSharp.SemanticKernel.Tests
+{
+    public class ExtensionMethodsTests
+    {
+        [Fact]
+        public void ToLLamaSharpChatHistory_StateUnderTest_ExpectedBehavior()
+        {
+            // Arrange
+            var chatHistory = new Microsoft.SemanticKernel.ChatCompletion.ChatHistory();
+            bool ignoreCase = true;
+
+            // Act
+            var result = ExtensionMethods.ToLLamaSharpChatHistory(
+                chatHistory,
+                ignoreCase);
+
+            // Assert
+            Assert.NotNull(result);
+        }
+
+        [Fact]
+        public void ToLLamaSharpInferenceParams_StateUnderTest_ExpectedBehavior()
+        {
+            // Arrange
+            var requestSettings = new ChatRequestSettings();
+
+            // Act
+            var result = ExtensionMethods.ToLLamaSharpInferenceParams(
+                requestSettings);
+
+            // Assert
+            Assert.NotNull(result);
+        }
+    }
+}
@@ -0,0 +1,85 @@
+using Xunit;
+using Moq;
+using LLama;
+using LLama.Abstractions;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Services;
+using System;
+using System.IO;
+using System.Runtime.CompilerServices;
+using System.Text;
+using static LLama.LLamaTransforms;
+using System.Threading.Tasks;
+using System.Collections.Generic;
+using System.Threading;
+
+namespace LLamaSharp.SemanticKernel.ChatCompletion.Tests
+{
+    public class LLamaSharpChatCompletionTests
+    {
+        private Mock<ILLamaExecutor> mockStatelessExecutor;
+
+        public LLamaSharpChatCompletionTests()
+        {
+            this.mockStatelessExecutor = new Mock<ILLamaExecutor>();
+        }
+
+        private LLamaSharpChatCompletion CreateLLamaSharpChatCompletion()
+        {
+            return new LLamaSharpChatCompletion(
+                this.mockStatelessExecutor.Object,
+                null,
+                null,
+                null);
+        }
+
+        [Fact]
+        public async Task GetChatMessageContentsAsync_StateUnderTest_ExpectedBehavior()
+        {
+            // Arrange
+            var unitUnderTest = this.CreateLLamaSharpChatCompletion();
+            ChatHistory chatHistory = new ChatHistory();
+            PromptExecutionSettings? executionSettings = null;
+            Kernel? kernel = null;
+            CancellationToken cancellationToken = default;
+            mockStatelessExecutor.Setup(e => e.InferAsync(It.IsAny<string>(), It.IsAny<IInferenceParams>(), It.IsAny<CancellationToken>()))
+                .Returns(new List<string> { "test" }.ToAsyncEnumerable());
+
+            // Act
+            var result = await unitUnderTest.GetChatMessageContentsAsync(
+                chatHistory,
+                executionSettings,
+                kernel,
+                cancellationToken);
+
+            // Assert
+            Assert.True(result.Count > 0);
+        }
+
+        [Fact]
+        public async Task GetStreamingChatMessageContentsAsync_StateUnderTest_ExpectedBehavior()
+        {
+            // Arrange
+            var unitUnderTest = this.CreateLLamaSharpChatCompletion();
+            ChatHistory chatHistory = new ChatHistory();
+            PromptExecutionSettings? executionSettings = null;
+            Kernel? kernel = null;
+            CancellationToken cancellationToken = default;
+
+            mockStatelessExecutor.Setup(e => e.InferAsync(It.IsAny<string>(), It.IsAny<IInferenceParams>(), It.IsAny<CancellationToken>()))
+                .Returns(new List<string> { "test" }.ToAsyncEnumerable());
+
+            // Act
+            await foreach (var result in unitUnderTest.GetStreamingChatMessageContentsAsync(
+                chatHistory,
+                executionSettings,
+                kernel,
+                cancellationToken))
+            {
+                // Assert
+                Assert.NotNull(result);
+            }
+        }
+    }
+}
@@ -0,0 +1,87 @@
+using Xunit;
+using Moq;
+using LLama;
+using LLama.Abstractions;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Services;
+using System;
+using System.IO;
+using System.Runtime.CompilerServices;
+using System.Text;
+using static LLama.LLamaTransforms;
+using System.Threading.Tasks;
+using System.Collections.Generic;
+using System.Threading;
+
+namespace LLamaSharp.SemanticKernel.TextCompletion.Tests
+{
+    public class LLamaSharpTextCompletionTests : IDisposable
+    {
+        private MockRepository mockRepository;
+        private Mock<ILLamaExecutor> mockExecutor;
+
+        public LLamaSharpTextCompletionTests()
+        {
+            this.mockRepository = new MockRepository(MockBehavior.Strict);
+            this.mockExecutor = this.mockRepository.Create<ILLamaExecutor>();
+        }
+
+        public void Dispose()
+        {
+            this.mockRepository.VerifyAll();
+        }
+
+        private LLamaSharpTextCompletion CreateLLamaSharpTextCompletion()
+        {
+            return new LLamaSharpTextCompletion(
+                this.mockExecutor.Object);
+        }
+
+        [Fact]
+        public async Task GetTextContentsAsync_StateUnderTest_ExpectedBehavior()
+        {
+            // Arrange
+            var unitUnderTest = this.CreateLLamaSharpTextCompletion();
+            string prompt = "Test";
+            PromptExecutionSettings? executionSettings = null;
+            Kernel? kernel = null;
+            CancellationToken cancellationToken = default;
+            mockExecutor.Setup(e => e.InferAsync(It.IsAny<string>(), It.IsAny<IInferenceParams>(), It.IsAny<CancellationToken>()))
+                .Returns(new List<string> { "test" }.ToAsyncEnumerable());
+
+            // Act
+            var result = await unitUnderTest.GetTextContentsAsync(
+                prompt,
+                executionSettings,
+                kernel,
+                cancellationToken);
+
+            // Assert
+            Assert.True(result.Count > 0);
+        }
+
+        [Fact]
+        public async Task GetStreamingTextContentsAsync_StateUnderTest_ExpectedBehavior()
+        {
+            // Arrange
+            var unitUnderTest = this.CreateLLamaSharpTextCompletion();
+            string prompt = "Test";
+            PromptExecutionSettings? executionSettings = null;
+            Kernel? kernel = null;
+            CancellationToken cancellationToken = default;
+            mockExecutor.Setup(e => e.InferAsync(It.IsAny<string>(), It.IsAny<IInferenceParams>(), It.IsAny<CancellationToken>()))
+                .Returns(new List<string> { "test" }.ToAsyncEnumerable());
+
+            // Act
+            await foreach (var result in unitUnderTest.GetStreamingTextContentsAsync(
+                prompt,
+                executionSettings,
+                kernel,
+                cancellationToken))
+            {
+                // Assert
+                Assert.NotNull(result);
+            }
+        }
+    }
+}