build: add package for kernel-memory integration.
This commit is contained in:
parent
c933a71b82
commit
6a7e74e71b
|
@ -1,9 +1,30 @@
|
||||||
<Project Sdk="Microsoft.NET.Sdk">
|
<Project Sdk="Microsoft.NET.Sdk">
|
||||||
|
|
||||||
<PropertyGroup>
|
<PropertyGroup>
|
||||||
<TargetFramework>net6.0</TargetFramework>
|
<TargetFrameworks>netstandard2.0;net6.0;net7.0</TargetFrameworks>
|
||||||
<ImplicitUsings>enable</ImplicitUsings>
|
<ImplicitUsings>enable</ImplicitUsings>
|
||||||
<Nullable>enable</Nullable>
|
<Nullable>enable</Nullable>
|
||||||
|
|
||||||
|
<Version>0.7.1</Version>
|
||||||
|
<Authors>Xbotter</Authors>
|
||||||
|
<Company>SciSharp STACK</Company>
|
||||||
|
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
|
||||||
|
<Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
|
||||||
|
<RepositoryUrl>https://github.com/SciSharp/LLamaSharp</RepositoryUrl>
|
||||||
|
<RepositoryType>git</RepositoryType>
|
||||||
|
<PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&amp;v=4</PackageIconUrl>
|
||||||
|
<PackageTags>LLama, LLM, GPT, ChatGPT, kernel-memory, vector search, SciSharp</PackageTags>
|
||||||
|
<Description>
|
||||||
|
The integration of LLamaSharp and Microsoft kernel-memory. It makes it easy to support document search for LLamaSharp model inference.
|
||||||
|
</Description>
|
||||||
|
<PackageReleaseNotes>
|
||||||
|
Support integration with kernel-memory
|
||||||
|
</PackageReleaseNotes>
|
||||||
|
<PackageLicenseExpression>MIT</PackageLicenseExpression>
|
||||||
|
<PackageOutputPath>packages</PackageOutputPath>
|
||||||
|
<Platforms>AnyCPU;x64;Arm64</Platforms>
|
||||||
|
<PackageId>LLamaSharp.kernel-memory</PackageId>
|
||||||
|
<Configurations>Debug;Release;GPU</Configurations>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
|
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
|
|
|
@ -10,8 +10,8 @@
|
||||||
<ImplicitUsings>enable</ImplicitUsings>
|
<ImplicitUsings>enable</ImplicitUsings>
|
||||||
<Nullable>enable</Nullable>
|
<Nullable>enable</Nullable>
|
||||||
|
|
||||||
<Version>0.6.2-beta1</Version>
|
<Version>0.7.1</Version>
|
||||||
<Authors>Tim Miller</Authors>
|
<Authors>Tim Miller, Xbotter</Authors>
|
||||||
<Company>SciSharp STACK</Company>
|
<Company>SciSharp STACK</Company>
|
||||||
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
|
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
|
||||||
<Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
|
<Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
|
||||||
|
@ -20,7 +20,7 @@
|
||||||
<PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&amp;v=4</PackageIconUrl>
|
<PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&amp;v=4</PackageIconUrl>
|
||||||
<PackageTags>LLama, LLM, GPT, ChatGPT, semantic-kernel, SciSharp</PackageTags>
|
<PackageTags>LLama, LLM, GPT, ChatGPT, semantic-kernel, SciSharp</PackageTags>
|
||||||
<Description>
|
<Description>
|
||||||
The integration of LLamaSharp ans semantic-kernel.
|
The integration of LLamaSharp and Microsoft semantic-kernel.
|
||||||
</Description>
|
</Description>
|
||||||
<PackageReleaseNotes>
|
<PackageReleaseNotes>
|
||||||
Support integration with semantic-kernel
|
Support integration with semantic-kernel
|
||||||
|
|
|
@ -54,6 +54,13 @@ For [microsoft semantic-kernel](https://github.com/microsoft/semantic-kernel) in
|
||||||
LLamaSharp.semantic-kernel
|
LLamaSharp.semantic-kernel
|
||||||
```
|
```
|
||||||
|
|
||||||
|
For [microsoft kernel-memory](https://github.com/microsoft/kernel-memory) integration, please search and install the following package:
|
||||||
|
|
||||||
|
```
|
||||||
|
LLamaSharp.kernel-memory
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
### Tips for choosing a version
|
### Tips for choosing a version
|
||||||
|
|
||||||
In general, there may be some breaking changes between two minor releases, for example 0.5.1 and 0.6.0. On the contrary, we don't introduce API breaking changes in patch releases. Therefore it's recommended to keep the highest patch version of a minor release. For example, keep 0.5.6 instead of 0.5.3.
|
In general, there may be some breaking changes between two minor releases, for example 0.5.1 and 0.6.0. On the contrary, we don't introduce API breaking changes in patch releases. Therefore it's recommended to keep the highest patch version of a minor release. For example, keep 0.5.6 instead of 0.5.3.
|
||||||
|
@ -196,7 +203,7 @@ Another choice is generate gguf format file yourself with a pytorch weight (or a
|
||||||
|
|
||||||
🔳 Fine-tune
|
🔳 Fine-tune
|
||||||
|
|
||||||
⚠️ Local document search (enabled by kernel-memory now)
|
✅ Local document search (enabled by kernel-memory now)
|
||||||
|
|
||||||
🔳 MAUI Integration
|
🔳 MAUI Integration
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue