Based on this version: 6b73ef1201
This commit is contained in:
Martin Evans 2023-08-28 19:48:31 +01:00
parent 974f16064a
commit 2022b82947
10 changed files with 25 additions and 37 deletions

View File

@ -55,27 +55,15 @@
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/ggml-metal.metal">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama-cuda11.dll">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama-cuda11.so">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama-cuda12.dll">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama-cuda12.so">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama-metal.dylib">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama.dylib">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama.so">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View File

@ -37,35 +37,20 @@
</ItemGroup>
<ItemGroup>
<None Update="../runtimes/ggml-metal.metal">
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/ggml-metal.metal">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="../runtimes/libllama-cuda11.dll">
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama-cuda11.so">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="../runtimes/libllama-cuda11.so">
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama-cuda12.so">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="../runtimes/libllama-cuda12.dll">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="../runtimes/libllama-cuda12.so">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="../runtimes/libllama-metal.dylib">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="../runtimes/libllama.dylib">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="../runtimes/libllama.so">
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama-metal.dylib">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="Models\codellama-7b.Q3_K_S.gguf">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="Models\llama-2-7b-chat.ggmlv3.q3_K_S.bin">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View File

@ -22,4 +22,19 @@
<CopyToPublishDirectory>Never</CopyToPublishDirectory>
</Content>
</ItemGroup>
<ItemGroup>
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/ggml-metal.metal">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama-cuda11.so">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama-cuda12.so">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
<None Update="C:\Users\Martin\Documents\dotnet\LLamaSharp\LLama\runtimes/libllama-metal.dylib">
<CopyToOutputDirectory>Never</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View File

@ -414,7 +414,7 @@ namespace LLama.Native
/// <param name="length">size of the buffer</param>
/// <returns>The length written, or if the buffer is too small a negative value that indicates the length required</returns>
[DllImport(libraryName, CallingConvention = CallingConvention.Cdecl)]
public static extern int llama_token_to_str_with_model(SafeLlamaModelHandle model, int llamaToken, byte* buffer, int length);
public static extern int llama_token_to_piece_with_model(SafeLlamaModelHandle model, int llamaToken, byte* buffer, int length);
/// <summary>
/// Convert text into tokens

View File

@ -94,7 +94,7 @@ namespace LLama.Native
{
fixed (byte* destPtr = dest)
{
var length = NativeApi.llama_token_to_str_with_model(this, llama_token, destPtr, dest.Length);
var length = NativeApi.llama_token_to_piece_with_model(this, llama_token, destPtr, dest.Length);
return Math.Abs(length);
}
}
@ -110,7 +110,7 @@ namespace LLama.Native
{
unsafe
{
var length = NativeApi.llama_token_to_str_with_model(this, llama_token, null, 0);
var length = NativeApi.llama_token_to_piece_with_model(this, llama_token, null, 0);
if (length == 0)
return "";
@ -118,7 +118,7 @@ namespace LLama.Native
fixed (byte* bytePtr = bytes)
{
var written = NativeApi.llama_token_to_str_with_model(this, llama_token, bytePtr, bytes.Length);
var written = NativeApi.llama_token_to_piece_with_model(this, llama_token, bytePtr, bytes.Length);
Debug.Assert(written == bytes.Length);
return encoding.GetString(bytePtr, bytes.Length);
@ -136,7 +136,7 @@ namespace LLama.Native
{
unsafe
{
var length = NativeApi.llama_token_to_str_with_model(this, llama_token, null, 0);
var length = NativeApi.llama_token_to_piece_with_model(this, llama_token, null, 0);
if (length == 0)
return;
@ -144,7 +144,7 @@ namespace LLama.Native
fixed (byte* bytePtr = bytes)
{
// Decode into bytes
var written = NativeApi.llama_token_to_str_with_model(this, llama_token, bytePtr, bytes.Length);
var written = NativeApi.llama_token_to_piece_with_model(this, llama_token, bytePtr, bytes.Length);
Debug.Assert(written == bytes.Length);
// Decode into chars

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
LLama/runtimes/libllama.so Normal file

Binary file not shown.