Fixed a few minor warnings

This commit is contained in:
Martin Evans 2023-10-19 00:43:50 +01:00
parent 9daf586ba8
commit e89ca5cc17
6 changed files with 18 additions and 31 deletions

View File

@@ -8,7 +8,10 @@ namespace LLama.Examples.NewVersion
{
Console.Write("Please input your model path: ");
var modelPath = Console.ReadLine();
var embedder = new LLamaEmbedder(new ModelParams(modelPath));
var @params = new ModelParams(modelPath);
using var weights = LLamaWeights.LoadFromFile(@params);
var embedder = new LLamaEmbedder(weights, @params);
while (true)
{

View File

@@ -8,7 +8,7 @@ namespace LLama.Examples.NewVersion
{
Console.Write("Please input your model path: ");
var modelPath = Console.ReadLine();
var prompt = File.ReadAllText("Assets/chat-with-bob.txt").Trim();
var prompt = (await File.ReadAllTextAsync("Assets/chat-with-bob.txt")).Trim();
var parameters = new ModelParams(modelPath)
{
@@ -44,7 +44,7 @@ namespace LLama.Examples.NewVersion
Console.Write("Your path to save executor state: ");
var executorStatePath = Console.ReadLine();
ex.SaveState(executorStatePath);
await ex.SaveState(executorStatePath);
Console.ForegroundColor = ConsoleColor.Yellow;
Console.WriteLine("All states saved!");
@@ -53,7 +53,7 @@ namespace LLama.Examples.NewVersion
var ctx = ex.Context;
ctx.LoadState(modelStatePath);
ex = new InteractiveExecutor(ctx);
ex.LoadState(executorStatePath);
await ex.LoadState(executorStatePath);
Console.ForegroundColor = ConsoleColor.Yellow;
Console.WriteLine("Loaded state!");
Console.ForegroundColor = ConsoleColor.White;

View File

@@ -5,7 +5,14 @@ namespace LLama.Unittest;
public class LLamaEmbedderTests
: IDisposable
{
private readonly LLamaEmbedder _embedder = new(new ModelParams(Constants.ModelPath));
private readonly LLamaEmbedder _embedder;
public LLamaEmbedderTests()
{
var @params = new ModelParams(Constants.ModelPath);
using var weights = LLamaWeights.LoadFromFile(@params);
_embedder = new(weights, @params);
}
public void Dispose()
{

View File

@@ -1,6 +1,4 @@
using System.Text;
using LLama.Common;
using Newtonsoft.Json;
using LLama.Common;
namespace LLama.Unittest
{
@@ -40,34 +38,11 @@ namespace LLama.Unittest
};
var settings = new Newtonsoft.Json.JsonSerializerSettings();
settings.Converters.Add(new NewtsonsoftEncodingConverter());
var json = Newtonsoft.Json.JsonConvert.SerializeObject(expected, settings);
var actual = Newtonsoft.Json.JsonConvert.DeserializeObject<ModelParams>(json, settings);
Assert.Equal(expected, actual);
}
public class NewtsonsoftEncodingConverter : JsonConverter
{
public override bool CanConvert(Type objectType)
{
return typeof(Encoding).IsAssignableFrom(objectType);
}
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
writer.WriteValue(((Encoding)value).WebName);
}
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
return Encoding.GetEncoding((string)reader.Value);
}
}
}
}

View File

@@ -273,6 +273,7 @@ namespace LLama.Native
/// <param name="n_past"></param>
/// <returns>Returns 0 on success</returns>
[DllImport(libraryName, CallingConvention = CallingConvention.Cdecl)]
[Obsolete("use llama_decode() instead")]
public static extern int llama_eval(SafeLLamaContextHandle ctx, llama_token* tokens, int n_tokens, int n_past);
/// <summary>

View File

@@ -204,6 +204,7 @@ namespace LLama.Native
{
fixed (int* pinned = tokens)
{
// the entire `eval` system needs replacing with the new batch system!
var ret = NativeApi.llama_eval(this, pinned, tokens.Length, n_past);
return ret == 0;
}