using System;
using LLama.Abstractions;
using LLama.Extensions;
using LLama.Native;

namespace LLama
{
    /// <summary>
    /// A set of model weights, loaded into memory.
    /// </summary>
    public sealed class LLamaWeights
        : IDisposable
    {
        private readonly SafeLlamaModelHandle _weights;

        /// <summary>
        /// The native handle, which is used in the native APIs
        /// </summary>
        /// <remarks>Be careful how you use this!</remarks>
        public SafeLlamaModelHandle NativeHandle => _weights;

        /// <summary>
        /// Total number of tokens in vocabulary of this model
        /// </summary>
        public int VocabCount => NativeHandle.VocabCount;

        /// <summary>
        /// Total number of tokens in the context
        /// </summary>
        public int ContextSize => NativeHandle.ContextSize;

        /// <summary>
        /// Get the size of this model in bytes
        /// </summary>
        public ulong SizeInBytes => NativeHandle.SizeInBytes;

        /// <summary>
        /// Get the number of parameters in this model
        /// </summary>
        public ulong ParameterCount => NativeHandle.ParameterCount;

        /// <summary>
        /// Dimension of embedding vectors
        /// </summary>
        public int EmbeddingSize => NativeHandle.EmbeddingSize;

        internal LLamaWeights(SafeLlamaModelHandle weights)
        {
            _weights = weights;
        }

        /// <summary>
        /// Load weights into memory
        /// </summary>
        /// <param name="params">Parameters describing the model file to load and any LoRA adapters to apply.</param>
        /// <returns>A new <see cref="LLamaWeights"/> wrapping the loaded native model handle.</returns>
        public static LLamaWeights LoadFromFile(IModelParams @params)
        {
            // Pin keeps managed memory referenced by the native parameter struct alive
            // (and un-moved) for the duration of the native load call.
            using var pin = @params.ToLlamaModelParams(out var lparams);

            var weights = SafeLlamaModelHandle.LoadFromFile(@params.ModelPath, lparams);

            foreach (var adapter in @params.LoraAdapters)
            {
                // Skip adapters that would have no effect: missing path or non-positive scale.
                if (string.IsNullOrEmpty(adapter.Path))
                    continue;
                if (adapter.Scale <= 0)
                    continue;

                weights.ApplyLoraFromFile(adapter.Path, adapter.Scale, @params.LoraBase, @params.Threads);
            }

            return new LLamaWeights(weights);
        }

        /// <inheritdoc />
        public void Dispose()
        {
            _weights.Dispose();
        }

        /// <summary>
        /// Create a llama_context using this model
        /// </summary>
        /// <param name="params">Parameters for the new context.</param>
        /// <returns>A new <see cref="LLamaContext"/> backed by these weights.</returns>
        public LLamaContext CreateContext(IContextParams @params)
        {
            return new LLamaContext(this, @params);
        }
    }
}