Propagate ILogger

sa_ddam213 2023-10-04 13:47:08 +13:00
parent e2a17d6b6f
commit 9b8de007dc
7 changed files with 25 additions and 11 deletions

View File

@@ -31,7 +31,7 @@
 };
 using var model = LLamaWeights.LoadFromFile(parameters);
 using var context = model.CreateContext(parameters);
-var executor = new InstructExecutor(context, InstructionPrefix, InstructionSuffix);
+var executor = new InstructExecutor(context, null!, InstructionPrefix, InstructionSuffix);
 Console.ForegroundColor = ConsoleColor.Yellow;
 Console.WriteLine("The executor has been enabled. In this example, the LLM will follow your instructions." +
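
The example passes null! because it has no logger to hand. A caller that wants log output could build one with Microsoft.Extensions.Logging instead. A minimal sketch, assuming the console provider package (Microsoft.Extensions.Logging.Console) is referenced; the "LLama" category name is illustrative:

using Microsoft.Extensions.Logging;

// Sketch: create a console-backed ILogger and pass it through the new
// constructor parameter instead of null!.
using var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole());
var logger = loggerFactory.CreateLogger("LLama");
var executor = new InstructExecutor(context, logger, InstructionPrefix, InstructionSuffix);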

View File

@@ -10,6 +10,7 @@ namespace LLama.Web.Models
 /// <seealso cref="IDisposable" />
 public class LLamaModel : IDisposable
 {
+    private readonly ILogger _llamaLogger;
     private readonly ModelOptions _config;
     private readonly LLamaWeights _weights;
     private readonly ConcurrentDictionary<string, LLamaContext> _contexts;
@@ -18,9 +19,10 @@ namespace LLama.Web.Models
 /// Initializes a new instance of the <see cref="LLamaModel"/> class.
 /// </summary>
 /// <param name="modelParams">The model parameters.</param>
-public LLamaModel(ModelOptions modelParams)
+public LLamaModel(ModelOptions modelParams, ILogger llamaLogger)
 {
     _config = modelParams;
+    _llamaLogger = llamaLogger;
     _weights = LLamaWeights.LoadFromFile(modelParams);
     _contexts = new ConcurrentDictionary<string, LLamaContext>();
 }
@@ -56,7 +58,7 @@ namespace LLama.Web.Models
 if (_config.MaxInstances > -1 && ContextCount >= _config.MaxInstances)
     throw new Exception($"Maximum model instances reached");
-context = _weights.CreateContext(_config);
+context = _weights.CreateContext(_config, _llamaLogger);
 if (_contexts.TryAdd(contextName, context))
     return Task.FromResult(context);

View File

@@ -11,6 +11,7 @@ namespace LLama.Web.Services
 /// </summary>
 public class ModelService : IModelService
 {
+    private readonly ILogger _llamaLogger;
     private readonly AsyncLock _modelLock;
     private readonly AsyncLock _contextLock;
     private readonly LLamaOptions _configuration;
@@ -22,8 +23,9 @@ namespace LLama.Web.Services
 /// </summary>
 /// <param name="logger">The logger.</param>
 /// <param name="options">The options.</param>
-public ModelService(LLamaOptions configuration)
+public ModelService(LLamaOptions configuration, ILogger llamaLogger)
 {
+    _llamaLogger = llamaLogger;
     _modelLock = new AsyncLock();
     _contextLock = new AsyncLock();
     _configuration = configuration;
@@ -52,7 +54,7 @@ namespace LLama.Web.Services
 await UnloadModels();
-model = new LLamaModel(modelOptions);
+model = new LLamaModel(modelOptions, _llamaLogger);
 _modelInstances.TryAdd(modelOptions.Name, model);
 return model;
 }
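
ModelService now depends on the non-generic ILogger, which ASP.NET Core's container does not register by default (only ILogger&lt;T&gt; is). A hedged sketch of how the composition root might supply it; the registration lines are illustrative, not part of this commit:

// Illustrative DI wiring: ILoggerFactory is registered by default, so use it
// to manufacture the single ILogger that ModelService and LLamaModel expect.
services.AddSingleton<ILogger>(provider =>
    provider.GetRequiredService<ILoggerFactory>().CreateLogger("LLama"));
services.AddSingleton<IModelService, ModelService>();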

View File

@@ -9,6 +9,7 @@ using System.Text.Json;
 using System.Text.Json.Serialization;
 using System.Threading.Tasks;
 using LLama.Extensions;
+using Microsoft.Extensions.Logging;
 namespace LLama
 {
@@ -27,10 +28,11 @@ namespace LLama
 ///
 /// </summary>
 /// <param name="context"></param>
+/// <param name="logger"></param>
 /// <param name="instructionPrefix"></param>
 /// <param name="instructionSuffix"></param>
-public InstructExecutor(LLamaContext context, string instructionPrefix = "\n\n### Instruction:\n\n",
-    string instructionSuffix = "\n\n### Response:\n\n") : base(context)
+public InstructExecutor(LLamaContext context, ILogger logger = null!, string instructionPrefix = "\n\n### Instruction:\n\n",
+    string instructionSuffix = "\n\n### Response:\n\n") : base(context, logger)
 {
     _inp_pfx = Context.Tokenize(instructionPrefix, true);
     _inp_sfx = Context.Tokenize(instructionSuffix, false);

View File

@@ -9,6 +9,7 @@ using System.Text.Json;
 using System.Text.Json.Serialization;
 using System.Threading.Tasks;
 using LLama.Extensions;
+using Microsoft.Extensions.Logging;
 namespace LLama
 {
@@ -25,7 +26,8 @@ namespace LLama
 ///
 /// </summary>
 /// <param name="context"></param>
-public InteractiveExecutor(LLamaContext context) : base(context)
+/// <param name="logger"></param>
+public InteractiveExecutor(LLamaContext context, ILogger logger = null!) : base(context, logger)
 {
     _llama_token_newline = NativeApi.llama_token_nl(Context.NativeHandle);
 }
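
Since the logger parameter defaults to null!, existing call sites keep compiling unchanged; opting in is just one extra argument. For example, assuming logger is an ILogger built as sketched earlier:

// Both forms compile after this change; only the second one emits log output.
var silent = new InteractiveExecutor(context);
var chatty = new InteractiveExecutor(context, logger);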

View File

@@ -7,6 +7,7 @@ using System.Runtime.CompilerServices;
 using System.Threading;
 using System.Threading.Tasks;
 using LLama.Extensions;
+using Microsoft.Extensions.Logging;
 namespace LLama
 {
@@ -19,6 +20,7 @@ namespace LLama
 public class StatelessExecutor
     : ILLamaExecutor
 {
+    private readonly ILogger? _logger;
     private readonly LLamaWeights _weights;
     private readonly IModelParams _params;
@@ -32,8 +34,10 @@ namespace LLama
 /// </summary>
 /// <param name="weights"></param>
 /// <param name="params"></param>
-public StatelessExecutor(LLamaWeights weights, IModelParams @params)
+/// <param name="logger"></param>
+public StatelessExecutor(LLamaWeights weights, IModelParams @params, ILogger logger = null!)
 {
+    _logger = logger;
     _weights = weights;
     _params = @params;

View File

@@ -3,6 +3,7 @@ using System.Text;
 using LLama.Abstractions;
 using LLama.Extensions;
 using LLama.Native;
+using Microsoft.Extensions.Logging;
 namespace LLama
 {
@@ -72,10 +73,11 @@ namespace LLama
 /// Create a llama_context using this model
 /// </summary>
 /// <param name="params"></param>
+/// <param name="logger"></param>
 /// <returns></returns>
-public LLamaContext CreateContext(IModelParams @params)
+public LLamaContext CreateContext(IModelParams @params, ILogger logger = default!)
 {
-    return new LLamaContext(this, @params);
+    return new LLamaContext(this, @params, logger);
 }
 }
 }
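
Taken together, these changes let a single ILogger flow from the caller through LLamaWeights.CreateContext into the LLamaContext and every executor built on top of it. A sketch of the resulting call pattern, assuming parameters and logger are already in scope:

using var model = LLamaWeights.LoadFromFile(parameters);
using var context = model.CreateContext(parameters, logger); // logger now reaches LLamaContext
var interactive = new InteractiveExecutor(context, logger);
var stateless = new StatelessExecutor(model, parameters, logger);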