// LLamaSharp/LLama/ChatSession.cs

using LLama.Abstractions;
using LLama.Common;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;

namespace LLama
{
    /// <summary>
    /// The main chat session class.
    /// </summary>
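    /// <remarks>
    /// A minimal usage sketch, assuming an <see cref="ILLamaExecutor"/> named <c>executor</c> has already been constructed elsewhere:
    /// <code>
    /// var session = new ChatSession(executor)
    ///     .WithHistoryTransform(new LLamaTransforms.DefaultHistoryTransform());
    /// foreach (var token in session.Chat("Hello!"))
    /// {
    ///     Console.Write(token);
    /// }
    /// </code>
    /// </remarks>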
    public class ChatSession
    {
        private ILLamaExecutor _executor;
        private ChatHistory _history;
        private static readonly string _executorStateFilename = "ExecutorState.json";
        private static readonly string _modelStateFilename = "ModelState.st";

        /// <summary>
        /// The executor for this session.
        /// </summary>
        public ILLamaExecutor Executor => _executor;

        /// <summary>
        /// The chat history for this session.
        /// </summary>
        public ChatHistory History => _history;

        /// <summary>
        /// The history transform used in this session.
        /// </summary>
        public IHistoryTransform HistoryTransform { get; set; } = new LLamaTransforms.DefaultHistoryTransform();

        /// <summary>
        /// The input transform pipeline used in this session.
        /// </summary>
        public List<ITextTransform> InputTransformPipeline { get; set; } = new();

        /// <summary>
        /// The output transform used in this session.
        /// </summary>
        public ITextStreamTransform OutputTransform = new LLamaTransforms.EmptyTextOutputStreamTransform();

        /// <summary>
        /// Create a new chat session.
        /// </summary>
        /// <param name="executor">The executor for this session</param>
        public ChatSession(ILLamaExecutor executor)
        {
            _executor = executor;
            _history = new ChatHistory();
        }

        /// <summary>
        /// Use a custom history transform.
        /// </summary>
        /// <param name="transform"></param>
        /// <returns></returns>
        public ChatSession WithHistoryTransform(IHistoryTransform transform)
        {
            HistoryTransform = transform;
            return this;
        }

        /// <summary>
        /// Add a text transform to the input transform pipeline.
        /// </summary>
        /// <param name="transform"></param>
        /// <returns></returns>
        public ChatSession AddInputTransform(ITextTransform transform)
        {
            InputTransformPipeline.Add(transform);
            return this;
        }

        /// <summary>
        /// Use a custom output transform.
        /// </summary>
        /// <param name="transform"></param>
        /// <returns></returns>
        public ChatSession WithOutputTransform(ITextStreamTransform transform)
        {
            OutputTransform = transform;
            return this;
        }
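
        // The fluent configuration methods above are meant to be chained. A minimal sketch;
        // KeywordTextOutputStreamTransform and the keyword list are illustrative assumptions, not used in this file:
        //
        //   var session = new ChatSession(executor)
        //       .WithHistoryTransform(new LLamaTransforms.DefaultHistoryTransform())
        //       .WithOutputTransform(new LLamaTransforms.KeywordTextOutputStreamTransform(new[] { "User:" }));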

        /// <summary>
        /// Save the session state to a directory.
        /// </summary>
        /// <param name="path">The directory name to save the session. If the directory does not exist, a new directory will be created.</param>
        public virtual void SaveSession(string path)
        {
            if (!Directory.Exists(path))
            {
                Directory.CreateDirectory(path);
            }
            _executor.Model.SaveState(Path.Combine(path, _modelStateFilename));
            if (Executor is StatelessExecutor)
            {
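                // A stateless executor keeps no per-conversation state of its own, so the model state
                // saved above is all there is; nothing extra needs to be written here.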
            }
            else if (Executor is StatefulExecutorBase statefulExecutor)
            {
                statefulExecutor.SaveState(Path.Combine(path, _executorStateFilename));
            }
            else
            {
                throw new System.NotImplementedException("You're using a customized executor. Please inherit ChatSession and override this method.");
            }
        }

        /// <summary>
        /// Load the session state from a directory.
        /// </summary>
        /// <param name="path">The directory name to load the session.</param>
        public virtual void LoadSession(string path)
        {
            if (!Directory.Exists(path))
            {
                throw new DirectoryNotFoundException($"Directory {path} does not exist.");
            }
            _executor.Model.LoadState(Path.Combine(path, _modelStateFilename));
            if (Executor is StatelessExecutor)
            {
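                // As in SaveSession, a stateless executor has no separate executor state file to read back.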
            }
            else if (Executor is StatefulExecutorBase statefulExecutor)
            {
                statefulExecutor.LoadState(Path.Combine(path, _executorStateFilename));
            }
            else
            {
                throw new System.NotImplementedException("You're using a customized executor. Please inherit ChatSession and override this method.");
            }
        }

        /// <summary>
        /// Get the response from the LLama model with chat histories.
        /// </summary>
        /// <param name="history"></param>
        /// <param name="inferenceParams"></param>
        /// <param name="cancellationToken"></param>
        /// <returns></returns>
        public IEnumerable<string> Chat(ChatHistory history, IInferenceParams? inferenceParams = null, CancellationToken cancellationToken = default)
        {
            var prompt = HistoryTransform.HistoryToText(history);
            History.Messages.AddRange(HistoryTransform.TextToHistory(AuthorRole.User, prompt).Messages);
            StringBuilder sb = new();
            foreach (var result in ChatInternal(prompt, inferenceParams, cancellationToken))
            {
                yield return result;
                sb.Append(result);
            }
            History.Messages.AddRange(HistoryTransform.TextToHistory(AuthorRole.Assistant, sb.ToString()).Messages);
        }
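
        // Each Chat/ChatAsync overload streams tokens to the caller as they are generated, while
        // accumulating them in a StringBuilder so the full reply can be recorded in History as an
        // assistant message once the stream ends.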

        /// <summary>
        /// Get the response from the LLama model. The prompt can be not only preset text but also the question you want to ask.
        /// </summary>
        /// <param name="prompt"></param>
        /// <param name="inferenceParams"></param>
        /// <param name="cancellationToken"></param>
        /// <returns></returns>
        public IEnumerable<string> Chat(string prompt, IInferenceParams? inferenceParams = null, CancellationToken cancellationToken = default)
        {
            foreach (var inputTransform in InputTransformPipeline)
            {
                prompt = inputTransform.Transform(prompt);
            }
            History.Messages.AddRange(HistoryTransform.TextToHistory(AuthorRole.User, prompt).Messages);
            StringBuilder sb = new();
            foreach (var result in ChatInternal(prompt, inferenceParams, cancellationToken))
            {
                yield return result;
                sb.Append(result);
            }
            History.Messages.AddRange(HistoryTransform.TextToHistory(AuthorRole.Assistant, sb.ToString()).Messages);
        }

        /// <summary>
        /// Get the response from the LLama model with chat histories asynchronously.
        /// </summary>
        /// <param name="history"></param>
        /// <param name="inferenceParams"></param>
        /// <param name="cancellationToken"></param>
        /// <returns></returns>
        public async IAsyncEnumerable<string> ChatAsync(ChatHistory history, IInferenceParams? inferenceParams = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
        {
            var prompt = HistoryTransform.HistoryToText(history);
            History.Messages.AddRange(HistoryTransform.TextToHistory(AuthorRole.User, prompt).Messages);
            StringBuilder sb = new();
            await foreach (var result in ChatAsyncInternal(prompt, inferenceParams, cancellationToken))
            {
                yield return result;
                sb.Append(result);
            }
            History.Messages.AddRange(HistoryTransform.TextToHistory(AuthorRole.Assistant, sb.ToString()).Messages);
        }

        /// <summary>
        /// Get the response from the LLama model asynchronously. The prompt can be not only preset text but also the question you want to ask.
        /// </summary>
        /// <param name="prompt"></param>
        /// <param name="inferenceParams"></param>
        /// <param name="cancellationToken"></param>
        /// <returns></returns>
        public async IAsyncEnumerable<string> ChatAsync(string prompt, IInferenceParams? inferenceParams = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
        {
            foreach (var inputTransform in InputTransformPipeline)
            {
                prompt = inputTransform.Transform(prompt);
            }
            History.Messages.AddRange(HistoryTransform.TextToHistory(AuthorRole.User, prompt).Messages);
            StringBuilder sb = new();
            await foreach (var result in ChatAsyncInternal(prompt, inferenceParams, cancellationToken))
            {
                yield return result;
                sb.Append(result);
            }
            History.Messages.AddRange(HistoryTransform.TextToHistory(AuthorRole.Assistant, sb.ToString()).Messages);
        }
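
        // Both internal helpers below run inference through the executor and pipe the raw token
        // stream through OutputTransform before it reaches the public Chat/ChatAsync methods.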

        private IEnumerable<string> ChatInternal(string prompt, IInferenceParams? inferenceParams = null, CancellationToken cancellationToken = default)
        {
            var results = _executor.Infer(prompt, inferenceParams, cancellationToken);
            return OutputTransform.Transform(results);
        }

        private async IAsyncEnumerable<string> ChatAsyncInternal(string prompt, IInferenceParams? inferenceParams = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
        {
            var results = _executor.InferAsync(prompt, inferenceParams, cancellationToken);
            await foreach (var item in OutputTransform.TransformAsync(results))
            {
                yield return item;
            }
        }
    }
}