docs: publish documentation 0.4.

This commit is contained in:
Yaohui Liu 2023-06-20 02:38:57 +08:00
parent eed96248b5
commit 6c400e64c2
No known key found for this signature in database
GPG Key ID: E86D01E1809BD23E
189 changed files with 159213 additions and 31 deletions

View File

@ -8,36 +8,73 @@ using System.Threading;
namespace LLama
{
/// <summary>
/// The main chat session class.
/// </summary>
public class ChatSession
{
private ILLamaExecutor _executor;
private ChatHistory _history;
private static readonly string _executorStateFilename = "ExecutorState.json";
private static readonly string _modelStateFilename = "ModelState.st";
/// <summary>
/// The executor for this session.
/// </summary>
public ILLamaExecutor Executor => _executor;
/// <summary>
/// The chat history for this session.
/// </summary>
public ChatHistory History => _history;
/// <summary>
/// The history transform used in this session.
/// </summary>
public IHistoryTransform HistoryTransform { get; set; } = new LLamaTransforms.DefaultHistoryTransform();
/// <summary>
/// The input transform pipeline used in this session.
/// </summary>
public List<ITextTransform> InputTransformPipeline { get; set; } = new();
/// <summary>
/// The output transform used in this session.
/// </summary>
public ITextStreamTransform OutputTransform = new LLamaTransforms.EmptyTextOutputStreamTransform();
/// <summary>
///
/// </summary>
/// <param name="executor">The executor for this session</param>
public ChatSession(ILLamaExecutor executor)
{
_executor = executor;
_history = new ChatHistory();
}
/// <summary>
/// Use a custom history transform.
/// </summary>
/// <param name="transform"></param>
/// <returns></returns>
public ChatSession WithHistoryTransform(IHistoryTransform transform)
{
HistoryTransform = transform;
return this;
}
/// <summary>
/// Add a text transform to the input transform pipeline.
/// </summary>
/// <param name="transform"></param>
/// <returns></returns>
public ChatSession AddInputTransform(ITextTransform transform)
{
InputTransformPipeline.Add(transform);
return this;
}
/// <summary>
/// Use a custom output transform.
/// </summary>
/// <param name="transform"></param>
/// <returns></returns>
public ChatSession WithOutputTransform(ITextStreamTransform transform)
{
OutputTransform = transform;
@ -155,6 +192,13 @@ namespace LLama
History.Messages.AddRange(HistoryTransform.TextToHistory(AuthorRole.Assistant, sb.ToString()).Messages);
}
/// <summary>
/// Get the response from the LLama model with chat histories asynchronously.
/// </summary>
/// <param name="prompt"></param>
/// <param name="inferenceParams"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public async IAsyncEnumerable<string> ChatAsync(string prompt, InferenceParams? inferenceParams = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
foreach (var inputTransform in InputTransformPipeline)

View File

@ -12,6 +12,9 @@ namespace LLama.Common
Assistant = 2,
}
// copy from semantic-kernel
/// <summary>
/// The chat history class
/// </summary>
public class ChatHistory
{

View File

@ -1,27 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace LLama.Common
{
public class SessionParams
{
public string? UserName { get; set; }
public string? AssistantName { get; set; }
public string? SystemName { get; set; }
/// <summary>
/// The prefix of input text. Note that this only works when you
/// use the API with text as input.
/// </summary>
public string? InputPrefix { get; set; }
/// <summary>
/// The suffix of input text. Note that this only works when you
/// use the API with text as input.
/// </summary>
public string? InputSuffix { get; set; }
/// <summary>
/// Whether to trim the names from the text output at the start and end.
/// </summary>
public bool TrimNamesFromOutput { get; set; } = false;
}
}

View File

@ -8,6 +8,9 @@ using LLama.Common;
namespace LLama
{
/// <summary>
/// The embedder for LLama, which supports getting embeddings from text.
/// </summary>
public class LLamaEmbedder : IDisposable
{
SafeLLamaContextHandle _ctx;

View File

@ -13,20 +13,64 @@ using System.Threading;
namespace LLama
{
using llama_token = Int32;
/// <summary>
/// The base class for stateful LLama executors.
/// </summary>
public abstract class StatefulExecutorBase : ILLamaExecutor
{
/// <summary>
/// The loaded model for this executor.
/// </summary>
protected readonly LLamaModel _model;
/// <summary>
/// The logger used by this executor.
/// </summary>
protected ILLamaLogger? _logger;
/// <summary>
/// The tokens that were already processed by the model.
/// </summary>
protected int _pastTokensCount; // n_past
/// <summary>
/// The tokens that were consumed by the model during the current inference.
/// </summary>
protected int _consumedTokensCount; // n_consume
/// <summary>
///
/// </summary>
protected int _n_session_consumed;
/// <summary>
///
/// </summary>
protected int _n_matching_session_tokens;
/// <summary>
/// The path of the session file.
/// </summary>
protected string? _pathSession;
/// <summary>
/// A container for the tokens to be processed and those that have already been processed.
/// </summary>
protected List<llama_token> _embeds = new(); // embd
/// <summary>
/// A container for the tokens of input.
/// </summary>
protected List<llama_token> _embed_inps = new();
/// <summary>
///
/// </summary>
protected List<llama_token> _session_tokens = new();
/// <summary>
/// The last tokens generated by the model.
/// </summary>
protected FixedSizeQueue<llama_token> _last_n_tokens;
/// <summary>
/// The model used by the executor.
/// </summary>
public LLamaModel Model => _model;
/// <summary>
///
/// </summary>
/// <param name="model"></param>
/// <param name="logger"></param>
protected StatefulExecutorBase(LLamaModel model, ILLamaLogger? logger = null)
{
_model = model;
@ -39,6 +83,13 @@ namespace LLama
_last_n_tokens = new FixedSizeQueue<llama_token>(_model.ContextSize).FillWith(0);
}
/// <summary>
/// This API is currently not verified.
/// </summary>
/// <param name="filename"></param>
/// <returns></returns>
/// <exception cref="ArgumentNullException"></exception>
/// <exception cref="RuntimeError"></exception>
public unsafe StatefulExecutorBase WithSessionFile(string filename)
{
_pathSession = filename;
@ -94,12 +145,20 @@ namespace LLama
return this;
}
/// <summary>
/// This API has not been verified currently.
/// </summary>
/// <param name="filename"></param>
public void SaveSessionFile(string filename)
{
var session_token_array = _session_tokens.ToArray();
NativeApi.llama_save_session_file(_model.NativeHandle, filename, session_token_array, (ulong)session_token_array.Length);
}
/// <summary>
/// After running out of the context, take some tokens from the original prompt and recompute the logits in batches.
/// </summary>
/// <param name="tokensToKeep"></param>
protected virtual void HandleRunOutOfContext(int tokensToKeep)
{
// if we run out of context:
@ -116,6 +175,9 @@ namespace LLama
_pathSession = string.Empty;
}
/// <summary>
/// Try to reuse the matching prefix from the session file.
/// </summary>
protected virtual void TryReuseMathingPrefix()
{
if (_n_session_consumed < _session_tokens.Count)
@ -146,16 +208,61 @@ namespace LLama
}
}
/// <summary>
/// Decide whether to continue the loop.
/// </summary>
/// <param name="args"></param>
/// <returns></returns>
protected abstract bool GetLoopCondition(InferStateArgs args);
/// <summary>
/// Preprocess the inputs before the inference.
/// </summary>
/// <param name="text"></param>
/// <param name="args"></param>
protected abstract void PreprocessInputs(string text, InferStateArgs args);
/// <summary>
/// Do some post processing after the inference.
/// </summary>
/// <param name="inferenceParams"></param>
/// <param name="args"></param>
/// <param name="extraOutputs"></param>
/// <returns></returns>
protected abstract bool PostProcess(InferenceParams inferenceParams, InferStateArgs args, out IEnumerable<string>? extraOutputs);
/// <summary>
/// The core inference logic.
/// </summary>
/// <param name="inferenceParams"></param>
/// <param name="args"></param>
protected abstract void InferInternal(InferenceParams inferenceParams, InferStateArgs args);
/// <summary>
/// Save the current state to a file.
/// </summary>
/// <param name="filename"></param>
public abstract void SaveState(string filename);
/// <summary>
/// Get the current state data.
/// </summary>
/// <returns></returns>
public abstract ExecutorBaseState GetStateData();
/// <summary>
/// Load the state from data.
/// </summary>
/// <param name="data"></param>
public abstract void LoadState(ExecutorBaseState data);
/// <summary>
/// Load the state from a file.
/// </summary>
/// <param name="filename"></param>
public abstract void LoadState(string filename);
/// <summary>
/// Execute the inference.
/// </summary>
/// <param name="text"></param>
/// <param name="inferenceParams"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public virtual IEnumerable<string> Infer(string text, InferenceParams? inferenceParams = null, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
@ -205,6 +312,14 @@ namespace LLama
}
}
}
/// <summary>
/// Execute the inference asynchronously.
/// </summary>
/// <param name="text"></param>
/// <param name="inferenceParams"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public virtual async IAsyncEnumerable<string> InferAsync(string text, InferenceParams? inferenceParams = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
foreach (var result in Infer(text, inferenceParams, cancellationToken))
@ -218,13 +333,25 @@ namespace LLama
/// </summary>
protected class InferStateArgs
{
/// <summary>
///
/// </summary>
public IList<string>? Antiprompts { get; set; }
/// <summary>
/// Tokens count remained to be used. (n_remain)
/// </summary>
public int RemainedTokens { get; set; }
/// <summary>
///
/// </summary>
public bool ReturnValue { get; set; }
/// <summary>
///
/// </summary>
public bool WaitForInput { get; set; }
/// <summary>
///
/// </summary>
public bool NeedToSaveSession { get; set; }
}

View File

@ -11,11 +11,20 @@ using System.Text.Json.Serialization;
namespace LLama
{
using llama_token = Int32;
/// <summary>
/// The LLama executor for instruct mode.
/// </summary>
public class InstructExecutor : StatefulExecutorBase
{
bool _is_prompt_run = true;
llama_token[] _inp_pfx;
llama_token[] _inp_sfx;
/// <summary>
///
/// </summary>
/// <param name="model"></param>
/// <param name="instructionPrefix"></param>
/// <param name="instructionSuffix"></param>
public InstructExecutor(LLamaModel model, string instructionPrefix = "\n\n### Instruction:\n\n",
string instructionSuffix = "\n\n### Response:\n\n") : base(model)
{
@ -23,6 +32,7 @@ namespace LLama
_inp_sfx = _model.Tokenize(instructionSuffix, false).ToArray();
}
/// <inheritdoc />
public override ExecutorBaseState GetStateData()
{
InstructExecutorState state = new()
@ -43,6 +53,7 @@ namespace LLama
};
return state;
}
/// <inheritdoc />
public override void LoadState(ExecutorBaseState data)
{
if(data is InstructExecutorState state)
@ -66,6 +77,7 @@ namespace LLama
}
}
/// <inheritdoc />
public override void SaveState(string filename)
{
InstructExecutorState state = GetStateData() as InstructExecutorState;
@ -74,6 +86,7 @@ namespace LLama
JsonSerializer.Serialize<InstructExecutorState>(fs, state);
}
}
/// <inheritdoc />
public override void LoadState(string filename)
{
using (FileStream fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
@ -83,10 +96,12 @@ namespace LLama
}
}
/// <inheritdoc />
protected override bool GetLoopCondition(InferStateArgs args)
{
return args.RemainedTokens != 0 || _is_prompt_run;
}
/// <inheritdoc />
protected override void PreprocessInputs(string text, InferStateArgs args)
{
if (_is_prompt_run)
@ -112,6 +127,7 @@ namespace LLama
args.RemainedTokens -= line_inp.Count();
}
}
/// <inheritdoc />
protected override bool PostProcess(InferenceParams inferenceParams, InferStateArgs args, out IEnumerable<string>? extraOutputs)
{
extraOutputs = null;
@ -154,6 +170,7 @@ namespace LLama
}
return false;
}
/// <inheritdoc />
protected override void InferInternal(InferenceParams inferenceParams, InferStateArgs args)
{
if (_embeds.Count > 0)
@ -214,12 +231,24 @@ namespace LLama
}
}
}
/// <summary>
/// The descriptor of the state of the instruct executor.
/// </summary>
public class InstructExecutorState : ExecutorBaseState
{
/// <summary>
/// Whether the executor is running for the first time (running the prompt).
/// </summary>
[JsonPropertyName("is_prompt_run")]
public bool IsPromptRun { get; set; }
/// <summary>
/// Instruction prefix tokens.
/// </summary>
[JsonPropertyName("inp_pfx")]
public llama_token[] InputPrefixTokens { get; set; }
/// <summary>
/// Instruction suffix tokens.
/// </summary>
[JsonPropertyName("inp_sfx")]
public llama_token[] InputSuffixTokens { get; set; }
}

View File

@ -14,15 +14,23 @@ using System.Threading.Tasks;
namespace LLama
{
using llama_token = Int32;
/// <summary>
/// The LLama executor for interactive mode.
/// </summary>
public class InteractiveExecutor : StatefulExecutorBase
{
bool _is_prompt_run = true;
llama_token[] _llama_token_newline;
/// <summary>
///
/// </summary>
/// <param name="model"></param>
public InteractiveExecutor(LLamaModel model) : base(model)
{
_llama_token_newline = Utils.Tokenize(_model.NativeHandle, "\n", false, _model.Encoding).ToArray();
}
/// <inheritdoc />
public override ExecutorBaseState GetStateData()
{
InteractiveExecutorState state = new()
@ -42,6 +50,7 @@ namespace LLama
};
return state;
}
/// <inheritdoc />
public override void LoadState(ExecutorBaseState data)
{
if (data is InteractiveExecutorState state)
@ -61,7 +70,7 @@ namespace LLama
else
throw new ArgumentException("Invalid state data type.");
}
/// <inheritdoc />
public override void SaveState(string filename)
{
InteractiveExecutorState state = GetStateData() as InteractiveExecutorState;
@ -70,6 +79,7 @@ namespace LLama
JsonSerializer.Serialize<InteractiveExecutorState>(fs, state);
}
}
/// <inheritdoc />
public override void LoadState(string filename)
{
using (FileStream fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
@ -88,6 +98,7 @@ namespace LLama
return args.RemainedTokens != 0 && !args.WaitForInput || _is_prompt_run;
}
/// <inheritdoc />
protected override void PreprocessInputs(string text, InferStateArgs args)
{
if (_is_prompt_run)
@ -156,6 +167,7 @@ namespace LLama
return false;
}
/// <inheritdoc />
protected override void InferInternal(InferenceParams inferenceParams, InferStateArgs args)
{
if (_embeds.Count > 0)
@ -227,10 +239,19 @@ namespace LLama
}
}
/// <summary>
/// The descriptor of the state of the interactive executor.
/// </summary>
public class InteractiveExecutorState : ExecutorBaseState
{
/// <summary>
/// Whether the executor is running for the first time (running the prompt).
/// </summary>
[JsonPropertyName("is_prompt_run")]
public bool IsPromptRun { get; set; }
/// <summary>
/// Tokens that represent a new line with the current model.
/// </summary>
[JsonPropertyName("llama_token_newline")]
public llama_token[] LLamaNewlineTokens { get; set; }
}

View File

@ -13,6 +13,9 @@ using LLama.Common;
namespace LLama
{
using llama_token = Int32;
/// <summary>
/// The abstraction of a LLama model, which holds the context in the native library.
/// </summary>
public class LLamaModel: IDisposable
{
// TODO: expose more properties.

View File

@ -6,7 +6,10 @@ using System.Text;
namespace LLama
{
public class LLamaQuantizer
/// <summary>
/// The quantizer to quantize the model.
/// </summary>
public static class LLamaQuantizer
{
/// <summary>
/// Quantize the model.

View File

@ -20,7 +20,14 @@ namespace LLama
{
private LLamaModel _model;
private byte[] _originalState;
/// <summary>
/// The model used by the executor when running the inference.
/// </summary>
public LLamaModel Model => _model;
/// <summary>
///
/// </summary>
/// <param name="model">The LLama model.</param>
public StatelessExecutor(LLamaModel model)
{
_model = model;
@ -28,6 +35,8 @@ namespace LLama
Utils.Eval(_model.NativeHandle, tokens.ToArray(), 0, tokens.Count(), 0, _model.Params.Threads);
_originalState = model.GetStateData();
}
/// <inheritdoc />
public IEnumerable<string> Infer(string text, InferenceParams? inferenceParams = null, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
@ -113,7 +122,7 @@ namespace LLama
_model.LoadState(_originalState);
}
/// <inheritdoc />
public async IAsyncEnumerable<string> InferAsync(string text, InferenceParams? inferenceParams = null, [EnumeratorCancellation] CancellationToken token = default)
{
yield return "";

View File

@ -11,6 +11,9 @@ using System.Text;
namespace LLama
{
/// <summary>
/// A class that contains all the transforms provided internally by LLama.
/// </summary>
public class LLamaTransforms
{
/// <summary>
@ -30,6 +33,14 @@ namespace LLama
string _systemName;
string _unknownName;
bool _isInstructMode;
/// <summary>
///
/// </summary>
/// <param name="userName"></param>
/// <param name="assistantName"></param>
/// <param name="systemName"></param>
/// <param name="unknownName"></param>
/// <param name="isInstructMode"></param>
public DefaultHistoryTransform(string? userName = null, string? assistantName = null,
string? systemName = null, string? unknownName = null, bool isInstructMode = false)
{
@ -40,6 +51,7 @@ namespace LLama
_isInstructMode = isInstructMode;
}
/// <inheritdoc />
public virtual string HistoryToText(ChatHistory history)
{
StringBuilder sb = new();
@ -65,6 +77,7 @@ namespace LLama
return sb.ToString();
}
/// <inheritdoc />
public virtual ChatHistory TextToHistory(AuthorRole role, string text)
{
ChatHistory history = new ChatHistory();
@ -72,6 +85,12 @@ namespace LLama
return history;
}
/// <summary>
/// Drop the name at the beginning and the end of the text.
/// </summary>
/// <param name="text"></param>
/// <param name="role"></param>
/// <returns></returns>
public virtual string TrimNamesFromText(string text, AuthorRole role)
{
if (role == AuthorRole.User && text.StartsWith($"{_userName}:"))
@ -95,6 +114,9 @@ namespace LLama
/// </summary>
public class NaiveTextInputTransform : ITextTransform
{
/// <summary>
///
/// </summary>
public NaiveTextInputTransform()
{
@ -110,11 +132,13 @@ namespace LLama
/// </summary>
public class EmptyTextOutputStreamTransform : ITextStreamTransform
{
/// <inheritdoc />
public IEnumerable<string> Transform(IEnumerable<string> tokens)
{
return tokens;
}
/// <inheritdoc />
public IAsyncEnumerable<string> TransformAsync(IAsyncEnumerable<string> tokens)
{
return tokens;

View File

@ -10,12 +10,23 @@ namespace LLama
/// </summary>
public class ResettableLLamaModel : LLamaModel
{
/// <summary>
/// The initial state of the model
/// </summary>
public byte[] OriginalState { get; set; }
/// <summary>
///
/// </summary>
/// <param name="Params"></param>
/// <param name="encoding"></param>
public ResettableLLamaModel(ModelParams Params, string encoding = "UTF-8") : base(Params, encoding)
{
OriginalState = GetStateData();
}
/// <summary>
/// Reset the state to the initial state.
/// </summary>
public void Reset()
{
LoadState(OriginalState);

View File

@ -0,0 +1,16 @@
import os
def generate_string_list(folder_path, prefix):
    """Build one markdown bullet entry per file in *folder_path*.

    Each entry has the form ``- <filename without its last extension>: <prefix><filename>``.

    :param folder_path: directory whose entries are listed via os.listdir
    :param prefix: string prepended to each filename in the link part
    :return: list of formatted bullet strings, in os.listdir order
    """
    entries = []
    for entry_name in os.listdir(folder_path):
        # Drop the last dot-separated component (the extension) to get the stem.
        stem = '.'.join(entry_name.split('.')[:-1])
        entries.append(f"- {stem}: {prefix}{entry_name}")
    return entries
# Folder containing the generated XML-doc markdown files to index.
folder_path = "./docs/xmldocs"
# Link prefix used when the generated list is embedded in the docs index page.
prefix = "./xmldocs/"
string_list = generate_string_list(folder_path, prefix)
# Emit the entries as one newline-separated markdown list for copy/paste.
result = '\n'.join(string_list)
print(result)

121
docs/xmldocs/index.md Normal file
View File

@ -0,0 +1,121 @@
# LLamaSharp
## LLama
[ChatSession](./llama.chatsession.md)
[InstructExecutor](./llama.instructexecutor.md)
[InteractiveExecutor](./llama.interactiveexecutor.md)
[LLamaEmbedder](./llama.llamaembedder.md)
[LLamaModel](./llama.llamamodel.md)
[LLamaQuantizer](./llama.llamaquantizer.md)
[LLamaTransforms](./llama.llamatransforms.md)
[ResettableLLamaModel](./llama.resettablellamamodel.md)
[StatefulExecutorBase](./llama.statefulexecutorbase.md)
[StatelessExecutor](./llama.statelessexecutor.md)
## LLama.Abstractions
[IHistoryTransform](./llama.abstractions.ihistorytransform.md)
[ILLamaExecutor](./llama.abstractions.illamaexecutor.md)
[ITextStreamTransform](./llama.abstractions.itextstreamtransform.md)
[ITextTransform](./llama.abstractions.itexttransform.md)
## LLama.Common
[AuthorRole](./llama.common.authorrole.md)
[ChatHistory](./llama.common.chathistory.md)
[FixedSizeQueue&lt;T&gt;](./llama.common.fixedsizequeue-1.md)
[ILLamaLogger](./llama.common.illamalogger.md)
[InferenceParams](./llama.common.inferenceparams.md)
[LLamaDefaultLogger](./llama.common.llamadefaultlogger.md)
[MiroStateType](./llama.common.mirostatetype.md)
[ModelParams](./llama.common.modelparams.md)
## LLama.Exceptions
[RuntimeError](./llama.exceptions.runtimeerror.md)
## LLama.Extensions
[DictionaryExtension](./llama.extensions.dictionaryextension.md)
## LLama.Native
[LLamaContextParams](./llama.native.llamacontextparams.md)
[LLamaFtype](./llama.native.llamaftype.md)
[LLamaTokenData](./llama.native.llamatokendata.md)
[LLamaTokenDataArray](./llama.native.llamatokendataarray.md)
[LLamaTokenDataArrayNative](./llama.native.llamatokendataarraynative.md)
[NativeApi](./llama.native.nativeapi.md)
[SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)
[SafeLLamaHandleBase](./llama.native.safellamahandlebase.md)
## LLama.OldVersion
[ChatCompletion](./llama.oldversion.chatcompletion.md)
[ChatCompletionChoice](./llama.oldversion.chatcompletionchoice.md)
[ChatCompletionChunk](./llama.oldversion.chatcompletionchunk.md)
[ChatCompletionChunkChoice](./llama.oldversion.chatcompletionchunkchoice.md)
[ChatCompletionChunkDelta](./llama.oldversion.chatcompletionchunkdelta.md)
[ChatCompletionMessage](./llama.oldversion.chatcompletionmessage.md)
[ChatMessageRecord](./llama.oldversion.chatmessagerecord.md)
[ChatRole](./llama.oldversion.chatrole.md)
[ChatSession&lt;T&gt;](./llama.oldversion.chatsession-1.md)
[Completion](./llama.oldversion.completion.md)
[CompletionChoice](./llama.oldversion.completionchoice.md)
[CompletionChunk](./llama.oldversion.completionchunk.md)
[CompletionLogprobs](./llama.oldversion.completionlogprobs.md)
[CompletionUsage](./llama.oldversion.completionusage.md)
[Embedding](./llama.oldversion.embedding.md)
[EmbeddingData](./llama.oldversion.embeddingdata.md)
[EmbeddingUsage](./llama.oldversion.embeddingusage.md)
[IChatModel](./llama.oldversion.ichatmodel.md)
[LLamaEmbedder](./llama.oldversion.llamaembedder.md)
[LLamaModel](./llama.oldversion.llamamodel.md)
[LLamaParams](./llama.oldversion.llamaparams.md)

View File

@ -0,0 +1,49 @@
# IHistoryTransform
Namespace: LLama.Abstractions
Transform history to plain text and vice versa.
```csharp
public interface IHistoryTransform
```
## Methods
### **HistoryToText(ChatHistory)**
Convert a ChatHistory instance to plain text.
```csharp
string HistoryToText(ChatHistory history)
```
#### Parameters
`history` [ChatHistory](./llama.common.chathistory.md)<br>
The ChatHistory instance
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **TextToHistory(AuthorRole, String)**
Converts plain text to a ChatHistory instance.
```csharp
ChatHistory TextToHistory(AuthorRole role, string text)
```
#### Parameters
`role` [AuthorRole](./llama.common.authorrole.md)<br>
The role for the author.
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The chat history as plain text.
#### Returns
[ChatHistory](./llama.common.chathistory.md)<br>
The updated history.

View File

@ -0,0 +1,66 @@
# ILLamaExecutor
Namespace: LLama.Abstractions
A high level interface for LLama models.
```csharp
public interface ILLamaExecutor
```
## Properties
### **Model**
The loaded model for this executor.
```csharp
public abstract LLamaModel Model { get; }
```
#### Property Value
[LLamaModel](./llama.llamamodel.md)<br>
## Methods
### **Infer(String, InferenceParams, CancellationToken)**
Infers a response from the model.
```csharp
IEnumerable<string> Infer(string text, InferenceParams inferenceParams, CancellationToken token)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
Your prompt
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
Any additional parameters
`token` [CancellationToken](https://docs.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken)<br>
A cancellation token.
#### Returns
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
### **InferAsync(String, InferenceParams, CancellationToken)**
```csharp
IAsyncEnumerable<string> InferAsync(string text, InferenceParams inferenceParams, CancellationToken token)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`token` [CancellationToken](https://docs.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken)<br>
#### Returns
[IAsyncEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.iasyncenumerable-1)<br>

View File

@ -0,0 +1,43 @@
# ITextStreamTransform
Namespace: LLama.Abstractions
Takes a stream of tokens and transforms them.
```csharp
public interface ITextStreamTransform
```
## Methods
### **Transform(IEnumerable&lt;String&gt;)**
Takes a stream of tokens and transforms them, returning a new stream of tokens.
```csharp
IEnumerable<string> Transform(IEnumerable<string> tokens)
```
#### Parameters
`tokens` [IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
#### Returns
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
### **TransformAsync(IAsyncEnumerable&lt;String&gt;)**
Takes a stream of tokens and transforms them, returning a new stream of tokens asynchronously.
```csharp
IAsyncEnumerable<string> TransformAsync(IAsyncEnumerable<string> tokens)
```
#### Parameters
`tokens` [IAsyncEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.iasyncenumerable-1)<br>
#### Returns
[IAsyncEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.iasyncenumerable-1)<br>

View File

@ -0,0 +1,33 @@
# ITextTransform
Namespace: LLama.Abstractions
An interface for text transformations.
These can be used to compose a pipeline of text transformations, such as:
- Tokenization
- Lowercasing
- Punctuation removal
- Trimming
- etc.
```csharp
public interface ITextTransform
```
## Methods
### **Transform(String)**
Takes a string and transforms it.
```csharp
string Transform(string text)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>

View File

@ -0,0 +1,243 @@
# ChatSession
Namespace: LLama
The main chat session class.
```csharp
public class ChatSession
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ChatSession](./llama.chatsession.md)
## Fields
### **OutputTransform**
The output transform used in this session.
```csharp
public ITextStreamTransform OutputTransform;
```
## Properties
### **Executor**
The executor for this session.
```csharp
public ILLamaExecutor Executor { get; }
```
#### Property Value
[ILLamaExecutor](./llama.abstractions.illamaexecutor.md)<br>
### **History**
The chat history for this session.
```csharp
public ChatHistory History { get; }
```
#### Property Value
[ChatHistory](./llama.common.chathistory.md)<br>
### **HistoryTransform**
The history transform used in this session.
```csharp
public IHistoryTransform HistoryTransform { get; set; }
```
#### Property Value
[IHistoryTransform](./llama.abstractions.ihistorytransform.md)<br>
### **InputTransformPipeline**
The input transform pipeline used in this session.
```csharp
public List<ITextTransform> InputTransformPipeline { get; set; }
```
#### Property Value
[List&lt;ITextTransform&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.list-1)<br>
## Constructors
### **ChatSession(ILLamaExecutor)**
```csharp
public ChatSession(ILLamaExecutor executor)
```
#### Parameters
`executor` [ILLamaExecutor](./llama.abstractions.illamaexecutor.md)<br>
The executor for this session
## Methods
### **WithHistoryTransform(IHistoryTransform)**
Use a custom history transform.
```csharp
public ChatSession WithHistoryTransform(IHistoryTransform transform)
```
#### Parameters
`transform` [IHistoryTransform](./llama.abstractions.ihistorytransform.md)<br>
#### Returns
[ChatSession](./llama.chatsession.md)<br>
### **AddInputTransform(ITextTransform)**
Add a text transform to the input transform pipeline.
```csharp
public ChatSession AddInputTransform(ITextTransform transform)
```
#### Parameters
`transform` [ITextTransform](./llama.abstractions.itexttransform.md)<br>
#### Returns
[ChatSession](./llama.chatsession.md)<br>
### **WithOutputTransform(ITextStreamTransform)**
Use a custom output transform.
```csharp
public ChatSession WithOutputTransform(ITextStreamTransform transform)
```
#### Parameters
`transform` [ITextStreamTransform](./llama.abstractions.itextstreamtransform.md)<br>
#### Returns
[ChatSession](./llama.chatsession.md)<br>
### **SaveSession(String)**
```csharp
public void SaveSession(string path)
```
#### Parameters
`path` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The directory name to save the session. If the directory does not exist, a new directory will be created.
### **LoadSession(String)**
```csharp
public void LoadSession(string path)
```
#### Parameters
`path` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The directory name to load the session.
### **Chat(ChatHistory, InferenceParams, CancellationToken)**
Get the response from the LLama model with chat histories.
```csharp
public IEnumerable<string> Chat(ChatHistory history, InferenceParams inferenceParams, CancellationToken cancellationToken)
```
#### Parameters
`history` [ChatHistory](./llama.common.chathistory.md)<br>
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`cancellationToken` [CancellationToken](https://docs.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken)<br>
#### Returns
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
### **Chat(String, InferenceParams, CancellationToken)**
Get the response from the LLama model. Note that the prompt can be not only the preset words,
but also the question you want to ask.
```csharp
public IEnumerable<string> Chat(string prompt, InferenceParams inferenceParams, CancellationToken cancellationToken)
```
#### Parameters
`prompt` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`cancellationToken` [CancellationToken](https://docs.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken)<br>
#### Returns
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
### **ChatAsync(ChatHistory, InferenceParams, CancellationToken)**
Get the response from the LLama model with chat histories.
```csharp
public IAsyncEnumerable<string> ChatAsync(ChatHistory history, InferenceParams inferenceParams, CancellationToken cancellationToken)
```
#### Parameters
`history` [ChatHistory](./llama.common.chathistory.md)<br>
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`cancellationToken` [CancellationToken](https://docs.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken)<br>
#### Returns
[IAsyncEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.iasyncenumerable-1)<br>
### **ChatAsync(String, InferenceParams, CancellationToken)**
Get the response from the LLama model asynchronously. Note that the prompt is not limited to the preset words; it can also be the question you want to ask.
```csharp
public IAsyncEnumerable<string> ChatAsync(string prompt, InferenceParams inferenceParams, CancellationToken cancellationToken)
```
#### Parameters
`prompt` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`cancellationToken` [CancellationToken](https://docs.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken)<br>
#### Returns
[IAsyncEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.iasyncenumerable-1)<br>

View File

@ -0,0 +1,15 @@
# AuthorRole
Namespace: LLama.Common
```csharp
public enum AuthorRole
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ValueType](https://docs.microsoft.com/en-us/dotnet/api/system.valuetype) → [Enum](https://docs.microsoft.com/en-us/dotnet/api/system.enum) → [AuthorRole](./llama.common.authorrole.md)<br>
Implements [IComparable](https://docs.microsoft.com/en-us/dotnet/api/system.icomparable), [IFormattable](https://docs.microsoft.com/en-us/dotnet/api/system.iformattable), [IConvertible](https://docs.microsoft.com/en-us/dotnet/api/system.iconvertible)
## Fields
| Name | Value | Description |
| --- | --: | --- |

View File

@ -0,0 +1,53 @@
# ChatHistory
Namespace: LLama.Common
The chat history class
```csharp
public class ChatHistory
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ChatHistory](./llama.common.chathistory.md)
## Properties
### **Messages**
List of messages in the chat
```csharp
public List<Message> Messages { get; }
```
#### Property Value
[List&lt;Message&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.list-1)<br>
## Constructors
### **ChatHistory()**
Create a new instance of the chat history class
```csharp
public ChatHistory()
```
## Methods
### **AddMessage(AuthorRole, String)**
Add a message to the chat history
```csharp
public void AddMessage(AuthorRole authorRole, string content)
```
#### Parameters
`authorRole` [AuthorRole](./llama.common.authorrole.md)<br>
Role of the message author
`content` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
Message content

View File

@ -0,0 +1,111 @@
# FixedSizeQueue&lt;T&gt;
Namespace: LLama.Common
A queue with fixed storage size.
Currently it's only a naive implementation and needs to be further optimized in the future.
```csharp
public class FixedSizeQueue<T> : System.Collections.Generic.IEnumerable<T>, System.Collections.IEnumerable
```
#### Type Parameters
`T`<br>
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [FixedSizeQueue&lt;T&gt;](./llama.common.fixedsizequeue-1.md)<br>
Implements IEnumerable&lt;T&gt;, [IEnumerable](https://docs.microsoft.com/en-us/dotnet/api/system.collections.ienumerable)
## Properties
### **Count**
```csharp
public int Count { get; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Capacity**
```csharp
public int Capacity { get; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
## Constructors
### **FixedSizeQueue(Int32)**
```csharp
public FixedSizeQueue(int size)
```
#### Parameters
`size` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **FixedSizeQueue(Int32, IEnumerable&lt;T&gt;)**
```csharp
public FixedSizeQueue(int size, IEnumerable<T> data)
```
#### Parameters
`size` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`data` IEnumerable&lt;T&gt;<br>
## Methods
### **FillWith(T)**
```csharp
public FixedSizeQueue<T> FillWith(T value)
```
#### Parameters
`value` T<br>
#### Returns
[FixedSizeQueue&lt;T&gt;](./llama.common.fixedsizequeue-1.md)<br>
### **Enqueue(T)**
Enqueue an element.
```csharp
public void Enqueue(T item)
```
#### Parameters
`item` T<br>
### **ToArray()**
```csharp
public T[] ToArray()
```
#### Returns
T[]<br>
### **GetEnumerator()**
```csharp
public IEnumerator<T> GetEnumerator()
```
#### Returns
IEnumerator&lt;T&gt;<br>

View File

@ -0,0 +1,28 @@
# ILLamaLogger
Namespace: LLama.Common
```csharp
public interface ILLamaLogger
```
## Methods
### **Log(String, String, LogLevel)**
Write the log in a customized way
```csharp
void Log(string source, string message, LogLevel level)
```
#### Parameters
`source` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The source of the log. It may be a method name or class name.
`message` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The message.
`level` [LogLevel](./llama.common.illamalogger.loglevel.md)<br>
The log level.

View File

@ -0,0 +1,264 @@
# InferenceParams
Namespace: LLama.Common
```csharp
public class InferenceParams
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [InferenceParams](./llama.common.inferenceparams.md)
## Properties
### **TokensKeep**
number of tokens to keep from initial prompt
```csharp
public int TokensKeep { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **MaxTokens**
how many new tokens to predict (n_predict), set to -1 to infinitely generate response
until it completes.
```csharp
public int MaxTokens { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **LogitBias**
logit bias for specific tokens
```csharp
public Dictionary<int, float> LogitBias { get; set; }
```
#### Property Value
[Dictionary&lt;Int32, Single&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.dictionary-2)<br>
### **AntiPrompts**
Sequences where the model will stop generating further tokens.
```csharp
public IEnumerable<string> AntiPrompts { get; set; }
```
#### Property Value
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
### **PathSession**
path to file for saving/loading model eval state
```csharp
public string PathSession { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **InputSuffix**
string to suffix user inputs with
```csharp
public string InputSuffix { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **InputPrefix**
string to prefix user inputs with
```csharp
public string InputPrefix { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **TopK**
0 or lower to use vocab size
```csharp
public int TopK { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **TopP**
1.0 = disabled
```csharp
public float TopP { get; set; }
```
#### Property Value
[Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **TfsZ**
1.0 = disabled
```csharp
public float TfsZ { get; set; }
```
#### Property Value
[Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **TypicalP**
1.0 = disabled
```csharp
public float TypicalP { get; set; }
```
#### Property Value
[Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **Temperature**
1.0 = disabled
```csharp
public float Temperature { get; set; }
```
#### Property Value
[Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **RepeatPenalty**
1.0 = disabled
```csharp
public float RepeatPenalty { get; set; }
```
#### Property Value
[Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **RepeatLastTokensCount**
last n tokens to penalize (0 = disable penalty, -1 = context size) (repeat_last_n)
```csharp
public int RepeatLastTokensCount { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **FrequencyPenalty**
frequency penalty coefficient
0.0 = disabled
```csharp
public float FrequencyPenalty { get; set; }
```
#### Property Value
[Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **PresencePenalty**
presence penalty coefficient
0.0 = disabled
```csharp
public float PresencePenalty { get; set; }
```
#### Property Value
[Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **Mirostat**
Mirostat uses tokens instead of words.
algorithm described in the paper https://arxiv.org/abs/2007.14966.
0 = disabled, 1 = mirostat, 2 = mirostat 2.0
```csharp
public MiroStateType Mirostat { get; set; }
```
#### Property Value
[MiroStateType](./llama.common.mirostatetype.md)<br>
### **MirostatTau**
target entropy
```csharp
public float MirostatTau { get; set; }
```
#### Property Value
[Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **MirostatEta**
learning rate
```csharp
public float MirostatEta { get; set; }
```
#### Property Value
[Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **PenalizeNL**
consider newlines as a repeatable token (penalize_nl)
```csharp
public bool PenalizeNL { get; set; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
## Constructors
### **InferenceParams()**
```csharp
public InferenceParams()
```

View File

@ -0,0 +1,121 @@
# LLamaDefaultLogger
Namespace: LLama.Common
The default logger of LLamaSharp. By default it writes to the console. Use the methods of `LLamaLogger.Default` to change the behavior.
It is recommended to implement `ILLamaLogger` to customize the behavior.
```csharp
public sealed class LLamaDefaultLogger : ILLamaLogger
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [LLamaDefaultLogger](./llama.common.llamadefaultlogger.md)<br>
Implements [ILLamaLogger](./llama.common.illamalogger.md)
## Properties
### **Default**
```csharp
public static LLamaDefaultLogger Default { get; }
```
#### Property Value
[LLamaDefaultLogger](./llama.common.llamadefaultlogger.md)<br>
## Methods
### **EnableConsole()**
```csharp
public LLamaDefaultLogger EnableConsole()
```
#### Returns
[LLamaDefaultLogger](./llama.common.llamadefaultlogger.md)<br>
### **DisableConsole()**
```csharp
public LLamaDefaultLogger DisableConsole()
```
#### Returns
[LLamaDefaultLogger](./llama.common.llamadefaultlogger.md)<br>
### **EnableFile(String, FileMode)**
```csharp
public LLamaDefaultLogger EnableFile(string filename, FileMode mode)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`mode` [FileMode](https://docs.microsoft.com/en-us/dotnet/api/system.io.filemode)<br>
#### Returns
[LLamaDefaultLogger](./llama.common.llamadefaultlogger.md)<br>
### **DisableFile(String)**
```csharp
public LLamaDefaultLogger DisableFile(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[LLamaDefaultLogger](./llama.common.llamadefaultlogger.md)<br>
### **Log(String, String, LogLevel)**
```csharp
public void Log(string source, string message, LogLevel level)
```
#### Parameters
`source` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`message` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`level` [LogLevel](./llama.common.illamalogger.loglevel.md)<br>
### **Info(String)**
```csharp
public void Info(string message)
```
#### Parameters
`message` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Warn(String)**
```csharp
public void Warn(string message)
```
#### Parameters
`message` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Error(String)**
```csharp
public void Error(string message)
```
#### Parameters
`message` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>

View File

@ -0,0 +1,15 @@
# MiroStateType
Namespace: LLama.Common
```csharp
public enum MiroStateType
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ValueType](https://docs.microsoft.com/en-us/dotnet/api/system.valuetype) → [Enum](https://docs.microsoft.com/en-us/dotnet/api/system.enum) → [MiroStateType](./llama.common.mirostatetype.md)<br>
Implements [IComparable](https://docs.microsoft.com/en-us/dotnet/api/system.icomparable), [IFormattable](https://docs.microsoft.com/en-us/dotnet/api/system.iformattable), [IConvertible](https://docs.microsoft.com/en-us/dotnet/api/system.iconvertible)
## Fields
| Name | Value | Description |
| --- | --: | --- |

View File

@ -0,0 +1,234 @@
# ModelParams
Namespace: LLama.Common
```csharp
public class ModelParams
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ModelParams](./llama.common.modelparams.md)
## Properties
### **ContextSize**
Model context size (n_ctx)
```csharp
public int ContextSize { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **GpuLayerCount**
Number of layers to run in VRAM / GPU memory (n_gpu_layers)
```csharp
public int GpuLayerCount { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Seed**
Seed for the random number generator (seed)
```csharp
public int Seed { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **UseFp16Memory**
Use f16 instead of f32 for memory kv (memory_f16)
```csharp
public bool UseFp16Memory { get; set; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **UseMemorymap**
Use mmap for faster loads (use_mmap)
```csharp
public bool UseMemorymap { get; set; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **UseMemoryLock**
Use mlock to keep model in memory (use_mlock)
```csharp
public bool UseMemoryLock { get; set; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Perplexity**
Compute perplexity over the prompt (perplexity)
```csharp
public bool Perplexity { get; set; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **ModelPath**
Model path (model)
```csharp
public string ModelPath { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **LoraAdapter**
lora adapter path (lora_adapter)
```csharp
public string LoraAdapter { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **LoraBase**
base model path for the lora adapter (lora_base)
```csharp
public string LoraBase { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Threads**
Number of threads (-1 = autodetect) (n_threads)
```csharp
public int Threads { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **BatchSize**
batch size for prompt processing (must be &gt;=32 to use BLAS) (n_batch)
```csharp
public int BatchSize { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **ConvertEosToNewLine**
Whether to convert eos to newline during the inference.
```csharp
public bool ConvertEosToNewLine { get; set; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **EmbeddingMode**
Whether to use embedding mode. (embedding) Note that if this is set to true,
the LLamaModel won't produce text responses anymore.
```csharp
public bool EmbeddingMode { get; set; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
## Constructors
### **ModelParams(String, Int32, Int32, Int32, Boolean, Boolean, Boolean, Boolean, String, String, Int32, Int32, Boolean, Boolean)**
```csharp
public ModelParams(string modelPath, int contextSize, int gpuLayerCount, int seed, bool useFp16Memory, bool useMemorymap, bool useMemoryLock, bool perplexity, string loraAdapter, string loraBase, int threads, int batchSize, bool convertEosToNewLine, bool embeddingMode)
```
#### Parameters
`modelPath` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The model path.
`contextSize` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
Model context size (n_ctx)
`gpuLayerCount` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
Number of layers to run in VRAM / GPU memory (n_gpu_layers)
`seed` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
Seed for the random number generator (seed)
`useFp16Memory` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether to use f16 instead of f32 for memory kv (memory_f16)
`useMemorymap` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether to use mmap for faster loads (use_mmap)
`useMemoryLock` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether to use mlock to keep model in memory (use_mlock)
`perplexity` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether to compute perplexity over the prompt (perplexity)
`loraAdapter` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
Lora adapter path (lora_adapter)
`loraBase` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
Base model path for the lora adapter (lora_base)
`threads` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
Number of threads (-1 = autodetect) (n_threads)
`batchSize` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
Batch size for prompt processing (must be &gt;=32 to use BLAS) (n_batch)
`convertEosToNewLine` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether to convert eos to newline during the inference.
`embeddingMode` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether to use embedding mode. (embedding) Note that if this is set to true, the LLamaModel won't produce text responses anymore.

View File

@ -0,0 +1,110 @@
# RuntimeError
Namespace: LLama.Exceptions
```csharp
public class RuntimeError : System.Exception, System.Runtime.Serialization.ISerializable
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [Exception](https://docs.microsoft.com/en-us/dotnet/api/system.exception) → [RuntimeError](./llama.exceptions.runtimeerror.md)<br>
Implements [ISerializable](https://docs.microsoft.com/en-us/dotnet/api/system.runtime.serialization.iserializable)
## Properties
### **TargetSite**
```csharp
public MethodBase TargetSite { get; }
```
#### Property Value
[MethodBase](https://docs.microsoft.com/en-us/dotnet/api/system.reflection.methodbase)<br>
### **Message**
```csharp
public string Message { get; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Data**
```csharp
public IDictionary Data { get; }
```
#### Property Value
[IDictionary](https://docs.microsoft.com/en-us/dotnet/api/system.collections.idictionary)<br>
### **InnerException**
```csharp
public Exception InnerException { get; }
```
#### Property Value
[Exception](https://docs.microsoft.com/en-us/dotnet/api/system.exception)<br>
### **HelpLink**
```csharp
public string HelpLink { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Source**
```csharp
public string Source { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **HResult**
```csharp
public int HResult { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **StackTrace**
```csharp
public string StackTrace { get; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Constructors
### **RuntimeError()**
```csharp
public RuntimeError()
```
### **RuntimeError(String)**
```csharp
public RuntimeError(string message)
```
#### Parameters
`message` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>

View File

@ -0,0 +1,73 @@
# DictionaryExtension
Namespace: LLama.Extensions
```csharp
public static class DictionaryExtension
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [DictionaryExtension](./llama.extensions.dictionaryextension.md)
## Methods
### **Deconstruct&lt;T1, T2&gt;(KeyValuePair&lt;T1, T2&gt;, T1&, T2&)**
```csharp
public static void Deconstruct<T1, T2>(KeyValuePair<T1, T2> pair, T1& first, T2& second)
```
#### Type Parameters
`T1`<br>
`T2`<br>
#### Parameters
`pair` KeyValuePair&lt;T1, T2&gt;<br>
`first` T1&<br>
`second` T2&<br>
### **Update&lt;T1, T2&gt;(Dictionary&lt;T1, T2&gt;, IDictionary&lt;T1, T2&gt;)**
```csharp
public static void Update<T1, T2>(Dictionary<T1, T2> dic, IDictionary<T1, T2> other)
```
#### Type Parameters
`T1`<br>
`T2`<br>
#### Parameters
`dic` Dictionary&lt;T1, T2&gt;<br>
`other` IDictionary&lt;T1, T2&gt;<br>
### **GetOrDefault&lt;T1, T2&gt;(Dictionary&lt;T1, T2&gt;, T1, T2)**
```csharp
public static T2 GetOrDefault<T1, T2>(Dictionary<T1, T2> dic, T1 key, T2 defaultValue)
```
#### Type Parameters
`T1`<br>
`T2`<br>
#### Parameters
`dic` Dictionary&lt;T1, T2&gt;<br>
`key` T1<br>
`defaultValue` T2<br>
#### Returns
T2<br>

View File

@ -0,0 +1,142 @@
# InstructExecutor
Namespace: LLama
The LLama executor for instruct mode.
```csharp
public class InstructExecutor : StatefulExecutorBase, LLama.Abstractions.ILLamaExecutor
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [StatefulExecutorBase](./llama.statefulexecutorbase.md) → [InstructExecutor](./llama.instructexecutor.md)<br>
Implements [ILLamaExecutor](./llama.abstractions.illamaexecutor.md)
## Properties
### **Model**
The model used by the executor.
```csharp
public LLamaModel Model { get; }
```
#### Property Value
[LLamaModel](./llama.llamamodel.md)<br>
## Constructors
### **InstructExecutor(LLamaModel, String, String)**
```csharp
public InstructExecutor(LLamaModel model, string instructionPrefix, string instructionSuffix)
```
#### Parameters
`model` [LLamaModel](./llama.llamamodel.md)<br>
`instructionPrefix` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`instructionSuffix` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Methods
### **GetStateData()**
```csharp
public ExecutorBaseState GetStateData()
```
#### Returns
[ExecutorBaseState](./llama.statefulexecutorbase.executorbasestate.md)<br>
### **LoadState(ExecutorBaseState)**
```csharp
public void LoadState(ExecutorBaseState data)
```
#### Parameters
`data` [ExecutorBaseState](./llama.statefulexecutorbase.executorbasestate.md)<br>
### **SaveState(String)**
```csharp
public void SaveState(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **LoadState(String)**
```csharp
public void LoadState(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **GetLoopCondition(InferStateArgs)**
```csharp
protected bool GetLoopCondition(InferStateArgs args)
```
#### Parameters
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **PreprocessInputs(String, InferStateArgs)**
```csharp
protected void PreprocessInputs(string text, InferStateArgs args)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>
### **PostProcess(InferenceParams, InferStateArgs, IEnumerable`1&)**
```csharp
protected bool PostProcess(InferenceParams inferenceParams, InferStateArgs args, out IEnumerable<string> extraOutputs)
```
#### Parameters
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>
`extraOutputs` [IEnumerable&lt;String&gt;&](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **InferInternal(InferenceParams, InferStateArgs)**
```csharp
protected void InferInternal(InferenceParams inferenceParams, InferStateArgs args)
```
#### Parameters
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>

View File

@ -0,0 +1,142 @@
# InteractiveExecutor
Namespace: LLama
The LLama executor for interactive mode.
```csharp
public class InteractiveExecutor : StatefulExecutorBase, LLama.Abstractions.ILLamaExecutor
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [StatefulExecutorBase](./llama.statefulexecutorbase.md) → [InteractiveExecutor](./llama.interactiveexecutor.md)<br>
Implements [ILLamaExecutor](./llama.abstractions.illamaexecutor.md)
## Properties
### **Model**
The model used by the executor.
```csharp
public LLamaModel Model { get; }
```
#### Property Value
[LLamaModel](./llama.llamamodel.md)<br>
## Constructors
### **InteractiveExecutor(LLamaModel)**
```csharp
public InteractiveExecutor(LLamaModel model)
```
#### Parameters
`model` [LLamaModel](./llama.llamamodel.md)<br>
## Methods
### **GetStateData()**
```csharp
public ExecutorBaseState GetStateData()
```
#### Returns
[ExecutorBaseState](./llama.statefulexecutorbase.executorbasestate.md)<br>
### **LoadState(ExecutorBaseState)**
```csharp
public void LoadState(ExecutorBaseState data)
```
#### Parameters
`data` [ExecutorBaseState](./llama.statefulexecutorbase.executorbasestate.md)<br>
### **SaveState(String)**
```csharp
public void SaveState(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **LoadState(String)**
```csharp
public void LoadState(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **GetLoopCondition(InferStateArgs)**
Define whether to continue the loop to generate responses.
```csharp
protected bool GetLoopCondition(InferStateArgs args)
```
#### Parameters
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **PreprocessInputs(String, InferStateArgs)**
```csharp
protected void PreprocessInputs(string text, InferStateArgs args)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>
### **PostProcess(InferenceParams, InferStateArgs, IEnumerable`1&)**
Return whether to break the generation.
```csharp
protected bool PostProcess(InferenceParams inferenceParams, InferStateArgs args, out IEnumerable<string> extraOutputs)
```
#### Parameters
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>
`extraOutputs` [IEnumerable&lt;String&gt;&](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **InferInternal(InferenceParams, InferStateArgs)**
```csharp
protected void InferInternal(InferenceParams inferenceParams, InferStateArgs args)
```
#### Parameters
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>

View File

@ -0,0 +1,64 @@
# LLamaEmbedder
Namespace: LLama
The embedder for LLama, which supports getting embeddings from text.
```csharp
public class LLamaEmbedder : System.IDisposable
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [LLamaEmbedder](./llama.llamaembedder.md)<br>
Implements [IDisposable](https://docs.microsoft.com/en-us/dotnet/api/system.idisposable)
## Constructors
### **LLamaEmbedder(ModelParams)**
```csharp
public LLamaEmbedder(ModelParams params)
```
#### Parameters
`params` [ModelParams](./llama.common.modelparams.md)<br>
## Methods
### **GetEmbeddings(String, Int32, Boolean, String)**
Get the embeddings of the text.
```csharp
public Single[] GetEmbeddings(string text, int threads, bool addBos, string encoding)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`threads` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
Threads used for inference.
`addBos` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Add bos to the text.
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[Single[]](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
#### Exceptions
[RuntimeError](./llama.exceptions.runtimeerror.md)<br>
### **Dispose()**
```csharp
public void Dispose()
```

View File

@ -0,0 +1,282 @@
# LLamaModel
Namespace: LLama
The abstraction of a LLama model, which holds the context in the native library.
```csharp
public class LLamaModel : System.IDisposable
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [LLamaModel](./llama.llamamodel.md)<br>
Implements [IDisposable](https://docs.microsoft.com/en-us/dotnet/api/system.idisposable)
## Properties
### **ContextSize**
The context size.
```csharp
public int ContextSize { get; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Params**
The model params set for this model.
```csharp
public ModelParams Params { get; set; }
```
#### Property Value
[ModelParams](./llama.common.modelparams.md)<br>
### **NativeHandle**
The native handle, which is passed to the native APIs. Please avoid using it
unless you understand how the native API is used.
```csharp
public SafeLLamaContextHandle NativeHandle { get; }
```
#### Property Value
[SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
### **Encoding**
The encoding set for this model to deal with text input.
```csharp
public Encoding Encoding { get; }
```
#### Property Value
[Encoding](https://docs.microsoft.com/en-us/dotnet/api/system.text.encoding)<br>
## Constructors
### **LLamaModel(ModelParams, String, ILLamaLogger)**
```csharp
public LLamaModel(ModelParams Params, string encoding, ILLamaLogger logger)
```
#### Parameters
`Params` [ModelParams](./llama.common.modelparams.md)<br>
Model params.
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
Encoding to deal with text input.
`logger` [ILLamaLogger](./llama.common.illamalogger.md)<br>
The logger.
## Methods
### **Tokenize(String, Boolean)**
Tokenize a string.
```csharp
public IEnumerable<int> Tokenize(string text, bool addBos)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`addBos` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether to add a bos to the text.
#### Returns
[IEnumerable&lt;Int32&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
### **DeTokenize(IEnumerable&lt;Int32&gt;)**
Detokenize the tokens to text.
```csharp
public string DeTokenize(IEnumerable<int> tokens)
```
#### Parameters
`tokens` [IEnumerable&lt;Int32&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **SaveState(String)**
Save the state to specified path.
```csharp
public void SaveState(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **GetStateData()**
Get the state data as a byte array.
```csharp
public Byte[] GetStateData()
```
#### Returns
[Byte[]](https://docs.microsoft.com/en-us/dotnet/api/system.byte)<br>
### **LoadState(String)**
Load the state from specified path.
```csharp
public void LoadState(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Exceptions
[RuntimeError](./llama.exceptions.runtimeerror.md)<br>
### **LoadState(Byte[])**
Load the state from memory.
```csharp
public void LoadState(Byte[] stateData)
```
#### Parameters
`stateData` [Byte[]](https://docs.microsoft.com/en-us/dotnet/api/system.byte)<br>
#### Exceptions
[RuntimeError](./llama.exceptions.runtimeerror.md)<br>
### **Sample(LLamaTokenDataArray, Single, MiroStateType, Single, Single, Int32, Single, Single, Single)**
Perform the sampling. Please don't use it unless you fully know what it does.
```csharp
public int Sample(LLamaTokenDataArray candidates, float temperature, MiroStateType mirostat, float mirostatTau, float mirostatEta, int topK, float topP, float tfsZ, float typicalP)
```
#### Parameters
`candidates` [LLamaTokenDataArray](./llama.native.llamatokendataarray.md)<br>
`temperature` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`mirostat` [MiroStateType](./llama.common.mirostatetype.md)<br>
`mirostatTau` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`mirostatEta` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`topK` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`topP` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`tfsZ` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`typicalP` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **ApplyPenalty(IEnumerable&lt;Int32&gt;, Dictionary&lt;Int32, Single&gt;, Int32, Single, Single, Single, Boolean)**
Apply the penalty for the tokens. Please don't use it unless you fully know what it does.
```csharp
public LLamaTokenDataArray ApplyPenalty(IEnumerable<int> lastTokens, Dictionary<int, float> logitBias, int repeatLastTokensCount, float repeatPenalty, float alphaFrequency, float alphaPresence, bool penalizeNL)
```
#### Parameters
`lastTokens` [IEnumerable&lt;Int32&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
`logitBias` [Dictionary&lt;Int32, Single&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.dictionary-2)<br>
`repeatLastTokensCount` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`repeatPenalty` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`alphaFrequency` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`alphaPresence` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`penalizeNL` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
#### Returns
[LLamaTokenDataArray](./llama.native.llamatokendataarray.md)<br>
### **Eval(Int32[], Int32)**
```csharp
public int Eval(Int32[] tokens, int pastTokensCount)
```
#### Parameters
`tokens` [Int32[]](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`pastTokensCount` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
The updated `pastTokensCount`.
#### Exceptions
[RuntimeError](./llama.exceptions.runtimeerror.md)<br>
### **GenerateResult(IEnumerable&lt;Int32&gt;)**
```csharp
internal IEnumerable<string> GenerateResult(IEnumerable<int> ids)
```
#### Parameters
`ids` [IEnumerable&lt;Int32&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
#### Returns
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
### **Dispose()**
```csharp
public void Dispose()
```

View File

@ -0,0 +1,75 @@
# LLamaQuantizer
Namespace: LLama
The quantizer to quantize the model.
```csharp
public static class LLamaQuantizer
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [LLamaQuantizer](./llama.llamaquantizer.md)
## Methods
### **Quantize(String, String, LLamaFtype, Int32)**
Quantize the model.
```csharp
public static bool Quantize(string srcFileName, string dstFilename, LLamaFtype ftype, int nthread)
```
#### Parameters
`srcFileName` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The model file to be quantized.
`dstFilename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The path to save the quantized model.
`ftype` [LLamaFtype](./llama.native.llamaftype.md)<br>
The type of quantization.
`nthread` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
The number of threads to use during quantization. By default it is the number of physical cores.
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether the quantization is successful.
#### Exceptions
[ArgumentException](https://docs.microsoft.com/en-us/dotnet/api/system.argumentexception)<br>
### **Quantize(String, String, String, Int32)**
Quantize the model.
```csharp
public static bool Quantize(string srcFileName, string dstFilename, string ftype, int nthread)
```
#### Parameters
`srcFileName` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The model file to be quantized.
`dstFilename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The path to save the quantized model.
`ftype` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The type of quantization.
`nthread` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
The number of threads to use during quantization. By default it is the number of physical cores.
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether the quantization is successful.
#### Exceptions
[ArgumentException](https://docs.microsoft.com/en-us/dotnet/api/system.argumentexception)<br>

View File

@ -0,0 +1,19 @@
# LLamaTransforms
Namespace: LLama
A class that contains all the transforms provided internally by LLama.
```csharp
public class LLamaTransforms
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [LLamaTransforms](./llama.llamatransforms.md)
## Constructors
### **LLamaTransforms()**
```csharp
public LLamaTransforms()
```

View File

@ -0,0 +1,99 @@
# LLamaContextParams
Namespace: LLama.Native
```csharp
public struct LLamaContextParams
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ValueType](https://docs.microsoft.com/en-us/dotnet/api/system.valuetype) → [LLamaContextParams](./llama.native.llamacontextparams.md)
## Fields
### **n_ctx**
text context
```csharp
public int n_ctx;
```
### **n_gpu_layers**
number of layers to store in VRAM
```csharp
public int n_gpu_layers;
```
### **seed**
RNG seed, -1 for random
```csharp
public int seed;
```
### **f16_kv**
use fp16 for KV cache
```csharp
public bool f16_kv;
```
### **logits_all**
the llama_eval() call computes all logits, not just the last one
```csharp
public bool logits_all;
```
### **vocab_only**
only load the vocabulary, no weights
```csharp
public bool vocab_only;
```
### **use_mmap**
use mmap if possible
```csharp
public bool use_mmap;
```
### **use_mlock**
force system to keep model in RAM
```csharp
public bool use_mlock;
```
### **embedding**
embedding mode only
```csharp
public bool embedding;
```
### **progress_callback**
called with a progress value between 0 and 1, pass NULL to disable
```csharp
public IntPtr progress_callback;
```
### **progress_callback_user_data**
context pointer passed to the progress callback
```csharp
public IntPtr progress_callback_user_data;
```

View File

@ -0,0 +1,15 @@
# LLamaFtype
Namespace: LLama.Native
```csharp
public enum LLamaFtype
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ValueType](https://docs.microsoft.com/en-us/dotnet/api/system.valuetype) → [Enum](https://docs.microsoft.com/en-us/dotnet/api/system.enum) → [LLamaFtype](./llama.native.llamaftype.md)<br>
Implements [IComparable](https://docs.microsoft.com/en-us/dotnet/api/system.icomparable), [IFormattable](https://docs.microsoft.com/en-us/dotnet/api/system.iformattable), [IConvertible](https://docs.microsoft.com/en-us/dotnet/api/system.iconvertible)
## Fields
| Name | Value | Description |
| --- | --: | --- |

View File

@ -0,0 +1,51 @@
# LLamaTokenData
Namespace: LLama.Native
```csharp
public struct LLamaTokenData
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ValueType](https://docs.microsoft.com/en-us/dotnet/api/system.valuetype) → [LLamaTokenData](./llama.native.llamatokendata.md)
## Fields
### **id**
token id
```csharp
public int id;
```
### **logit**
log-odds of the token
```csharp
public float logit;
```
### **p**
probability of the token
```csharp
public float p;
```
## Constructors
### **LLamaTokenData(Int32, Single, Single)**
```csharp
LLamaTokenData(int id, float logit, float p)
```
#### Parameters
`id` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`logit` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`p` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>

View File

@ -0,0 +1,45 @@
# LLamaTokenDataArray
Namespace: LLama.Native
```csharp
public struct LLamaTokenDataArray
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ValueType](https://docs.microsoft.com/en-us/dotnet/api/system.valuetype) → [LLamaTokenDataArray](./llama.native.llamatokendataarray.md)
## Fields
### **data**
```csharp
public Memory<LLamaTokenData> data;
```
### **size**
```csharp
public ulong size;
```
### **sorted**
```csharp
public bool sorted;
```
## Constructors
### **LLamaTokenDataArray(LLamaTokenData[], UInt64, Boolean)**
```csharp
LLamaTokenDataArray(LLamaTokenData[] data, ulong size, bool sorted)
```
#### Parameters
`data` [LLamaTokenData[]](./llama.native.llamatokendata.md)<br>
`size` [UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
`sorted` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>

View File

@ -0,0 +1,29 @@
# LLamaTokenDataArrayNative
Namespace: LLama.Native
```csharp
public struct LLamaTokenDataArrayNative
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ValueType](https://docs.microsoft.com/en-us/dotnet/api/system.valuetype) → [LLamaTokenDataArrayNative](./llama.native.llamatokendataarraynative.md)
## Fields
### **data**
```csharp
public IntPtr data;
```
### **size**
```csharp
public ulong size;
```
### **sorted**
```csharp
public bool sorted;
```

View File

@ -0,0 +1,786 @@
# NativeApi
Namespace: LLama.Native
```csharp
public class NativeApi
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [NativeApi](./llama.native.nativeapi.md)
## Constructors
### **NativeApi()**
```csharp
public NativeApi()
```
## Methods
### **llama_print_timings(SafeLLamaContextHandle)**
```csharp
public static void llama_print_timings(SafeLLamaContextHandle ctx)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
### **llama_reset_timings(SafeLLamaContextHandle)**
```csharp
public static void llama_reset_timings(SafeLLamaContextHandle ctx)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
### **llama_print_system_info()**
Print system information
```csharp
public static IntPtr llama_print_system_info()
```
#### Returns
[IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
### **llama_model_quantize(String, String, LLamaFtype, Int32)**
```csharp
public static int llama_model_quantize(string fname_inp, string fname_out, LLamaFtype ftype, int nthread)
```
#### Parameters
`fname_inp` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`fname_out` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`ftype` [LLamaFtype](./llama.native.llamaftype.md)<br>
`nthread` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_sample_repetition_penalty(SafeLLamaContextHandle, IntPtr, Int32[], UInt64, Single)**
Repetition penalty described in CTRL academic paper https://arxiv.org/abs/1909.05858, with negative logit fix.
```csharp
public static void llama_sample_repetition_penalty(SafeLLamaContextHandle ctx, IntPtr candidates, Int32[] last_tokens, ulong last_tokens_size, float penalty)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
Pointer to LLamaTokenDataArray
`last_tokens` [Int32[]](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`last_tokens_size` [UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
`penalty` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **llama_sample_frequency_and_presence_penalties(SafeLLamaContextHandle, IntPtr, Int32[], UInt64, Single, Single)**
Frequency and presence penalties described in OpenAI API https://platform.openai.com/docs/api-reference/parameter-details.
```csharp
public static void llama_sample_frequency_and_presence_penalties(SafeLLamaContextHandle ctx, IntPtr candidates, Int32[] last_tokens, ulong last_tokens_size, float alpha_frequency, float alpha_presence)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
Pointer to LLamaTokenDataArray
`last_tokens` [Int32[]](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`last_tokens_size` [UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
`alpha_frequency` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`alpha_presence` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **llama_sample_softmax(SafeLLamaContextHandle, IntPtr)**
Sorts candidate tokens by their logits in descending order and calculates probabilities based on the logits.
```csharp
public static void llama_sample_softmax(SafeLLamaContextHandle ctx, IntPtr candidates)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
Pointer to LLamaTokenDataArray
### **llama_sample_top_k(SafeLLamaContextHandle, IntPtr, Int32, UInt64)**
Top-K sampling described in academic paper "The Curious Case of Neural Text Degeneration" https://arxiv.org/abs/1904.09751
```csharp
public static void llama_sample_top_k(SafeLLamaContextHandle ctx, IntPtr candidates, int k, ulong min_keep)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
Pointer to LLamaTokenDataArray
`k` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`min_keep` [UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
### **llama_sample_top_p(SafeLLamaContextHandle, IntPtr, Single, UInt64)**
Nucleus sampling described in academic paper "The Curious Case of Neural Text Degeneration" https://arxiv.org/abs/1904.09751
```csharp
public static void llama_sample_top_p(SafeLLamaContextHandle ctx, IntPtr candidates, float p, ulong min_keep)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
Pointer to LLamaTokenDataArray
`p` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`min_keep` [UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
### **llama_sample_tail_free(SafeLLamaContextHandle, IntPtr, Single, UInt64)**
Tail Free Sampling described in https://www.trentonbricken.com/Tail-Free-Sampling/.
```csharp
public static void llama_sample_tail_free(SafeLLamaContextHandle ctx, IntPtr candidates, float z, ulong min_keep)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
Pointer to LLamaTokenDataArray
`z` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`min_keep` [UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
### **llama_sample_typical(SafeLLamaContextHandle, IntPtr, Single, UInt64)**
Locally Typical Sampling implementation described in the paper https://arxiv.org/abs/2202.00666.
```csharp
public static void llama_sample_typical(SafeLLamaContextHandle ctx, IntPtr candidates, float p, ulong min_keep)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
Pointer to LLamaTokenDataArray
`p` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`min_keep` [UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
### **llama_sample_temperature(SafeLLamaContextHandle, IntPtr, Single)**
```csharp
public static void llama_sample_temperature(SafeLLamaContextHandle ctx, IntPtr candidates, float temp)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
`temp` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **llama_sample_token_mirostat(SafeLLamaContextHandle, IntPtr, Single, Single, Int32, Single*)**
Mirostat 1.0 algorithm described in the paper https://arxiv.org/abs/2007.14966. Uses tokens instead of words.
```csharp
public static int llama_sample_token_mirostat(SafeLLamaContextHandle ctx, IntPtr candidates, float tau, float eta, int m, Single* mu)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
A vector of `llama_token_data` containing the candidate tokens, their probabilities (p), and log-odds (logit) for the current position in the generated text.
`tau` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
The target cross-entropy (or surprise) value you want to achieve for the generated text. A higher value corresponds to more surprising or less predictable text, while a lower value corresponds to less surprising or more predictable text.
`eta` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
The learning rate used to update `mu` based on the error between the target and observed surprisal of the sampled word. A larger learning rate will cause `mu` to be updated more quickly, while a smaller learning rate will result in slower updates.
`m` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
The number of tokens considered in the estimation of `s_hat`. This is an arbitrary value that is used to calculate `s_hat`, which in turn helps to calculate the value of `k`. In the paper, they use `m = 100`, but you can experiment with different values to see how it affects the performance of the algorithm.
`mu` [Single*](https://docs.microsoft.com/en-us/dotnet/api/system.single*)<br>
Maximum cross-entropy. This value is initialized to be twice the target cross-entropy (`2 * tau`) and is updated in the algorithm based on the error between the target and observed surprisal.
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_sample_token_mirostat_v2(SafeLLamaContextHandle, IntPtr, Single, Single, Single*)**
Mirostat 2.0 algorithm described in the paper https://arxiv.org/abs/2007.14966. Uses tokens instead of words.
```csharp
public static int llama_sample_token_mirostat_v2(SafeLLamaContextHandle ctx, IntPtr candidates, float tau, float eta, Single* mu)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
A vector of `llama_token_data` containing the candidate tokens, their probabilities (p), and log-odds (logit) for the current position in the generated text.
`tau` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
The target cross-entropy (or surprise) value you want to achieve for the generated text. A higher value corresponds to more surprising or less predictable text, while a lower value corresponds to less surprising or more predictable text.
`eta` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
The learning rate used to update `mu` based on the error between the target and observed surprisal of the sampled word. A larger learning rate will cause `mu` to be updated more quickly, while a smaller learning rate will result in slower updates.
`mu` [Single*](https://docs.microsoft.com/en-us/dotnet/api/system.single*)<br>
Maximum cross-entropy. This value is initialized to be twice the target cross-entropy (`2 * tau`) and is updated in the algorithm based on the error between the target and observed surprisal.
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_sample_token_greedy(SafeLLamaContextHandle, IntPtr)**
Selects the token with the highest probability.
```csharp
public static int llama_sample_token_greedy(SafeLLamaContextHandle ctx, IntPtr candidates)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
Pointer to LLamaTokenDataArray
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_sample_token(SafeLLamaContextHandle, IntPtr)**
Randomly selects a token from the candidates based on their probabilities.
```csharp
public static int llama_sample_token(SafeLLamaContextHandle ctx, IntPtr candidates)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`candidates` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
Pointer to LLamaTokenDataArray
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_empty_call()**
```csharp
public static bool llama_empty_call()
```
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **llama_context_default_params()**
```csharp
public static LLamaContextParams llama_context_default_params()
```
#### Returns
[LLamaContextParams](./llama.native.llamacontextparams.md)<br>
### **llama_mmap_supported()**
```csharp
public static bool llama_mmap_supported()
```
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **llama_mlock_supported()**
```csharp
public static bool llama_mlock_supported()
```
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **llama_init_from_file(String, LLamaContextParams)**
Various functions for loading a ggml llama model.
Allocate (almost) all memory needed for the model.
Return NULL on failure
```csharp
public static IntPtr llama_init_from_file(string path_model, LLamaContextParams params_)
```
#### Parameters
`path_model` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`params_` [LLamaContextParams](./llama.native.llamacontextparams.md)<br>
#### Returns
[IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
### **llama_init_backend()**
not great API - very likely to change.
Initialize the llama + ggml backend
Call once at the start of the program
```csharp
public static void llama_init_backend()
```
### **llama_free(IntPtr)**
Frees all allocated memory
```csharp
public static void llama_free(IntPtr ctx)
```
#### Parameters
`ctx` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
### **llama_apply_lora_from_file(SafeLLamaContextHandle, String, String, Int32)**
Apply a LoRA adapter to a loaded model
path_base_model is the path to a higher quality model to use as a base for
the layers modified by the adapter. Can be NULL to use the current loaded model.
The model needs to be reloaded before applying a new adapter, otherwise the adapter
will be applied on top of the previous one
```csharp
public static int llama_apply_lora_from_file(SafeLLamaContextHandle ctx, string path_lora, string path_base_model, int n_threads)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`path_lora` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`path_base_model` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`n_threads` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
Returns 0 on success
### **llama_get_kv_cache_token_count(SafeLLamaContextHandle)**
Returns the number of tokens in the KV cache
```csharp
public static int llama_get_kv_cache_token_count(SafeLLamaContextHandle ctx)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_set_rng_seed(SafeLLamaContextHandle, Int32)**
Sets the current rng seed.
```csharp
public static void llama_set_rng_seed(SafeLLamaContextHandle ctx, int seed)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`seed` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_get_state_size(SafeLLamaContextHandle)**
Returns the maximum size in bytes of the state (rng, logits, embedding
and kv_cache) - will often be smaller after compacting tokens
```csharp
public static ulong llama_get_state_size(SafeLLamaContextHandle ctx)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
#### Returns
[UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
### **llama_copy_state_data(SafeLLamaContextHandle, Byte[])**
Copies the state to the specified destination address.
Destination needs to have allocated enough memory.
Returns the number of bytes copied
```csharp
public static ulong llama_copy_state_data(SafeLLamaContextHandle ctx, Byte[] dest)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`dest` [Byte[]](https://docs.microsoft.com/en-us/dotnet/api/system.byte)<br>
#### Returns
[UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
### **llama_set_state_data(SafeLLamaContextHandle, Byte[])**
Set the state reading from the specified address
Returns the number of bytes read
```csharp
public static ulong llama_set_state_data(SafeLLamaContextHandle ctx, Byte[] src)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`src` [Byte[]](https://docs.microsoft.com/en-us/dotnet/api/system.byte)<br>
#### Returns
[UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
### **llama_load_session_file(SafeLLamaContextHandle, String, Int32[], UInt64, UInt64*)**
Load session file
```csharp
public static bool llama_load_session_file(SafeLLamaContextHandle ctx, string path_session, Int32[] tokens_out, ulong n_token_capacity, UInt64* n_token_count_out)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`path_session` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`tokens_out` [Int32[]](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_token_capacity` [UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
`n_token_count_out` [UInt64*](https://docs.microsoft.com/en-us/dotnet/api/system.uint64*)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **llama_save_session_file(SafeLLamaContextHandle, String, Int32[], UInt64)**
Save session file
```csharp
public static bool llama_save_session_file(SafeLLamaContextHandle ctx, string path_session, Int32[] tokens, ulong n_token_count)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`path_session` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`tokens` [Int32[]](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_token_count` [UInt64](https://docs.microsoft.com/en-us/dotnet/api/system.uint64)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **llama_eval(SafeLLamaContextHandle, Int32[], Int32, Int32, Int32)**
Run the llama inference to obtain the logits and probabilities for the next token.
tokens + n_tokens is the provided batch of new tokens to process
n_past is the number of tokens to use from previous eval calls
```csharp
public static int llama_eval(SafeLLamaContextHandle ctx, Int32[] tokens, int n_tokens, int n_past, int n_threads)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`tokens` [Int32[]](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_tokens` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_past` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_threads` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
Returns 0 on success
### **llama_eval_with_pointer(SafeLLamaContextHandle, Int32*, Int32, Int32, Int32)**
```csharp
public static int llama_eval_with_pointer(SafeLLamaContextHandle ctx, Int32* tokens, int n_tokens, int n_past, int n_threads)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`tokens` [Int32*](https://docs.microsoft.com/en-us/dotnet/api/system.int32*)<br>
`n_tokens` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_past` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_threads` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_tokenize(SafeLLamaContextHandle, String, Encoding, Int32[], Int32, Boolean)**
Convert the provided text into tokens.
The tokens pointer must be large enough to hold the resulting tokens.
Returns the number of tokens on success, no more than n_max_tokens
Returns a negative number on failure - the number of tokens that would have been returned
```csharp
public static int llama_tokenize(SafeLLamaContextHandle ctx, string text, Encoding encoding, Int32[] tokens, int n_max_tokens, bool add_bos)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`encoding` [Encoding](https://docs.microsoft.com/en-us/dotnet/api/system.text.encoding)<br>
`tokens` [Int32[]](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_max_tokens` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`add_bos` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_tokenize_native(SafeLLamaContextHandle, SByte[], Int32[], Int32, Boolean)**
```csharp
public static int llama_tokenize_native(SafeLLamaContextHandle ctx, SByte[] text, Int32[] tokens, int n_max_tokens, bool add_bos)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`text` [SByte[]](https://docs.microsoft.com/en-us/dotnet/api/system.sbyte)<br>
`tokens` [Int32[]](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_max_tokens` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`add_bos` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_n_vocab(SafeLLamaContextHandle)**
```csharp
public static int llama_n_vocab(SafeLLamaContextHandle ctx)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_n_ctx(SafeLLamaContextHandle)**
```csharp
public static int llama_n_ctx(SafeLLamaContextHandle ctx)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_n_embd(SafeLLamaContextHandle)**
```csharp
public static int llama_n_embd(SafeLLamaContextHandle ctx)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_get_logits(SafeLLamaContextHandle)**
Token logits obtained from the last call to llama_eval()
The logits for the last token are stored in the last row
Can be mutated in order to change the probabilities of the next token
Rows: n_tokens
Cols: n_vocab
```csharp
public static Single* llama_get_logits(SafeLLamaContextHandle ctx)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
#### Returns
[Single*](https://docs.microsoft.com/en-us/dotnet/api/system.single*)<br>
### **llama_get_embeddings(SafeLLamaContextHandle)**
Get the embeddings for the input
shape: [n_embd] (1-dimensional)
```csharp
public static Single* llama_get_embeddings(SafeLLamaContextHandle ctx)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
#### Returns
[Single*](https://docs.microsoft.com/en-us/dotnet/api/system.single*)<br>
### **llama_token_to_str(SafeLLamaContextHandle, Int32)**
Token Id -&gt; String. Uses the vocabulary in the provided context
```csharp
public static IntPtr llama_token_to_str(SafeLLamaContextHandle ctx, int token)
```
#### Parameters
`ctx` [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
`token` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
#### Returns
[IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
Pointer to a string.
### **llama_token_bos()**
```csharp
public static int llama_token_bos()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_token_eos()**
```csharp
public static int llama_token_eos()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **llama_token_nl()**
```csharp
public static int llama_token_nl()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>

View File

@ -0,0 +1,56 @@
# SafeLLamaContextHandle
Namespace: LLama.Native
```csharp
public class SafeLLamaContextHandle : SafeLLamaHandleBase, System.IDisposable
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [CriticalFinalizerObject](https://docs.microsoft.com/en-us/dotnet/api/system.runtime.constrainedexecution.criticalfinalizerobject) → [SafeHandle](https://docs.microsoft.com/en-us/dotnet/api/system.runtime.interopservices.safehandle) → [SafeLLamaHandleBase](./llama.native.safellamahandlebase.md) → [SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
Implements [IDisposable](https://docs.microsoft.com/en-us/dotnet/api/system.idisposable)
## Properties
### **IsInvalid**
```csharp
public bool IsInvalid { get; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **IsClosed**
```csharp
public bool IsClosed { get; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
## Constructors
### **SafeLLamaContextHandle(IntPtr)**
```csharp
public SafeLLamaContextHandle(IntPtr handle)
```
#### Parameters
`handle` [IntPtr](https://docs.microsoft.com/en-us/dotnet/api/system.intptr)<br>
## Methods
### **ReleaseHandle()**
```csharp
protected bool ReleaseHandle()
```
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>

View File

@ -0,0 +1,44 @@
# SafeLLamaHandleBase
Namespace: LLama.Native
```csharp
public abstract class SafeLLamaHandleBase : System.Runtime.InteropServices.SafeHandle, System.IDisposable
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [CriticalFinalizerObject](https://docs.microsoft.com/en-us/dotnet/api/system.runtime.constrainedexecution.criticalfinalizerobject) → [SafeHandle](https://docs.microsoft.com/en-us/dotnet/api/system.runtime.interopservices.safehandle) → [SafeLLamaHandleBase](./llama.native.safellamahandlebase.md)<br>
Implements [IDisposable](https://docs.microsoft.com/en-us/dotnet/api/system.idisposable)
## Properties
### **IsInvalid**
```csharp
public bool IsInvalid { get; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **IsClosed**
```csharp
public bool IsClosed { get; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>

View File

@ -0,0 +1,188 @@
# ChatCompletion
Namespace: LLama.OldVersion
```csharp
public class ChatCompletion : System.IEquatable`1[[LLama.OldVersion.ChatCompletion, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ChatCompletion](./llama.oldversion.chatcompletion.md)<br>
Implements [IEquatable&lt;ChatCompletion&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Id**
```csharp
public string Id { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Object**
```csharp
public string Object { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Created**
```csharp
public int Created { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Model**
```csharp
public string Model { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Choices**
```csharp
public ChatCompletionChoice[] Choices { get; set; }
```
#### Property Value
[ChatCompletionChoice[]](./llama.oldversion.chatcompletionchoice.md)<br>
### **Usage**
```csharp
public CompletionUsage Usage { get; set; }
```
#### Property Value
[CompletionUsage](./llama.oldversion.completionusage.md)<br>
## Constructors
### **ChatCompletion(String, String, Int32, String, ChatCompletionChoice[], CompletionUsage)**
```csharp
public ChatCompletion(string Id, string Object, int Created, string Model, ChatCompletionChoice[] Choices, CompletionUsage Usage)
```
#### Parameters
`Id` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Object` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Created` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`Model` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Choices` [ChatCompletionChoice[]](./llama.oldversion.chatcompletionchoice.md)<br>
`Usage` [CompletionUsage](./llama.oldversion.completionusage.md)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(ChatCompletion)**
```csharp
public bool Equals(ChatCompletion other)
```
#### Parameters
`other` [ChatCompletion](./llama.oldversion.chatcompletion.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public ChatCompletion <Clone>$()
```
#### Returns
[ChatCompletion](./llama.oldversion.chatcompletion.md)<br>
### **Deconstruct(String&, String&, Int32&, String&, ChatCompletionChoice[]&, CompletionUsage&)**
```csharp
public void Deconstruct(String& Id, String& Object, Int32& Created, String& Model, ChatCompletionChoice[]& Choices, CompletionUsage& Usage)
```
#### Parameters
`Id` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Object` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Created` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`Model` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Choices` [ChatCompletionChoice[]&](./llama.oldversion.chatcompletionchoice&.md)<br>
`Usage` [CompletionUsage&](./llama.oldversion.completionusage&.md)<br>

View File

@ -0,0 +1,146 @@
# ChatCompletionChoice
Namespace: LLama.OldVersion
```csharp
public class ChatCompletionChoice : System.IEquatable`1[[LLama.OldVersion.ChatCompletionChoice, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ChatCompletionChoice](./llama.oldversion.chatcompletionchoice.md)<br>
Implements [IEquatable&lt;ChatCompletionChoice&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Index**
```csharp
public int Index { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Message**
```csharp
public ChatCompletionMessage Message { get; set; }
```
#### Property Value
[ChatCompletionMessage](./llama.oldversion.chatcompletionmessage.md)<br>
### **FinishReason**
```csharp
public string FinishReason { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Constructors
### **ChatCompletionChoice(Int32, ChatCompletionMessage, String)**
```csharp
public ChatCompletionChoice(int Index, ChatCompletionMessage Message, string FinishReason)
```
#### Parameters
`Index` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`Message` [ChatCompletionMessage](./llama.oldversion.chatcompletionmessage.md)<br>
`FinishReason` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(ChatCompletionChoice)**
```csharp
public bool Equals(ChatCompletionChoice other)
```
#### Parameters
`other` [ChatCompletionChoice](./llama.oldversion.chatcompletionchoice.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public ChatCompletionChoice <Clone>$()
```
#### Returns
[ChatCompletionChoice](./llama.oldversion.chatcompletionchoice.md)<br>
### **Deconstruct(Int32&, ChatCompletionMessage&, String&)**
```csharp
public void Deconstruct(Int32& Index, ChatCompletionMessage& Message, String& FinishReason)
```
#### Parameters
`Index` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`Message` [ChatCompletionMessage&](./llama.oldversion.chatcompletionmessage&.md)<br>
`FinishReason` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>

View File

@ -0,0 +1,174 @@
# ChatCompletionChunk
Namespace: LLama.OldVersion
```csharp
public class ChatCompletionChunk : System.IEquatable`1[[LLama.OldVersion.ChatCompletionChunk, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ChatCompletionChunk](./llama.oldversion.chatcompletionchunk.md)<br>
Implements [IEquatable&lt;ChatCompletionChunk&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Id**
```csharp
public string Id { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Model**
```csharp
public string Model { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Object**
```csharp
public string Object { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Created**
```csharp
public int Created { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Choices**
```csharp
public ChatCompletionChunkChoice[] Choices { get; set; }
```
#### Property Value
[ChatCompletionChunkChoice[]](./llama.oldversion.chatcompletionchunkchoice.md)<br>
## Constructors
### **ChatCompletionChunk(String, String, String, Int32, ChatCompletionChunkChoice[])**
```csharp
public ChatCompletionChunk(string Id, string Model, string Object, int Created, ChatCompletionChunkChoice[] Choices)
```
#### Parameters
`Id` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Model` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Object` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Created` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`Choices` [ChatCompletionChunkChoice[]](./llama.oldversion.chatcompletionchunkchoice.md)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(ChatCompletionChunk)**
```csharp
public bool Equals(ChatCompletionChunk other)
```
#### Parameters
`other` [ChatCompletionChunk](./llama.oldversion.chatcompletionchunk.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public ChatCompletionChunk <Clone>$()
```
#### Returns
[ChatCompletionChunk](./llama.oldversion.chatcompletionchunk.md)<br>
### **Deconstruct(String&, String&, String&, Int32&, ChatCompletionChunkChoice[]&)**
```csharp
public void Deconstruct(String& Id, String& Model, String& Object, Int32& Created, ChatCompletionChunkChoice[]& Choices)
```
#### Parameters
`Id` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Model` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Object` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Created` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`Choices` [ChatCompletionChunkChoice[]&](./llama.oldversion.chatcompletionchunkchoice&.md)<br>

View File

@ -0,0 +1,146 @@
# ChatCompletionChunkChoice
Namespace: LLama.OldVersion
```csharp
public class ChatCompletionChunkChoice : System.IEquatable`1[[LLama.OldVersion.ChatCompletionChunkChoice, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ChatCompletionChunkChoice](./llama.oldversion.chatcompletionchunkchoice.md)<br>
Implements [IEquatable&lt;ChatCompletionChunkChoice&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Index**
```csharp
public int Index { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Delta**
```csharp
public ChatCompletionChunkDelta Delta { get; set; }
```
#### Property Value
[ChatCompletionChunkDelta](./llama.oldversion.chatcompletionchunkdelta.md)<br>
### **FinishReason**
```csharp
public string FinishReason { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Constructors
### **ChatCompletionChunkChoice(Int32, ChatCompletionChunkDelta, String)**
```csharp
public ChatCompletionChunkChoice(int Index, ChatCompletionChunkDelta Delta, string FinishReason)
```
#### Parameters
`Index` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`Delta` [ChatCompletionChunkDelta](./llama.oldversion.chatcompletionchunkdelta.md)<br>
`FinishReason` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(ChatCompletionChunkChoice)**
```csharp
public bool Equals(ChatCompletionChunkChoice other)
```
#### Parameters
`other` [ChatCompletionChunkChoice](./llama.oldversion.chatcompletionchunkchoice.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public ChatCompletionChunkChoice <Clone>$()
```
#### Returns
[ChatCompletionChunkChoice](./llama.oldversion.chatcompletionchunkchoice.md)<br>
### **Deconstruct(Int32&, ChatCompletionChunkDelta&, String&)**
```csharp
public void Deconstruct(Int32& Index, ChatCompletionChunkDelta& Delta, String& FinishReason)
```
#### Parameters
`Index` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`Delta` [ChatCompletionChunkDelta&](./llama.oldversion.chatcompletionchunkdelta&.md)<br>
`FinishReason` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>

View File

@ -0,0 +1,132 @@
# ChatCompletionChunkDelta
Namespace: LLama.OldVersion
```csharp
public class ChatCompletionChunkDelta : System.IEquatable`1[[LLama.OldVersion.ChatCompletionChunkDelta, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ChatCompletionChunkDelta](./llama.oldversion.chatcompletionchunkdelta.md)<br>
Implements [IEquatable&lt;ChatCompletionChunkDelta&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Role**
```csharp
public string Role { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Content**
```csharp
public string Content { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Constructors
### **ChatCompletionChunkDelta(String, String)**
```csharp
public ChatCompletionChunkDelta(string Role, string Content)
```
#### Parameters
`Role` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Content` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(ChatCompletionChunkDelta)**
```csharp
public bool Equals(ChatCompletionChunkDelta other)
```
#### Parameters
`other` [ChatCompletionChunkDelta](./llama.oldversion.chatcompletionchunkdelta.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public ChatCompletionChunkDelta <Clone>$()
```
#### Returns
[ChatCompletionChunkDelta](./llama.oldversion.chatcompletionchunkdelta.md)<br>
### **Deconstruct(String&, String&)**
```csharp
public void Deconstruct(String& Role, String& Content)
```
#### Parameters
`Role` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Content` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>

View File

@ -0,0 +1,146 @@
# ChatCompletionMessage
Namespace: LLama.OldVersion
```csharp
public class ChatCompletionMessage : System.IEquatable`1[[LLama.OldVersion.ChatCompletionMessage, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ChatCompletionMessage](./llama.oldversion.chatcompletionmessage.md)<br>
Implements [IEquatable&lt;ChatCompletionMessage&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Role**
```csharp
public ChatRole Role { get; set; }
```
#### Property Value
[ChatRole](./llama.oldversion.chatrole.md)<br>
### **Content**
```csharp
public string Content { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Name**
```csharp
public string Name { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Constructors
### **ChatCompletionMessage(ChatRole, String, String)**
```csharp
public ChatCompletionMessage(ChatRole Role, string Content, string Name)
```
#### Parameters
`Role` [ChatRole](./llama.oldversion.chatrole.md)<br>
`Content` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Name` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(ChatCompletionMessage)**
```csharp
public bool Equals(ChatCompletionMessage other)
```
#### Parameters
`other` [ChatCompletionMessage](./llama.oldversion.chatcompletionmessage.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public ChatCompletionMessage <Clone>$()
```
#### Returns
[ChatCompletionMessage](./llama.oldversion.chatcompletionmessage.md)<br>
### **Deconstruct(ChatRole&, String&, String&)**
```csharp
public void Deconstruct(ChatRole& Role, String& Content, String& Name)
```
#### Parameters
`Role` [ChatRole&](./llama.oldversion.chatrole&.md)<br>
`Content` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Name` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>

View File

@ -0,0 +1,132 @@
# ChatMessageRecord
Namespace: LLama.OldVersion
```csharp
public class ChatMessageRecord : System.IEquatable`1[[LLama.OldVersion.ChatMessageRecord, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ChatMessageRecord](./llama.oldversion.chatmessagerecord.md)<br>
Implements [IEquatable&lt;ChatMessageRecord&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Message**
```csharp
public ChatCompletionMessage Message { get; set; }
```
#### Property Value
[ChatCompletionMessage](./llama.oldversion.chatcompletionmessage.md)<br>
### **Time**
```csharp
public DateTime Time { get; set; }
```
#### Property Value
[DateTime](https://docs.microsoft.com/en-us/dotnet/api/system.datetime)<br>
## Constructors
### **ChatMessageRecord(ChatCompletionMessage, DateTime)**
```csharp
public ChatMessageRecord(ChatCompletionMessage Message, DateTime Time)
```
#### Parameters
`Message` [ChatCompletionMessage](./llama.oldversion.chatcompletionmessage.md)<br>
`Time` [DateTime](https://docs.microsoft.com/en-us/dotnet/api/system.datetime)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(ChatMessageRecord)**
```csharp
public bool Equals(ChatMessageRecord other)
```
#### Parameters
`other` [ChatMessageRecord](./llama.oldversion.chatmessagerecord.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public ChatMessageRecord <Clone>$()
```
#### Returns
[ChatMessageRecord](./llama.oldversion.chatmessagerecord.md)<br>
### **Deconstruct(ChatCompletionMessage&, DateTime&)**
```csharp
public void Deconstruct(ChatCompletionMessage& Message, DateTime& Time)
```
#### Parameters
`Message` [ChatCompletionMessage&](./llama.oldversion.chatcompletionmessage&.md)<br>
`Time` [DateTime&](https://docs.microsoft.com/en-us/dotnet/api/system.datetime&)<br>

View File

@ -0,0 +1,15 @@
# ChatRole
Namespace: LLama.OldVersion
```csharp
public enum ChatRole
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ValueType](https://docs.microsoft.com/en-us/dotnet/api/system.valuetype) → [Enum](https://docs.microsoft.com/en-us/dotnet/api/system.enum) → [ChatRole](./llama.oldversion.chatrole.md)<br>
Implements [IComparable](https://docs.microsoft.com/en-us/dotnet/api/system.icomparable), [IFormattable](https://docs.microsoft.com/en-us/dotnet/api/system.iformattable), [IConvertible](https://docs.microsoft.com/en-us/dotnet/api/system.iconvertible)
## Fields
| Name | Value | Description |
| --- | --: | --- |

View File

@ -0,0 +1,93 @@
# ChatSession&lt;T&gt;
Namespace: LLama.OldVersion
```csharp
public class ChatSession<T>
```
#### Type Parameters
`T`<br>
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ChatSession&lt;T&gt;](./llama.oldversion.chatsession-1.md)
## Constructors
### **ChatSession(T)**
```csharp
public ChatSession(T model)
```
#### Parameters
`model` T<br>
## Methods
### **Chat(String, String, String)**
```csharp
public IEnumerable<string> Chat(string text, string prompt, string encoding)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`prompt` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
### **WithPrompt(String, String)**
```csharp
public ChatSession<T> WithPrompt(string prompt, string encoding)
```
#### Parameters
`prompt` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[ChatSession&lt;T&gt;](./llama.oldversion.chatsession-1.md)<br>
### **WithPromptFile(String, String)**
```csharp
public ChatSession<T> WithPromptFile(string promptFilename, string encoding)
```
#### Parameters
`promptFilename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[ChatSession&lt;T&gt;](./llama.oldversion.chatsession-1.md)<br>
### **WithAntiprompt(String[])**
Sets the keywords (antiprompts) used to split the text returned by the chat AI.
```csharp
public ChatSession<T> WithAntiprompt(String[] antiprompt)
```
#### Parameters
`antiprompt` [String[]](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[ChatSession&lt;T&gt;](./llama.oldversion.chatsession-1.md)<br>

View File

@ -0,0 +1,188 @@
# Completion
Namespace: LLama.OldVersion
```csharp
public class Completion : System.IEquatable`1[[LLama.OldVersion.Completion, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [Completion](./llama.oldversion.completion.md)<br>
Implements [IEquatable&lt;Completion&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Id**
```csharp
public string Id { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Object**
```csharp
public string Object { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Created**
```csharp
public int Created { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Model**
```csharp
public string Model { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Choices**
```csharp
public CompletionChoice[] Choices { get; set; }
```
#### Property Value
[CompletionChoice[]](./llama.oldversion.completionchoice.md)<br>
### **Usage**
```csharp
public CompletionUsage Usage { get; set; }
```
#### Property Value
[CompletionUsage](./llama.oldversion.completionusage.md)<br>
## Constructors
### **Completion(String, String, Int32, String, CompletionChoice[], CompletionUsage)**
```csharp
public Completion(string Id, string Object, int Created, string Model, CompletionChoice[] Choices, CompletionUsage Usage)
```
#### Parameters
`Id` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Object` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Created` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`Model` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Choices` [CompletionChoice[]](./llama.oldversion.completionchoice.md)<br>
`Usage` [CompletionUsage](./llama.oldversion.completionusage.md)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(Completion)**
```csharp
public bool Equals(Completion other)
```
#### Parameters
`other` [Completion](./llama.oldversion.completion.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public Completion <Clone>$()
```
#### Returns
[Completion](./llama.oldversion.completion.md)<br>
### **Deconstruct(String&, String&, Int32&, String&, CompletionChoice[]&, CompletionUsage&)**
```csharp
public void Deconstruct(String& Id, String& Object, Int32& Created, String& Model, CompletionChoice[]& Choices, CompletionUsage& Usage)
```
#### Parameters
`Id` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Object` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Created` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`Model` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Choices` [CompletionChoice[]&](./llama.oldversion.completionchoice&.md)<br>
`Usage` [CompletionUsage&](./llama.oldversion.completionusage&.md)<br>

View File

@ -0,0 +1,160 @@
# CompletionChoice
Namespace: LLama.OldVersion
```csharp
public class CompletionChoice : System.IEquatable`1[[LLama.OldVersion.CompletionChoice, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [CompletionChoice](./llama.oldversion.completionchoice.md)<br>
Implements [IEquatable&lt;CompletionChoice&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Text**
```csharp
public string Text { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Index**
```csharp
public int Index { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Logprobs**
```csharp
public CompletionLogprobs Logprobs { get; set; }
```
#### Property Value
[CompletionLogprobs](./llama.oldversion.completionlogprobs.md)<br>
### **FinishReason**
```csharp
public string FinishReason { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Constructors
### **CompletionChoice(String, Int32, CompletionLogprobs, String)**
```csharp
public CompletionChoice(string Text, int Index, CompletionLogprobs Logprobs, string FinishReason)
```
#### Parameters
`Text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Index` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`Logprobs` [CompletionLogprobs](./llama.oldversion.completionlogprobs.md)<br>
`FinishReason` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(CompletionChoice)**
```csharp
public bool Equals(CompletionChoice other)
```
#### Parameters
`other` [CompletionChoice](./llama.oldversion.completionchoice.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public CompletionChoice <Clone>$()
```
#### Returns
[CompletionChoice](./llama.oldversion.completionchoice.md)<br>
### **Deconstruct(String&, Int32&, CompletionLogprobs&, String&)**
```csharp
public void Deconstruct(String& Text, Int32& Index, CompletionLogprobs& Logprobs, String& FinishReason)
```
#### Parameters
`Text` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Index` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`Logprobs` [CompletionLogprobs&](./llama.oldversion.completionlogprobs&.md)<br>
`FinishReason` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>

View File

@ -0,0 +1,174 @@
# CompletionChunk
Namespace: LLama.OldVersion
```csharp
public class CompletionChunk : System.IEquatable`1[[LLama.OldVersion.CompletionChunk, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [CompletionChunk](./llama.oldversion.completionchunk.md)<br>
Implements [IEquatable&lt;CompletionChunk&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Id**
```csharp
public string Id { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Object**
```csharp
public string Object { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Created**
```csharp
public int Created { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Model**
```csharp
public string Model { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Choices**
```csharp
public CompletionChoice[] Choices { get; set; }
```
#### Property Value
[CompletionChoice[]](./llama.oldversion.completionchoice.md)<br>
## Constructors
### **CompletionChunk(String, String, Int32, String, CompletionChoice[])**
```csharp
public CompletionChunk(string Id, string Object, int Created, string Model, CompletionChoice[] Choices)
```
#### Parameters
`Id` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Object` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Created` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`Model` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Choices` [CompletionChoice[]](./llama.oldversion.completionchoice.md)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(CompletionChunk)**
```csharp
public bool Equals(CompletionChunk other)
```
#### Parameters
`other` [CompletionChunk](./llama.oldversion.completionchunk.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public CompletionChunk <Clone>$()
```
#### Returns
[CompletionChunk](./llama.oldversion.completionchunk.md)<br>
### **Deconstruct(String&, String&, Int32&, String&, CompletionChoice[]&)**
```csharp
public void Deconstruct(String& Id, String& Object, Int32& Created, String& Model, CompletionChoice[]& Choices)
```
#### Parameters
`Id` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Object` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Created` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`Model` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Choices` [CompletionChoice[]&](./llama.oldversion.completionchoice&.md)<br>

View File

@ -0,0 +1,160 @@
# CompletionLogprobs
Namespace: LLama.OldVersion
```csharp
public class CompletionLogprobs : System.IEquatable`1[[LLama.OldVersion.CompletionLogprobs, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [CompletionLogprobs](./llama.oldversion.completionlogprobs.md)<br>
Implements [IEquatable&lt;CompletionLogprobs&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **TextOffset**
```csharp
public Int32[] TextOffset { get; set; }
```
#### Property Value
[Int32[]](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **TokenLogProbs**
```csharp
public Single[] TokenLogProbs { get; set; }
```
#### Property Value
[Single[]](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **Tokens**
```csharp
public String[] Tokens { get; set; }
```
#### Property Value
[String[]](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **TopLogprobs**
```csharp
public Dictionary`2[] TopLogprobs { get; set; }
```
#### Property Value
[Dictionary`2[]](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.dictionary-2)<br>
## Constructors
### **CompletionLogprobs(Int32[], Single[], String[], Dictionary`2[])**
```csharp
public CompletionLogprobs(Int32[] TextOffset, Single[] TokenLogProbs, String[] Tokens, Dictionary`2[] TopLogprobs)
```
#### Parameters
`TextOffset` [Int32[]](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`TokenLogProbs` [Single[]](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`Tokens` [String[]](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`TopLogprobs` [Dictionary`2[]](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.dictionary-2)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(CompletionLogprobs)**
```csharp
public bool Equals(CompletionLogprobs other)
```
#### Parameters
`other` [CompletionLogprobs](./llama.oldversion.completionlogprobs.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public CompletionLogprobs <Clone>$()
```
#### Returns
[CompletionLogprobs](./llama.oldversion.completionlogprobs.md)<br>
### **Deconstruct(Int32[]&, Single[]&, String[]&, Dictionary`2[]&)**
```csharp
public void Deconstruct(Int32[]& TextOffset, Single[]& TokenLogProbs, String[]& Tokens, Dictionary`2[]& TopLogprobs)
```
#### Parameters
`TextOffset` [Int32[]&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`TokenLogProbs` [Single[]&](https://docs.microsoft.com/en-us/dotnet/api/system.single&)<br>
`Tokens` [String[]&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`TopLogprobs` [Dictionary`2[]&](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.dictionary-2&)<br>

View File

@ -0,0 +1,146 @@
# CompletionUsage
Namespace: LLama.OldVersion
```csharp
public class CompletionUsage : System.IEquatable`1[[LLama.OldVersion.CompletionUsage, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [CompletionUsage](./llama.oldversion.completionusage.md)<br>
Implements [IEquatable&lt;CompletionUsage&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **PromptTokens**
```csharp
public int PromptTokens { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **CompletionTokens**
```csharp
public int CompletionTokens { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **TotalTokens**
```csharp
public int TotalTokens { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
## Constructors
### **CompletionUsage(Int32, Int32, Int32)**
```csharp
public CompletionUsage(int PromptTokens, int CompletionTokens, int TotalTokens)
```
#### Parameters
`PromptTokens` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`CompletionTokens` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`TotalTokens` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(CompletionUsage)**
```csharp
public bool Equals(CompletionUsage other)
```
#### Parameters
`other` [CompletionUsage](./llama.oldversion.completionusage.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public CompletionUsage <Clone>$()
```
#### Returns
[CompletionUsage](./llama.oldversion.completionusage.md)<br>
### **Deconstruct(Int32&, Int32&, Int32&)**
```csharp
public void Deconstruct(Int32& PromptTokens, Int32& CompletionTokens, Int32& TotalTokens)
```
#### Parameters
`PromptTokens` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`CompletionTokens` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`TotalTokens` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>

View File

@ -0,0 +1,160 @@
# Embedding
Namespace: LLama.OldVersion
```csharp
public class Embedding : System.IEquatable`1[[LLama.OldVersion.Embedding, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [Embedding](./llama.oldversion.embedding.md)<br>
Implements [IEquatable&lt;Embedding&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Object**
```csharp
public string Object { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Model**
```csharp
public string Model { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Data**
```csharp
public EmbeddingData[] Data { get; set; }
```
#### Property Value
[EmbeddingData[]](./llama.oldversion.embeddingdata.md)<br>
### **Usage**
```csharp
public EmbeddingUsage Usage { get; set; }
```
#### Property Value
[EmbeddingUsage](./llama.oldversion.embeddingusage.md)<br>
## Constructors
### **Embedding(String, String, EmbeddingData[], EmbeddingUsage)**
```csharp
public Embedding(string Object, string Model, EmbeddingData[] Data, EmbeddingUsage Usage)
```
#### Parameters
`Object` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Model` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Data` [EmbeddingData[]](./llama.oldversion.embeddingdata.md)<br>
`Usage` [EmbeddingUsage](./llama.oldversion.embeddingusage.md)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(Embedding)**
```csharp
public bool Equals(Embedding other)
```
#### Parameters
`other` [Embedding](./llama.oldversion.embedding.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public Embedding <Clone>$()
```
#### Returns
[Embedding](./llama.oldversion.embedding.md)<br>
### **Deconstruct(String&, String&, EmbeddingData[]&, EmbeddingUsage&)**
```csharp
public void Deconstruct(String& Object, String& Model, EmbeddingData[]& Data, EmbeddingUsage& Usage)
```
#### Parameters
`Object` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Model` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Data` [EmbeddingData[]&](./llama.oldversion.embeddingdata&.md)<br>
`Usage` [EmbeddingUsage&](./llama.oldversion.embeddingusage&.md)<br>

View File

@ -0,0 +1,146 @@
# EmbeddingData
Namespace: LLama.OldVersion
```csharp
public class EmbeddingData : System.IEquatable`1[[LLama.OldVersion.EmbeddingData, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [EmbeddingData](./llama.oldversion.embeddingdata.md)<br>
Implements [IEquatable&lt;EmbeddingData&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **Index**
```csharp
public int Index { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Object**
```csharp
public string Object { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Embedding**
```csharp
public Single[] Embedding { get; set; }
```
#### Property Value
[Single[]](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
## Constructors
### **EmbeddingData(Int32, String, Single[])**
```csharp
public EmbeddingData(int Index, string Object, Single[] Embedding)
```
#### Parameters
`Index` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`Object` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`Embedding` [Single[]](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(EmbeddingData)**
```csharp
public bool Equals(EmbeddingData other)
```
#### Parameters
`other` [EmbeddingData](./llama.oldversion.embeddingdata.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public EmbeddingData <Clone>$()
```
#### Returns
[EmbeddingData](./llama.oldversion.embeddingdata.md)<br>
### **Deconstruct(Int32&, String&, Single[]&)**
```csharp
public void Deconstruct(Int32& Index, String& Object, Single[]& Embedding)
```
#### Parameters
`Index` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`Object` [String&](https://docs.microsoft.com/en-us/dotnet/api/system.string&)<br>
`Embedding` [Single[]&](https://docs.microsoft.com/en-us/dotnet/api/system.single&)<br>

View File

@ -0,0 +1,132 @@
# EmbeddingUsage
Namespace: LLama.OldVersion
```csharp
public class EmbeddingUsage : System.IEquatable`1[[LLama.OldVersion.EmbeddingUsage, LLamaSharp, Version=0.4.0.0, Culture=neutral, PublicKeyToken=null]]
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [EmbeddingUsage](./llama.oldversion.embeddingusage.md)<br>
Implements [IEquatable&lt;EmbeddingUsage&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.iequatable-1)
## Properties
### **PromptTokens**
```csharp
public int PromptTokens { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **TotalTokens**
```csharp
public int TotalTokens { get; set; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
## Constructors
### **EmbeddingUsage(Int32, Int32)**
```csharp
public EmbeddingUsage(int PromptTokens, int TotalTokens)
```
#### Parameters
`PromptTokens` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`TotalTokens` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
## Methods
### **ToString()**
```csharp
public string ToString()
```
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **PrintMembers(StringBuilder)**
```csharp
protected bool PrintMembers(StringBuilder builder)
```
#### Parameters
`builder` [StringBuilder](https://docs.microsoft.com/en-us/dotnet/api/system.text.stringbuilder)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **GetHashCode()**
```csharp
public int GetHashCode()
```
#### Returns
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Equals(Object)**
```csharp
public bool Equals(object obj)
```
#### Parameters
`obj` [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **Equals(EmbeddingUsage)**
```csharp
public bool Equals(EmbeddingUsage other)
```
#### Parameters
`other` [EmbeddingUsage](./llama.oldversion.embeddingusage.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **&lt;Clone&gt;$()**
```csharp
public EmbeddingUsage <Clone>$()
```
#### Returns
[EmbeddingUsage](./llama.oldversion.embeddingusage.md)<br>
### **Deconstruct(Int32&, Int32&)**
```csharp
public void Deconstruct(Int32& PromptTokens, Int32& TotalTokens)
```
#### Parameters
`PromptTokens` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>
`TotalTokens` [Int32&](https://docs.microsoft.com/en-us/dotnet/api/system.int32&)<br>

View File

@ -0,0 +1,63 @@
# IChatModel
Namespace: LLama.OldVersion
```csharp
public interface IChatModel
```
## Properties
### **Name**
```csharp
public abstract string Name { get; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Methods
### **Chat(String, String, String)**
```csharp
IEnumerable<string> Chat(string text, string prompt, string encoding)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`prompt` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
### **InitChatPrompt(String, String)**
Initializes a prompt for the chat and automatically produces the next prompt during the chat.
```csharp
void InitChatPrompt(string prompt, string encoding)
```
#### Parameters
`prompt` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **InitChatAntiprompt(String[])**
```csharp
void InitChatAntiprompt(String[] antiprompt)
```
#### Parameters
`antiprompt` [String[]](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>

View File

@ -0,0 +1,50 @@
# LLamaEmbedder
Namespace: LLama.OldVersion
```csharp
public class LLamaEmbedder : System.IDisposable
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [LLamaEmbedder](./llama.oldversion.llamaembedder.md)<br>
Implements [IDisposable](https://docs.microsoft.com/en-us/dotnet/api/system.idisposable)
## Constructors
### **LLamaEmbedder(LLamaParams)**
```csharp
public LLamaEmbedder(LLamaParams params)
```
#### Parameters
`params` [LLamaParams](./llama.oldversion.llamaparams.md)<br>
## Methods
### **GetEmbeddings(String, Int32, Boolean, String)**
```csharp
public Single[] GetEmbeddings(string text, int n_thread, bool add_bos, string encoding)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`n_thread` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`add_bos` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[Single[]](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
### **Dispose()**
```csharp
public void Dispose()
```

View File

@ -0,0 +1,362 @@
# LLamaModel
Namespace: LLama.OldVersion
```csharp
public class LLamaModel : IChatModel, System.IDisposable
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [LLamaModel](./llama.oldversion.llamamodel.md)<br>
Implements [IChatModel](./llama.oldversion.ichatmodel.md), [IDisposable](https://docs.microsoft.com/en-us/dotnet/api/system.idisposable)
## Properties
### **Name**
```csharp
public string Name { get; set; }
```
#### Property Value
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Verbose**
```csharp
public bool Verbose { get; set; }
```
#### Property Value
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **NativeHandle**
```csharp
public SafeLLamaContextHandle NativeHandle { get; }
```
#### Property Value
[SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
## Constructors
### **LLamaModel(String, String, Boolean, Int32, Int32, Int32, Int32, Int32, Int32, Int32, Dictionary&lt;Int32, Single&gt;, Int32, Single, Single, Single, Single, Single, Int32, Single, Single, Int32, Single, Single, String, String, String, String, List&lt;String&gt;, String, String, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, String)**
Please refer to `LLamaParams` for the meaning of each argument. Be sure to set `n_gpu_layers`;
otherwise, 20 layers will be loaded to the GPU by default.
```csharp
public LLamaModel(string model_path, string model_name, bool verbose, int seed, int n_threads, int n_predict, int n_ctx, int n_batch, int n_keep, int n_gpu_layers, Dictionary<int, float> logit_bias, int top_k, float top_p, float tfs_z, float typical_p, float temp, float repeat_penalty, int repeat_last_n, float frequency_penalty, float presence_penalty, int mirostat, float mirostat_tau, float mirostat_eta, string prompt, string path_session, string input_prefix, string input_suffix, List<string> antiprompt, string lora_adapter, string lora_base, bool memory_f16, bool random_prompt, bool use_color, bool interactive, bool embedding, bool interactive_first, bool prompt_cache_all, bool instruct, bool penalize_nl, bool perplexity, bool use_mmap, bool use_mlock, bool mem_test, bool verbose_prompt, string encoding)
```
#### Parameters
`model_path` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The model file path.
`model_name` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The model name.
`verbose` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether to print details when running the model.
`seed` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_threads` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_predict` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_ctx` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_batch` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_keep` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_gpu_layers` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`logit_bias` [Dictionary&lt;Int32, Single&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.dictionary-2)<br>
`top_k` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`top_p` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`tfs_z` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`typical_p` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`temp` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`repeat_penalty` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`repeat_last_n` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`frequency_penalty` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`presence_penalty` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`mirostat` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`mirostat_tau` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`mirostat_eta` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`prompt` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`path_session` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`input_prefix` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`input_suffix` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`antiprompt` [List&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.list-1)<br>
`lora_adapter` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`lora_base` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`memory_f16` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`random_prompt` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`use_color` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`interactive` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`embedding` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`interactive_first` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`prompt_cache_all` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`instruct` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`penalize_nl` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`perplexity` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`use_mmap` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`use_mlock` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`mem_test` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`verbose_prompt` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **LLamaModel(LLamaParams, String, Boolean, String)**
Please refer to `LLamaParams` to find the meaning of each argument. Be sure to set `n_gpu_layers`; otherwise it will
load 20 layers to the GPU by default.
```csharp
public LLamaModel(LLamaParams params, string name, bool verbose, string encoding)
```
#### Parameters
`params` [LLamaParams](./llama.oldversion.llamaparams.md)<br>
The LLamaModel params
`name` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
Model name
`verbose` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether to output the detailed info.
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Exceptions
[RuntimeError](./llama.exceptions.runtimeerror.md)<br>
## Methods
### **WithPrompt(String, String)**
Apply a prompt to the model.
```csharp
public LLamaModel WithPrompt(string prompt, string encoding)
```
#### Parameters
`prompt` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[LLamaModel](./llama.oldversion.llamamodel.md)<br>
#### Exceptions
[ArgumentException](https://docs.microsoft.com/en-us/dotnet/api/system.argumentexception)<br>
### **WithPromptFile(String)**
Apply the prompt file to the model.
```csharp
public LLamaModel WithPromptFile(string promptFileName)
```
#### Parameters
`promptFileName` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[LLamaModel](./llama.oldversion.llamamodel.md)<br>
### **InitChatPrompt(String, String)**
```csharp
public void InitChatPrompt(string prompt, string encoding)
```
#### Parameters
`prompt` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **InitChatAntiprompt(String[])**
```csharp
public void InitChatAntiprompt(String[] antiprompt)
```
#### Parameters
`antiprompt` [String[]](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Chat(String, String, String)**
Chat with the LLaMa model under interactive mode.
```csharp
public IEnumerable<string> Chat(string text, string prompt, string encoding)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`prompt` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
#### Exceptions
[ArgumentException](https://docs.microsoft.com/en-us/dotnet/api/system.argumentexception)<br>
### **SaveState(String)**
Save the state to specified path.
```csharp
public void SaveState(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **LoadState(String, Boolean)**
Load the state from specified path.
```csharp
public void LoadState(string filename, bool clearPreviousEmbed)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`clearPreviousEmbed` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
Whether to clear previous footprints of this model.
#### Exceptions
[RuntimeError](./llama.exceptions.runtimeerror.md)<br>
### **Tokenize(String, String)**
Tokenize a string.
```csharp
public List<int> Tokenize(string text, string encoding)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The utf-8 encoded string to tokenize.
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[List&lt;Int32&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.list-1)<br>
A list of tokens.
#### Exceptions
[RuntimeError](./llama.exceptions.runtimeerror.md)<br>
If the tokenization failed.
### **DeTokenize(IEnumerable&lt;Int32&gt;)**
Detokenize a list of tokens.
```csharp
public string DeTokenize(IEnumerable<int> tokens)
```
#### Parameters
`tokens` [IEnumerable&lt;Int32&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
The list of tokens to detokenize.
#### Returns
[String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
The detokenized string.
### **Call(String, String)**
Call the model to run inference.
```csharp
public IEnumerable<string> Call(string text, string encoding)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
#### Exceptions
[RuntimeError](./llama.exceptions.runtimeerror.md)<br>
### **Dispose()**
```csharp
public void Dispose()
```

View File

@ -0,0 +1,357 @@
# LLamaParams
Namespace: LLama.OldVersion
```csharp
public struct LLamaParams
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [ValueType](https://docs.microsoft.com/en-us/dotnet/api/system.valuetype) → [LLamaParams](./llama.oldversion.llamaparams.md)
## Fields
### **seed**
```csharp
public int seed;
```
### **n_threads**
```csharp
public int n_threads;
```
### **n_predict**
```csharp
public int n_predict;
```
### **n_ctx**
```csharp
public int n_ctx;
```
### **n_batch**
```csharp
public int n_batch;
```
### **n_keep**
```csharp
public int n_keep;
```
### **n_gpu_layers**
```csharp
public int n_gpu_layers;
```
### **logit_bias**
```csharp
public Dictionary<int, float> logit_bias;
```
### **top_k**
```csharp
public int top_k;
```
### **top_p**
```csharp
public float top_p;
```
### **tfs_z**
```csharp
public float tfs_z;
```
### **typical_p**
```csharp
public float typical_p;
```
### **temp**
```csharp
public float temp;
```
### **repeat_penalty**
```csharp
public float repeat_penalty;
```
### **repeat_last_n**
```csharp
public int repeat_last_n;
```
### **frequency_penalty**
```csharp
public float frequency_penalty;
```
### **presence_penalty**
```csharp
public float presence_penalty;
```
### **mirostat**
```csharp
public int mirostat;
```
### **mirostat_tau**
```csharp
public float mirostat_tau;
```
### **mirostat_eta**
```csharp
public float mirostat_eta;
```
### **model**
```csharp
public string model;
```
### **prompt**
```csharp
public string prompt;
```
### **path_session**
```csharp
public string path_session;
```
### **input_prefix**
```csharp
public string input_prefix;
```
### **input_suffix**
```csharp
public string input_suffix;
```
### **antiprompt**
```csharp
public List<string> antiprompt;
```
### **lora_adapter**
```csharp
public string lora_adapter;
```
### **lora_base**
```csharp
public string lora_base;
```
### **memory_f16**
```csharp
public bool memory_f16;
```
### **random_prompt**
```csharp
public bool random_prompt;
```
### **use_color**
```csharp
public bool use_color;
```
### **interactive**
```csharp
public bool interactive;
```
### **prompt_cache_all**
```csharp
public bool prompt_cache_all;
```
### **embedding**
```csharp
public bool embedding;
```
### **interactive_first**
```csharp
public bool interactive_first;
```
### **instruct**
```csharp
public bool instruct;
```
### **penalize_nl**
```csharp
public bool penalize_nl;
```
### **perplexity**
```csharp
public bool perplexity;
```
### **use_mmap**
```csharp
public bool use_mmap;
```
### **use_mlock**
```csharp
public bool use_mlock;
```
### **mem_test**
```csharp
public bool mem_test;
```
### **verbose_prompt**
```csharp
public bool verbose_prompt;
```
## Constructors
### **LLamaParams(Int32, Int32, Int32, Int32, Int32, Int32, Int32, Dictionary&lt;Int32, Single&gt;, Int32, Single, Single, Single, Single, Single, Int32, Single, Single, Int32, Single, Single, String, String, String, String, String, List&lt;String&gt;, String, String, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean, Boolean)**
```csharp
LLamaParams(int seed, int n_threads, int n_predict, int n_ctx, int n_batch, int n_keep, int n_gpu_layers, Dictionary<int, float> logit_bias, int top_k, float top_p, float tfs_z, float typical_p, float temp, float repeat_penalty, int repeat_last_n, float frequency_penalty, float presence_penalty, int mirostat, float mirostat_tau, float mirostat_eta, string model, string prompt, string path_session, string input_prefix, string input_suffix, List<string> antiprompt, string lora_adapter, string lora_base, bool memory_f16, bool random_prompt, bool use_color, bool interactive, bool prompt_cache_all, bool embedding, bool interactive_first, bool instruct, bool penalize_nl, bool perplexity, bool use_mmap, bool use_mlock, bool mem_test, bool verbose_prompt)
```
#### Parameters
`seed` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_threads` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_predict` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_ctx` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_batch` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_keep` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`n_gpu_layers` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`logit_bias` [Dictionary&lt;Int32, Single&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.dictionary-2)<br>
`top_k` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`top_p` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`tfs_z` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`typical_p` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`temp` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`repeat_penalty` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`repeat_last_n` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`frequency_penalty` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`presence_penalty` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`mirostat` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
`mirostat_tau` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`mirostat_eta` [Single](https://docs.microsoft.com/en-us/dotnet/api/system.single)<br>
`model` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`prompt` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`path_session` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`input_prefix` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`input_suffix` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`antiprompt` [List&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.list-1)<br>
`lora_adapter` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`lora_base` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`memory_f16` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`random_prompt` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`use_color` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`interactive` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`prompt_cache_all` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`embedding` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`interactive_first` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`instruct` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`penalize_nl` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`perplexity` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`use_mmap` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`use_mlock` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`mem_test` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
`verbose_prompt` [Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>

View File

@ -0,0 +1,101 @@
# ResettableLLamaModel
Namespace: LLama
A LLamaModel that can be reset. Note that using this class will consume about 10% more memory.
```csharp
public class ResettableLLamaModel : LLamaModel, System.IDisposable
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [LLamaModel](./llama.llamamodel.md) → [ResettableLLamaModel](./llama.resettablellamamodel.md)<br>
Implements [IDisposable](https://docs.microsoft.com/en-us/dotnet/api/system.idisposable)
## Properties
### **OriginalState**
The initial state of the model
```csharp
public Byte[] OriginalState { get; set; }
```
#### Property Value
[Byte[]](https://docs.microsoft.com/en-us/dotnet/api/system.byte)<br>
### **ContextSize**
The context size.
```csharp
public int ContextSize { get; }
```
#### Property Value
[Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **Params**
The model params set for this model.
```csharp
public ModelParams Params { get; set; }
```
#### Property Value
[ModelParams](./llama.common.modelparams.md)<br>
### **NativeHandle**
The native handle, which is used to be passed to the native APIs. Please avoid using it
unless you know what is the usage of the Native API.
```csharp
public SafeLLamaContextHandle NativeHandle { get; }
```
#### Property Value
[SafeLLamaContextHandle](./llama.native.safellamacontexthandle.md)<br>
### **Encoding**
The encoding set for this model to deal with text input.
```csharp
public Encoding Encoding { get; }
```
#### Property Value
[Encoding](https://docs.microsoft.com/en-us/dotnet/api/system.text.encoding)<br>
## Constructors
### **ResettableLLamaModel(ModelParams, String)**
```csharp
public ResettableLLamaModel(ModelParams Params, string encoding)
```
#### Parameters
`Params` [ModelParams](./llama.common.modelparams.md)<br>
`encoding` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
## Methods
### **Reset()**
Reset the state to the initial state.
```csharp
public void Reset()
```

View File

@ -0,0 +1,234 @@
# StatefulExecutorBase
Namespace: LLama
The base class for stateful LLama executors.
```csharp
public abstract class StatefulExecutorBase : LLama.Abstractions.ILLamaExecutor
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [StatefulExecutorBase](./llama.statefulexecutorbase.md)<br>
Implements [ILLamaExecutor](./llama.abstractions.illamaexecutor.md)
## Properties
### **Model**
The model used by the executor.
```csharp
public LLamaModel Model { get; }
```
#### Property Value
[LLamaModel](./llama.llamamodel.md)<br>
## Methods
### **WithSessionFile(String)**
This API is currently not verified.
```csharp
public StatefulExecutorBase WithSessionFile(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
#### Returns
[StatefulExecutorBase](./llama.statefulexecutorbase.md)<br>
#### Exceptions
[ArgumentNullException](https://docs.microsoft.com/en-us/dotnet/api/system.argumentnullexception)<br>
[RuntimeError](./llama.exceptions.runtimeerror.md)<br>
### **SaveSessionFile(String)**
This API has not been verified yet.
```csharp
public void SaveSessionFile(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **HandleRunOutOfContext(Int32)**
After running out of the context, take some tokens from the original prompt and recompute the logits in batches.
```csharp
protected void HandleRunOutOfContext(int tokensToKeep)
```
#### Parameters
`tokensToKeep` [Int32](https://docs.microsoft.com/en-us/dotnet/api/system.int32)<br>
### **TryReuseMathingPrefix()**
Try to reuse the matching prefix from the session file.
```csharp
protected void TryReuseMathingPrefix()
```
### **GetLoopCondition(InferStateArgs)**
Decide whether to continue the loop.
```csharp
protected abstract bool GetLoopCondition(InferStateArgs args)
```
#### Parameters
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **PreprocessInputs(String, InferStateArgs)**
Preprocess the inputs before the inference.
```csharp
protected abstract void PreprocessInputs(string text, InferStateArgs args)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>
### **PostProcess(InferenceParams, InferStateArgs, IEnumerable`1&)**
Do some post processing after the inference.
```csharp
protected abstract bool PostProcess(InferenceParams inferenceParams, InferStateArgs args, IEnumerable`1& extraOutputs)
```
#### Parameters
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>
`extraOutputs` [IEnumerable`1&](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1&)<br>
#### Returns
[Boolean](https://docs.microsoft.com/en-us/dotnet/api/system.boolean)<br>
### **InferInternal(InferenceParams, InferStateArgs)**
The core inference logic.
```csharp
protected abstract void InferInternal(InferenceParams inferenceParams, InferStateArgs args)
```
#### Parameters
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`args` [InferStateArgs](./llama.statefulexecutorbase.inferstateargs.md)<br>
### **SaveState(String)**
Save the current state to a file.
```csharp
public abstract void SaveState(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **GetStateData()**
Get the current state data.
```csharp
public abstract ExecutorBaseState GetStateData()
```
#### Returns
[ExecutorBaseState](./llama.statefulexecutorbase.executorbasestate.md)<br>
### **LoadState(ExecutorBaseState)**
Load the state from data.
```csharp
public abstract void LoadState(ExecutorBaseState data)
```
#### Parameters
`data` [ExecutorBaseState](./llama.statefulexecutorbase.executorbasestate.md)<br>
### **LoadState(String)**
Load the state from a file.
```csharp
public abstract void LoadState(string filename)
```
#### Parameters
`filename` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
### **Infer(String, InferenceParams, CancellationToken)**
Execute the inference.
```csharp
public IEnumerable<string> Infer(string text, InferenceParams inferenceParams, CancellationToken cancellationToken)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`cancellationToken` [CancellationToken](https://docs.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken)<br>
#### Returns
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
### **InferAsync(String, InferenceParams, CancellationToken)**
Execute the inference asynchronously.
```csharp
public IAsyncEnumerable<string> InferAsync(string text, InferenceParams inferenceParams, CancellationToken cancellationToken)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`cancellationToken` [CancellationToken](https://docs.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken)<br>
#### Returns
[IAsyncEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.iasyncenumerable-1)<br>

View File

@ -0,0 +1,80 @@
# StatelessExecutor
Namespace: LLama
This executor infers the input as a one-time job. Previous inputs won't impact the
response to the current input.
```csharp
public class StatelessExecutor : LLama.Abstractions.ILLamaExecutor
```
Inheritance [Object](https://docs.microsoft.com/en-us/dotnet/api/system.object) → [StatelessExecutor](./llama.statelessexecutor.md)<br>
Implements [ILLamaExecutor](./llama.abstractions.illamaexecutor.md)
## Properties
### **Model**
The model used by the executor when running the inference.
```csharp
public LLamaModel Model { get; }
```
#### Property Value
[LLamaModel](./llama.llamamodel.md)<br>
## Constructors
### **StatelessExecutor(LLamaModel)**
```csharp
public StatelessExecutor(LLamaModel model)
```
#### Parameters
`model` [LLamaModel](./llama.llamamodel.md)<br>
The LLama model.
## Methods
### **Infer(String, InferenceParams, CancellationToken)**
```csharp
public IEnumerable<string> Infer(string text, InferenceParams inferenceParams, CancellationToken cancellationToken)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`cancellationToken` [CancellationToken](https://docs.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken)<br>
#### Returns
[IEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.ienumerable-1)<br>
### **InferAsync(String, InferenceParams, CancellationToken)**
```csharp
public IAsyncEnumerable<string> InferAsync(string text, InferenceParams inferenceParams, CancellationToken token)
```
#### Parameters
`text` [String](https://docs.microsoft.com/en-us/dotnet/api/system.string)<br>
`inferenceParams` [InferenceParams](./llama.common.inferenceparams.md)<br>
`token` [CancellationToken](https://docs.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken)<br>
#### Returns
[IAsyncEnumerable&lt;String&gt;](https://docs.microsoft.com/en-us/dotnet/api/system.collections.generic.iasyncenumerable-1)<br>

View File

@ -26,4 +26,65 @@ nav:
- BotSharp: HighLevelApps/bot-sharp.md
- More:
- Logger: More/log.md
theme: readthedocs
- API Reference:
- index: ./xmldocs/index.md
- llama.abstractions.ihistorytransform: ./xmldocs/llama.abstractions.ihistorytransform.md
- llama.abstractions.illamaexecutor: ./xmldocs/llama.abstractions.illamaexecutor.md
- llama.abstractions.itextstreamtransform: ./xmldocs/llama.abstractions.itextstreamtransform.md
- llama.abstractions.itexttransform: ./xmldocs/llama.abstractions.itexttransform.md
- llama.chatsession: ./xmldocs/llama.chatsession.md
- llama.common.authorrole: ./xmldocs/llama.common.authorrole.md
- llama.common.chathistory: ./xmldocs/llama.common.chathistory.md
- llama.common.fixedsizequeue-1: ./xmldocs/llama.common.fixedsizequeue-1.md
- llama.common.illamalogger: ./xmldocs/llama.common.illamalogger.md
- llama.common.inferenceparams: ./xmldocs/llama.common.inferenceparams.md
- llama.common.llamadefaultlogger: ./xmldocs/llama.common.llamadefaultlogger.md
- llama.common.mirostatetype: ./xmldocs/llama.common.mirostatetype.md
- llama.common.modelparams: ./xmldocs/llama.common.modelparams.md
- llama.exceptions.runtimeerror: ./xmldocs/llama.exceptions.runtimeerror.md
- llama.extensions.dictionaryextension: ./xmldocs/llama.extensions.dictionaryextension.md
- llama.instructexecutor: ./xmldocs/llama.instructexecutor.md
- llama.interactiveexecutor: ./xmldocs/llama.interactiveexecutor.md
- llama.llamaembedder: ./xmldocs/llama.llamaembedder.md
- llama.llamamodel: ./xmldocs/llama.llamamodel.md
- llama.llamaquantizer: ./xmldocs/llama.llamaquantizer.md
- llama.llamatransforms: ./xmldocs/llama.llamatransforms.md
- llama.native.llamacontextparams: ./xmldocs/llama.native.llamacontextparams.md
- llama.native.llamaftype: ./xmldocs/llama.native.llamaftype.md
- llama.native.llamatokendata: ./xmldocs/llama.native.llamatokendata.md
- llama.native.llamatokendataarray: ./xmldocs/llama.native.llamatokendataarray.md
- llama.native.llamatokendataarraynative: ./xmldocs/llama.native.llamatokendataarraynative.md
- llama.native.nativeapi: ./xmldocs/llama.native.nativeapi.md
- llama.native.safellamacontexthandle: ./xmldocs/llama.native.safellamacontexthandle.md
- llama.native.safellamahandlebase: ./xmldocs/llama.native.safellamahandlebase.md
- llama.oldversion.chatcompletion: ./xmldocs/llama.oldversion.chatcompletion.md
- llama.oldversion.chatcompletionchoice: ./xmldocs/llama.oldversion.chatcompletionchoice.md
- llama.oldversion.chatcompletionchunk: ./xmldocs/llama.oldversion.chatcompletionchunk.md
- llama.oldversion.chatcompletionchunkchoice: ./xmldocs/llama.oldversion.chatcompletionchunkchoice.md
- llama.oldversion.chatcompletionchunkdelta: ./xmldocs/llama.oldversion.chatcompletionchunkdelta.md
- llama.oldversion.chatcompletionmessage: ./xmldocs/llama.oldversion.chatcompletionmessage.md
- llama.oldversion.chatmessagerecord: ./xmldocs/llama.oldversion.chatmessagerecord.md
- llama.oldversion.chatrole: ./xmldocs/llama.oldversion.chatrole.md
- llama.oldversion.chatsession-1: ./xmldocs/llama.oldversion.chatsession-1.md
- llama.oldversion.completion: ./xmldocs/llama.oldversion.completion.md
- llama.oldversion.completionchoice: ./xmldocs/llama.oldversion.completionchoice.md
- llama.oldversion.completionchunk: ./xmldocs/llama.oldversion.completionchunk.md
- llama.oldversion.completionlogprobs: ./xmldocs/llama.oldversion.completionlogprobs.md
- llama.oldversion.completionusage: ./xmldocs/llama.oldversion.completionusage.md
- llama.oldversion.embedding: ./xmldocs/llama.oldversion.embedding.md
- llama.oldversion.embeddingdata: ./xmldocs/llama.oldversion.embeddingdata.md
- llama.oldversion.embeddingusage: ./xmldocs/llama.oldversion.embeddingusage.md
- llama.oldversion.ichatmodel: ./xmldocs/llama.oldversion.ichatmodel.md
- llama.oldversion.llamaembedder: ./xmldocs/llama.oldversion.llamaembedder.md
- llama.oldversion.llamamodel: ./xmldocs/llama.oldversion.llamamodel.md
- llama.oldversion.llamaparams: ./xmldocs/llama.oldversion.llamaparams.md
- llama.resettablellamamodel: ./xmldocs/llama.resettablellamamodel.md
- llama.statefulexecutorbase: ./xmldocs/llama.statefulexecutorbase.md
- llama.statelessexecutor: ./xmldocs/llama.statelessexecutor.md
theme:
name: material
extra:
version:
provider: mike

1526
site/404.html Normal file

File diff suppressed because it is too large Load Diff

1629
site/Architecher/index.html Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

1748
site/GetStarted/index.html Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

1681
site/Tricks/index.html Normal file

File diff suppressed because it is too large Load Diff

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 KiB

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,18 @@
/*!
* Lunr languages, `Danish` language
* https://github.com/MihaiValentin/lunr-languages
*
* Copyright 2014, Mihai Valentin
* http://www.mozilla.org/MPL/
*/
/*!
* based on
* Snowball JavaScript Library v0.3
* http://code.google.com/p/urim/
* http://snowball.tartarus.org/
*
* Copyright 2010, Oleg Mazko
* http://www.mozilla.org/MPL/
*/
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.da=function(){this.pipeline.reset(),this.pipeline.add(e.da.trimmer,e.da.stopWordFilter,e.da.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.da.stemmer))},e.da.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA--",e.da.trimmer=e.trimmerSupport.generateTrimmer(e.da.wordCharacters),e.Pipeline.registerFunction(e.da.trimmer,"trimmer-da"),e.da.stemmer=function(){var r=e.stemmerSupport.Among,i=e.stemmerSupport.SnowballProgram,n=new function(){function e(){var e,r=f.cursor+3;if(d=f.limit,0<=r&&r<=f.limit){for(a=r;;){if(e=f.cursor,f.in_grouping(w,97,248)){f.cursor=e;break}if(f.cursor=e,e>=f.limit)return;f.cursor++}for(;!f.out_grouping(w,97,248);){if(f.cursor>=f.limit)return;f.cursor++}d=f.cursor,d<a&&(d=a)}}function n(){var e,r;if(f.cursor>=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(c,32),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 1:f.slice_del();break;case 2:f.in_grouping_b(p,97,229)&&f.slice_del()}}function t(){var e,r=f.limit-f.cursor;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.find_among_b(l,4)?(f.bra=f.cursor,f.limit_backward=e,f.cursor=f.limit-r,f.cursor>f.limit_backward&&(f.cursor--,f.bra=f.cursor,f.slice_del())):f.limit_backward=e)}function s(){var e,r,i,n=f.limit-f.cursor;if(f.ket=f.cursor,f.eq_s_b(2,"st")&&(f.bra=f.cursor,f.eq_s_b(2,"ig")&&f.slice_del()),f.cursor=f.limit-n,f.cursor>=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(m,5),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 
1:f.slice_del(),i=f.limit-f.cursor,t(),f.cursor=f.limit-i;break;case 2:f.slice_from("løs")}}function o(){var e;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.out_grouping_b(w,97,248)?(f.bra=f.cursor,u=f.slice_to(u),f.limit_backward=e,f.eq_v_b(u)&&f.slice_del()):f.limit_backward=e)}var a,d,u,c=[new r("hed",-1,1),new r("ethed",0,1),new r("ered",-1,1),new r("e",-1,1),new r("erede",3,1),new r("ende",3,1),new r("erende",5,1),new r("ene",3,1),new r("erne",3,1),new r("ere",3,1),new r("en",-1,1),new r("heden",10,1),new r("eren",10,1),new r("er",-1,1),new r("heder",13,1),new r("erer",13,1),new r("s",-1,2),new r("heds",16,1),new r("es",16,1),new r("endes",18,1),new r("erendes",19,1),new r("enes",18,1),new r("ernes",18,1),new r("eres",18,1),new r("ens",16,1),new r("hedens",24,1),new r("erens",24,1),new r("ers",16,1),new r("ets",16,1),new r("erets",28,1),new r("et",-1,1),new r("eret",30,1)],l=[new r("gd",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1)],m=[new r("ig",-1,1),new r("lig",0,1),new r("elig",1,1),new r("els",-1,1),new r("løst",-1,2)],w=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],p=[239,254,42,3,0,0,0,0,0,0,0,0,0,0,0,0,16],f=new i;this.setCurrent=function(e){f.setCurrent(e)},this.getCurrent=function(){return f.getCurrent()},this.stem=function(){var r=f.cursor;return e(),f.limit_backward=r,f.cursor=f.limit,n(),f.cursor=f.limit,t(),f.cursor=f.limit,s(),f.cursor=f.limit,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.da.stemmer,"stemmer-da"),e.da.stopWordFilter=e.generateStopWordFilter("ad af alle alt anden at blev blive bliver da de dem den denne der deres det dette dig din disse dog du efter eller en end er et for fra ham han hans har havde have hende hendes her hos hun hvad hvis hvor i ikke ind jeg jer jo kunne man mange med meget men mig min mine mit mod ned noget nogle nu 
når og også om op os over på selv sig sin sine sit skal skulle som sådan thi til ud under var vi vil ville vor være været".split(" ")),e.Pipeline.registerFunction(e.da.stopWordFilter,"stopWordFilter-da")}});

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.hi=function(){this.pipeline.reset(),this.pipeline.add(e.hi.trimmer,e.hi.stopWordFilter,e.hi.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.hi.stemmer))},e.hi.wordCharacters="ऀ-ःऄ-एऐ-टठ-यर-िी-ॏॐ-य़ॠ-९॰-ॿa-zA-Z--0-9-",e.hi.trimmer=e.trimmerSupport.generateTrimmer(e.hi.wordCharacters),e.Pipeline.registerFunction(e.hi.trimmer,"trimmer-hi"),e.hi.stopWordFilter=e.generateStopWordFilter("अत अपना अपनी अपने अभी अंदर आदि आप इत्यादि इन इनका इन्हीं इन्हें इन्हों इस इसका इसकी इसके इसमें इसी इसे उन उनका उनकी उनके उनको उन्हीं उन्हें उन्हों उस उसके उसी उसे एक एवं एस ऐसे और कई कर करता करते करना करने करें कहते कहा का काफ़ी कि कितना किन्हें किन्हों किया किर किस किसी किसे की कुछ कुल के को कोई कौन कौनसा गया घर जब जहाँ जा जितना जिन जिन्हें जिन्हों जिस जिसे जीधर जैसा जैसे जो तक तब तरह तिन तिन्हें तिन्हों तिस तिसे तो था थी थे दबारा दिया दुसरा दूसरे दो द्वारा न नके नहीं ना निहायत नीचे ने पर पहले पूरा पे फिर बनी बही बहुत बाद बाला बिलकुल भी भीतर मगर मानो मे में यदि यह यहाँ यही या यिह ये रखें रहा रहे ऱ्वासा लिए लिये लेकिन व वग़ैरह वर्ग वह वहाँ वहीं वाले वुह वे वो सकता सकते सबसे सभी साथ साबुत साभ सारा से सो संग ही हुआ हुई हुए है हैं हो होता होती होते होना होने".split(" ")),e.hi.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var r=e.wordcut;r.init(),e.hi.tokenizer=function(i){if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(r){return isLunr2?new e.Token(r.toLowerCase()):r.toLowerCase()});var t=i.toString().toLowerCase().replace(/^\s+/,"");return 
r.cut(t).split("|")},e.Pipeline.registerFunction(e.hi.stemmer,"stemmer-hi"),e.Pipeline.registerFunction(e.hi.stopWordFilter,"stopWordFilter-hi")}});

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.hy=function(){this.pipeline.reset(),this.pipeline.add(e.hy.trimmer,e.hy.stopWordFilter)},e.hy.wordCharacters="[A-Za-z԰-֏ff-ﭏ]",e.hy.trimmer=e.trimmerSupport.generateTrimmer(e.hy.wordCharacters),e.Pipeline.registerFunction(e.hy.trimmer,"trimmer-hy"),e.hy.stopWordFilter=e.generateStopWordFilter("դու և եք էիր էիք հետո նաև նրանք որը վրա է որ պիտի են այս մեջ ն իր ու ի այդ որոնք այն կամ էր մի ես համար այլ իսկ էին ենք հետ ին թ էինք մենք նրա նա դուք եմ էի ըստ որպես ում".split(" ")),e.Pipeline.registerFunction(e.hy.stopWordFilter,"stopWordFilter-hy"),e.hy.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}(),e.Pipeline.registerFunction(e.hy.stemmer,"stemmer-hy")}});

Some files were not shown because too many files have changed in this diff Show More