Removed (marked as obsolete) prompting with a string for `Conversation`. Tokenization requires extra parameters (e.g. addBos, special) which require special consideration. For now it's better to tokenize using other tools and pass the tokens directly.
This commit is contained in:
parent
6bf010d719
commit
3ba49754b1
|
@ -32,7 +32,7 @@ public class BatchedExecutorFork
|
|||
|
||||
// Evaluate the initial prompt to create one conversation
|
||||
using var start = executor.Create();
|
||||
start.Prompt(prompt);
|
||||
start.Prompt(executor.Context.Tokenize(prompt));
|
||||
await executor.Infer();
|
||||
|
||||
// Create the root node of the tree
|
||||
|
|
|
@ -34,9 +34,9 @@ public class BatchedExecutorGuidance
|
|||
|
||||
// Load the two prompts into two conversations
|
||||
using var guided = executor.Create();
|
||||
guided.Prompt(positivePrompt);
|
||||
guided.Prompt(executor.Context.Tokenize(positivePrompt));
|
||||
using var guidance = executor.Create();
|
||||
guidance.Prompt(negativePrompt);
|
||||
guidance.Prompt(executor.Context.Tokenize(negativePrompt));
|
||||
|
||||
// Run inference to evaluate prompts
|
||||
await AnsiConsole
|
||||
|
|
|
@ -33,7 +33,7 @@ public class BatchedExecutorRewind
|
|||
|
||||
// Evaluate the initial prompt to create one conversation
|
||||
using var conversation = executor.Create();
|
||||
conversation.Prompt(prompt);
|
||||
conversation.Prompt(executor.Context.Tokenize(prompt));
|
||||
|
||||
// Create the start node wrapping the conversation
|
||||
var node = new Node(executor.Context);
|
||||
|
|
|
@ -31,7 +31,7 @@ public class BatchedExecutorSaveAndLoad
|
|||
|
||||
// Create a conversation
|
||||
var conversation = executor.Create();
|
||||
conversation.Prompt(prompt);
|
||||
conversation.Prompt(executor.Context.Tokenize(prompt));
|
||||
|
||||
// Run inference loop
|
||||
var decoder = new StreamingTokenDecoder(executor.Context);
|
||||
|
|
|
@ -55,23 +55,6 @@ public sealed class BatchedExecutor
|
|||
Epoch = 1;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Start a new <see cref="Conversation"/> with the given prompt
|
||||
/// </summary>
|
||||
/// <param name="prompt"></param>
|
||||
/// <returns></returns>
|
||||
[Obsolete("Use BatchedExecutor.Create instead")]
|
||||
public Conversation Prompt(string prompt)
|
||||
{
|
||||
if (IsDisposed)
|
||||
throw new ObjectDisposedException(nameof(BatchedExecutor));
|
||||
|
||||
var conversation = Create();
|
||||
conversation.Prompt(prompt);
|
||||
|
||||
return conversation;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Start a new <see cref="Conversation"/>
|
||||
/// </summary>
|
||||
|
|
|
@ -166,11 +166,12 @@ public sealed class Conversation
|
|||
/// </summary>
|
||||
/// <param name="input"></param>
|
||||
/// <returns></returns>
|
||||
public void Prompt(string input)
|
||||
[Obsolete("Tokenize the text and pass the tokens instead")]
|
||||
public void Prompt(string input, bool addBos, bool special)
|
||||
{
|
||||
AssertCanBePrompted();
|
||||
|
||||
Prompt(Executor.Context.Tokenize(input));
|
||||
Prompt(Executor.Context.Tokenize(input, addBos, special));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
|
Loading…
Reference in New Issue