Change to nullable cast in LLamaSharpTextCompletion.
This commit is contained in:
parent
f64a54c9c8
commit
10a7d68330
|
@@ -16,14 +16,16 @@ public sealed class LLamaSharpTextCompletion : ITextCompletion
|
|||
|
||||
/// <summary>
/// Produces a single (non-streaming) text completion for <paramref name="text"/>.
/// </summary>
/// <param name="text">The prompt to complete.</param>
/// <param name="requestSettings">
/// Optional request settings; expected to be a <c>ChatRequestSettings</c> instance when non-null.
/// NOTE(review): the hard cast will throw <see cref="InvalidCastException"/> if a different
/// <c>AIRequestSettings</c> subtype is passed — confirm callers only ever pass ChatRequestSettings.
/// </param>
/// <param name="cancellationToken">Token used to cancel the inference.</param>
/// <returns>A read-only list containing a single <c>LLamaTextResult</c> wrapping the inference stream.</returns>
public async Task<IReadOnlyList<ITextResult>> GetCompletionsAsync(string text, AIRequestSettings? requestSettings, CancellationToken cancellationToken = default)
{
    // Nullable cast: a null requestSettings flows through as null settings,
    // and the null-conditional below falls back to default inference params.
    var settings = (ChatRequestSettings?)requestSettings;
    var result = executor.InferAsync(text, settings?.ToLLamaSharpInferenceParams(), cancellationToken);
    return await Task.FromResult(new List<ITextResult> { new LLamaTextResult(result) }.AsReadOnly()).ConfigureAwait(false);
}
|
||||
|
||||
#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously.
/// <summary>
/// Produces a streaming text completion for <paramref name="text"/>.
/// </summary>
/// <param name="text">The prompt to complete.</param>
/// <param name="requestSettings">
/// Optional request settings; expected to be a <c>ChatRequestSettings</c> instance when non-null.
/// NOTE(review): the hard cast will throw <see cref="InvalidCastException"/> for other
/// <c>AIRequestSettings</c> subtypes — confirm callers only ever pass ChatRequestSettings.
/// </param>
/// <param name="cancellationToken">Token used to cancel the inference.</param>
/// <returns>An async sequence yielding a single streaming result that wraps the inference stream.</returns>
public async IAsyncEnumerable<ITextStreamingResult> GetStreamingCompletionsAsync(string text, AIRequestSettings? requestSettings, CancellationToken cancellationToken = default)
#pragma warning restore CS1998
{
    // Nullable cast mirrors GetCompletionsAsync: null settings fall back to
    // default inference params via the null-conditional below.
    var settings = (ChatRequestSettings?)requestSettings;
    var result = executor.InferAsync(text, settings?.ToLLamaSharpInferenceParams(), cancellationToken);
    // The LLamaTextResult consumes the IAsyncEnumerable lazily, so no await is
    // needed here — hence the CS1998 suppression around the signature.
    yield return new LLamaTextResult(result);
}
|
||||
|
|
Loading…
Reference in New Issue