diff --git a/LLama.SemanticKernel/TextCompletion/LLamaSharpTextCompletion.cs b/LLama.SemanticKernel/TextCompletion/LLamaSharpTextCompletion.cs
index 0bcbfa16..e4235ab7 100644
--- a/LLama.SemanticKernel/TextCompletion/LLamaSharpTextCompletion.cs
+++ b/LLama.SemanticKernel/TextCompletion/LLamaSharpTextCompletion.cs
@@ -16,14 +16,16 @@ public sealed class LLamaSharpTextCompletion : ITextCompletion
 
     public async Task<IReadOnlyList<ITextResult>> GetCompletionsAsync(string text, AIRequestSettings? requestSettings, CancellationToken cancellationToken = default)
     {
-        var settings = (ChatRequestSettings)requestSettings;
+        var settings = (ChatRequestSettings?)requestSettings;
         var result = executor.InferAsync(text, settings?.ToLLamaSharpInferenceParams(), cancellationToken);
         return await Task.FromResult(new List<ITextResult> { new LLamaTextResult(result) }.AsReadOnly()).ConfigureAwait(false);
     }
 
+#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously.
     public async IAsyncEnumerable<ITextStreamingResult> GetStreamingCompletionsAsync(string text, AIRequestSettings? requestSettings, CancellationToken cancellationToken = default)
+#pragma warning restore CS1998
     {
-        var settings = (ChatRequestSettings)requestSettings;
+        var settings = (ChatRequestSettings?)requestSettings;
         var result = executor.InferAsync(text, settings?.ToLLamaSharpInferenceParams(), cancellationToken);
         yield return new LLamaTextResult(result);
     }
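
Note (not part of the diff): the two changes above are (1) casting `requestSettings` to `ChatRequestSettings?` so the nullable-reference analysis accepts a null settings argument, with `settings?.ToLLamaSharpInferenceParams()` then forwarding null inference parameters instead of warning, and (2) suppressing CS1998 on the streaming method, which is an async iterator that only yields an already-started inference and never awaits. The following minimal, self-contained sketch uses hypothetical stand-in types (`RequestSettings`, `ChatRequestSettingsLike`, `ToInferenceParams`), not the Semantic Kernel or LLamaSharp API, to illustrate both patterns in isolation:

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

class RequestSettings { }                        // hypothetical stand-in for AIRequestSettings

class ChatRequestSettingsLike : RequestSettings  // hypothetical stand-in for ChatRequestSettings
{
    public int MaxTokens { get; init; } = 256;

    // Stand-in for ToLLamaSharpInferenceParams(): maps settings to inference parameters.
    public string ToInferenceParams() => $"max_tokens={MaxTokens}";
}

static class Demo
{
    // Pattern 1: the nullable cast keeps nullable-reference analysis happy when the
    // caller passes null; the '?.' call then yields null instead of a warning or crash.
    static string? ResolveParams(RequestSettings? requestSettings)
    {
        var settings = (ChatRequestSettingsLike?)requestSettings;
        return settings?.ToInferenceParams();    // null when no settings were supplied
    }

    // Pattern 2: an async iterator that wraps an existing result and never awaits.
    // The compiler reports CS1998 at the signature, hence the surrounding pragma.
#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously.
    static async IAsyncEnumerable<string> StreamAsync(string text, RequestSettings? requestSettings)
#pragma warning restore CS1998
    {
        yield return $"completion for '{text}' with params: {ResolveParams(requestSettings) ?? "(defaults)"}";
    }

    static async Task Main()
    {
        Console.WriteLine(ResolveParams(null) ?? "(defaults)");
        Console.WriteLine(ResolveParams(new ChatRequestSettingsLike { MaxTokens = 64 }));

        await foreach (var chunk in StreamAsync("hello", null))
            Console.WriteLine(chunk);
    }
}
```

Suppressing CS1998 around the signature only (rather than restructuring the method) keeps the diff minimal while preserving the `IAsyncEnumerable` streaming contract of the interface.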