Adding Response Format - Correcting non-standard way of working with PromptExecutionSettings

The response format can be used downstream to post-process the messages based on the requested format.
Zoli Somogyi 2024-04-27 09:39:40 +02:00
parent 59a0afdb77
commit 2aa96b206f
1 changed file with 6 additions and 0 deletions


@@ -74,6 +74,12 @@ public class LLamaSharpPromptExecutionSettings : PromptExecutionSettings
     [JsonPropertyName("token_selection_biases")]
     public IDictionary<int, int> TokenSelectionBiases { get; set; } = new Dictionary<int, int>();
 
+    /// <summary>
+    /// Indicates the format of the response which can be used downstream to post-process the messages. Handlebars: handlebars_object. JSON: json_object, etc.
+    /// </summary>
+    [JsonPropertyName("response_format")]
+    public string ResponseFormat { get; set; } = string.Empty;
+
     /// <summary>
     /// Create a new settings object with the values from another settings object.
     /// </summary>
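
A minimal usage sketch. Only the ResponseFormat property and its documented values come from this diff; the object initialization and the downstream post-processing branch are illustrative assumptions, not part of the commit:

    // Request JSON output via the setting added in this commit.
    var settings = new LLamaSharpPromptExecutionSettings
    {
        ResponseFormat = "json_object" // or e.g. "handlebars_object", per the doc comment
    };

    // Downstream, branch on the requested format when post-processing the model's message.
    if (settings.ResponseFormat == "json_object")
    {
        // e.g. validate or parse the returned text as JSON before handing it back to the caller.
    }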