Add ResponseFormat — corrects a non-standard way of working with PromptExecutionSettings;
the requested format can be used downstream to post-process the messages.
This commit is contained in:
parent
59a0afdb77
commit
2aa96b206f
|
@ -74,6 +74,12 @@ public class LLamaSharpPromptExecutionSettings : PromptExecutionSettings
|
|||
/// <summary>
/// Per-token sampling biases, keyed by token id. Serialized as "token_selection_biases".
/// NOTE(review): exact bias semantics are not visible here — confirm against the sampler that consumes this map.
/// </summary>
[JsonPropertyName("token_selection_biases")]
public IDictionary<int, int> TokenSelectionBiases { get; set; } = new Dictionary<int, int>();
|
||||
|
||||
/// <summary>
/// The requested response format. Downstream consumers may use this value to
/// post-process messages — e.g. Handlebars: handlebars_object; JSON: json_object; etc.
/// </summary>
[JsonPropertyName("response_format")]
public string ResponseFormat { get; set; } = "";
|
||||
|
||||
/// <summary>
|
||||
/// Create a new settings object with the values from another settings object.
|
||||
/// </summary>
|
||||
|
|
Loading…
Reference in New Issue