Fixed WebAPI

Upgraded to .NET 8.0
Fixed logging (removed Console writes, replaced with ILogger)
Fixed nullability warnings (uninitialized non-nullable strings, possible null arguments, etc.)

The application now returns data from the back-end chat services.
Scot McConnaughay 2023-12-18 00:38:05 -07:00
parent 7139281e4d
commit 2abc2ecfca
6 changed files with 84 additions and 81 deletions
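For reference, a minimal sketch of a controller that would surface these chat services over HTTP, matching the ILogger constructor-injection pattern this commit introduces. The controller is not part of this diff, so the class, route, and action names below are illustrative assumptions rather than the repository's actual code:

    using LLama.WebAPI.Models;    // assumed namespace of SendMessageInput
    using LLama.WebAPI.Services;
    using Microsoft.AspNetCore.Mvc;

    namespace LLama.WebAPI.Controllers;

    [ApiController]
    [Route("[controller]")]
    public class ChatController : ControllerBase
    {
        private readonly ILogger<ChatController> _logger;

        // ILogger is constructor-injected, the same pattern this commit applies to StatefulChatService.
        public ChatController(ILogger<ChatController> logger)
        {
            _logger = logger;
        }

        // POST /Chat/Send (hypothetical route): forwards the message to the singleton chat service
        // registered in Program.cs and returns its reply as the response body.
        [HttpPost("Send")]
        public async Task<ActionResult<string>> SendMessage(
            [FromBody] SendMessageInput input,
            [FromServices] StatefulChatService chatService)
        {
            _logger.LogInformation("Input: {text}", input.Text);
            var reply = await chatService.Send(input);
            return Ok(reply);
        }
    }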


@@ -1,25 +1,27 @@
 <Project Sdk="Microsoft.NET.Sdk.Web">
   <Import Project="..\LLama\LLamaSharp.Runtime.targets" />

   <PropertyGroup>
-    <TargetFramework>net6.0</TargetFramework>
+    <TargetFramework>net8.0</TargetFramework>
     <Nullable>enable</Nullable>
     <ImplicitUsings>enable</ImplicitUsings>
+    <InvariantGlobalization>true</InvariantGlobalization>
   </PropertyGroup>

   <ItemGroup>
     <PackageReference Include="Microsoft.VisualStudio.Validation" Version="17.8.8" />
-    <PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
+    <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="8.0.0" />
+    <PackageReference Include="Swashbuckle.AspNetCore" Version="6.4.0" />
   </ItemGroup>

   <ItemGroup>
     <ProjectReference Include="..\LLama\LLamaSharp.csproj" />
   </ItemGroup>

   <ItemGroup>
     <Content Update="Properties\launchSettings.json">
       <ExcludeFromSingleFile>true</ExcludeFromSingleFile>
       <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
       <CopyToPublishDirectory>Never</CopyToPublishDirectory>
     </Content>
   </ItemGroup>
 </Project>


@@ -2,15 +2,15 @@
 public class SendMessageInput
 {
-    public string Text { get; set; }
+    public string Text { get; set; } = "";
 }

 public class HistoryInput
 {
-    public List<HistoryItem> Messages { get; set; }
+    public List<HistoryItem> Messages { get; set; } = [];

     public class HistoryItem
     {
-        public string Role { get; set; }
-        public string Content { get; set; }
+        public string Role { get; set; } = "User";
+        public string Content { get; set; } = "";
     }
 }


@@ -1,28 +1,32 @@
 using LLama.WebAPI.Services;

 var builder = WebApplication.CreateBuilder(args);

 // Add services to the container.

 builder.Services.AddControllers();
 // Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
 builder.Services.AddEndpointsApiExplorer();
 builder.Services.AddSwaggerGen();

 builder.Services.AddSingleton<StatefulChatService>();
 builder.Services.AddScoped<StatelessChatService>();

 var app = builder.Build();

+app.UseRouting();
+
 // Configure the HTTP request pipeline.
 if (app.Environment.IsDevelopment())
 {
     app.UseSwagger();
     app.UseSwaggerUI();
 }

 app.UseAuthorization();

-app.MapControllers();
+app.UseEndpoints(endpoints =>
+{
+    _ = endpoints.MapControllers();
+});

 app.Run();


@@ -9,13 +9,14 @@ public class StatefulChatService : IDisposable
 {
     private readonly ChatSession _session;
     private readonly LLamaContext _context;
+    private readonly ILogger<StatefulChatService> _logger;
     private bool _continue = false;

     private const string SystemPrompt = "Transcript of a dialog, where the User interacts with an Assistant. Assistant is helpful, kind, honest, good at writing, and never fails to answer the User's requests immediately and with precision.";

-    public StatefulChatService(IConfiguration configuration)
+    public StatefulChatService(IConfiguration configuration, ILogger<StatefulChatService> logger)
     {
-        var @params = new Common.ModelParams(configuration["ModelPath"])
+        var @params = new Common.ModelParams(configuration["ModelPath"]!)
         {
             ContextSize = 512,
         };
@@ -23,6 +24,7 @@ public class StatefulChatService : IDisposable

         // todo: share weights from a central service
         using var weights = LLamaWeights.LoadFromFile(@params);
+        _logger = logger;

         _context = new LLamaContext(weights, @params);
         _session = new ChatSession(new InteractiveExecutor(_context));
@@ -36,16 +38,13 @@ public class StatefulChatService : IDisposable
     public async Task<string> Send(SendMessageInput input)
     {
         if (!_continue)
         {
-            Console.Write(SystemPrompt);
+            _logger.LogInformation("Prompt: {text}", SystemPrompt);
             _continue = true;
         }

-        Console.ForegroundColor = ConsoleColor.Green;
-        Console.Write(input.Text);
-        Console.ForegroundColor = ConsoleColor.White;
+        _logger.LogInformation("Input: {text}", input.Text);

         var outputs = _session.ChatAsync(
             new Common.ChatHistory.Message(Common.AuthorRole.User, input.Text),
             new Common.InferenceParams()
@@ -57,7 +56,7 @@ public class StatefulChatService : IDisposable

         var result = "";
         await foreach (var output in outputs)
         {
-            Console.Write(output);
+            _logger.LogInformation("Message: {output}", output);
             result += output;
         }
@@ -68,16 +67,14 @@ public class StatefulChatService : IDisposable
     {
         if (!_continue)
         {
-            Console.Write(SystemPrompt);
+            _logger.LogInformation(SystemPrompt);
             _continue = true;
         }

-        Console.ForegroundColor = ConsoleColor.Green;
-        Console.Write(input.Text);
-        Console.ForegroundColor = ConsoleColor.White;
+        _logger.LogInformation(input.Text);

         var outputs = _session.ChatAsync(
-            new Common.ChatHistory.Message(Common.AuthorRole.User, input.Text)
+            new Common.ChatHistory.Message(Common.AuthorRole.User, input.Text!)
             , new Common.InferenceParams()
             {
                 RepeatPenalty = 1.0f,
@@ -86,7 +83,7 @@ public class StatefulChatService : IDisposable

         await foreach (var output in outputs)
         {
-            Console.Write(output);
+            _logger.LogInformation(output);
             yield return output;
         }
     }


@@ -12,7 +12,7 @@ namespace LLama.WebAPI.Services

         public StatelessChatService(IConfiguration configuration)
         {
-            var @params = new Common.ModelParams(configuration["ModelPath"])
+            var @params = new Common.ModelParams(configuration["ModelPath"]!)
             {
                 ContextSize = 512,
             };


@@ -1,8 +1,8 @@
 {
   "Logging": {
     "LogLevel": {
       "Default": "Information",
       "Microsoft.AspNetCore": "Warning"
     }
   }
 }