diff --git a/Directory.Packages.props b/Directory.Packages.props
index 693d4f9..c6d70e6 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -5,6 +5,9 @@
+
+
+
diff --git a/SmartComponents.sln b/SmartComponents.sln
index 0ffc2fe..00929b5 100644
--- a/SmartComponents.sln
+++ b/SmartComponents.sln
@@ -27,8 +27,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TestBlazorApp", "test\testa
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SmartComponents.Inference", "src\SmartComponents.Inference\SmartComponents.Inference.csproj", "{ED69BAFD-00BC-4086-A34D-466CFCEBB300}"
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SmartComponents.Inference.OpenAI", "src\SmartComponents.Inference.OpenAI\SmartComponents.Inference.OpenAI.csproj", "{7AE35372-45C8-4ACA-BEEA-265C8A272910}"
-EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SmartComponents.E2ETest.Common", "test\SmartComponents.E2ETest.Common\SmartComponents.E2ETest.Common.csproj", "{3970A925-2753-4E2C-B39C-420C3BC80373}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SmartComponents.E2ETest.Mvc", "test\SmartComponents.E2ETest.Mvc\SmartComponents.E2ETest.Mvc.csproj", "{A0530A9F-C57D-4FC7-89B3-2C0B0005ADF8}"
@@ -89,10 +87,6 @@ Global
{ED69BAFD-00BC-4086-A34D-466CFCEBB300}.Debug|Any CPU.Build.0 = Debug|Any CPU
{ED69BAFD-00BC-4086-A34D-466CFCEBB300}.Release|Any CPU.ActiveCfg = Release|Any CPU
{ED69BAFD-00BC-4086-A34D-466CFCEBB300}.Release|Any CPU.Build.0 = Release|Any CPU
- {7AE35372-45C8-4ACA-BEEA-265C8A272910}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
- {7AE35372-45C8-4ACA-BEEA-265C8A272910}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {7AE35372-45C8-4ACA-BEEA-265C8A272910}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {7AE35372-45C8-4ACA-BEEA-265C8A272910}.Release|Any CPU.Build.0 = Release|Any CPU
{3970A925-2753-4E2C-B39C-420C3BC80373}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{3970A925-2753-4E2C-B39C-420C3BC80373}.Debug|Any CPU.Build.0 = Debug|Any CPU
{3970A925-2753-4E2C-B39C-420C3BC80373}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -147,7 +141,6 @@ Global
{E139BF4E-3BC2-40FB-80BF-6B5DF2075DA2} = {7A830C0D-7E18-4674-A729-726085D9C0D1}
{1083F54F-D7D4-46FF-9554-278A1655A1D3} = {7A830C0D-7E18-4674-A729-726085D9C0D1}
{ED69BAFD-00BC-4086-A34D-466CFCEBB300} = {B1370349-29FA-49A1-A229-A31F7516A1FF}
- {7AE35372-45C8-4ACA-BEEA-265C8A272910} = {B1370349-29FA-49A1-A229-A31F7516A1FF}
{3970A925-2753-4E2C-B39C-420C3BC80373} = {03710CDB-ACD6-4712-95C8-B780EEEFAA29}
{A0530A9F-C57D-4FC7-89B3-2C0B0005ADF8} = {03710CDB-ACD6-4712-95C8-B780EEEFAA29}
{86CB7231-2401-4286-BDD2-778F2F0EBB6C} = {03710CDB-ACD6-4712-95C8-B780EEEFAA29}
diff --git a/samples/ExampleBlazorApp/ExampleBlazorApp.csproj b/samples/ExampleBlazorApp/ExampleBlazorApp.csproj
index 0699f52..8a9df4c 100644
--- a/samples/ExampleBlazorApp/ExampleBlazorApp.csproj
+++ b/samples/ExampleBlazorApp/ExampleBlazorApp.csproj
@@ -12,7 +12,6 @@
-
@@ -20,6 +19,12 @@
+
+
+
+
+
+
diff --git a/samples/ExampleBlazorApp/Program.cs b/samples/ExampleBlazorApp/Program.cs
index 16aeed6..08b4c73 100644
--- a/samples/ExampleBlazorApp/Program.cs
+++ b/samples/ExampleBlazorApp/Program.cs
@@ -2,7 +2,9 @@
// The .NET Foundation licenses this file to you under the MIT license.
using ExampleBlazorApp.Components;
-using SmartComponents.Inference.OpenAI;
+using Microsoft.Extensions.AI;
+using OpenAI;
+using SmartComponents.Inference;
using SmartComponents.LocalEmbeddings;
var builder = WebApplication.CreateBuilder(args);
@@ -12,11 +14,19 @@
builder.Services.AddRazorComponents()
.AddInteractiveServerComponents();
builder.Services.AddSmartComponents()
- .WithInferenceBackend<OpenAIInferenceBackend>()
.WithAntiforgeryValidation();
builder.Services.AddSingleton<LocalEmbedder>();
-
+// Note: the StartupKey value is just there so the app will start up.
+builder.Services.AddSingleton(new OpenAIClient(builder.Configuration["AI:OpenAI:Key"] ?? "StartupKey"));
+builder.Services.AddChatClient(services =>
+{
+ var chatClient = new SmartComponentsChatClient(services.GetRequiredService<OpenAIClient>()
+ .AsChatClient(builder.Configuration["AI:OpenAI:Chat:ModelId"] ?? "gpt-4o-mini"));
+ return chatClient;
+});
+builder.Services.AddEmbeddingGenerator(services =>
+ services.GetRequiredService<OpenAIClient>().AsEmbeddingGenerator(builder.Configuration["AI:OpenAI:Embedding:ModelId"] ?? "text-embedding-3-small"));
var app = builder.Build();
// Configure the HTTP request pipeline.
diff --git a/samples/ExampleMvcRazorPagesApp/ExampleMvcRazorPagesApp.csproj b/samples/ExampleMvcRazorPagesApp/ExampleMvcRazorPagesApp.csproj
index db40a57..4d69fa7 100644
--- a/samples/ExampleMvcRazorPagesApp/ExampleMvcRazorPagesApp.csproj
+++ b/samples/ExampleMvcRazorPagesApp/ExampleMvcRazorPagesApp.csproj
@@ -9,7 +9,6 @@
-
@@ -17,6 +16,12 @@
+
+
+
+
+
+
diff --git a/samples/ExampleMvcRazorPagesApp/Program.cs b/samples/ExampleMvcRazorPagesApp/Program.cs
index 024507b..d329d41 100644
--- a/samples/ExampleMvcRazorPagesApp/Program.cs
+++ b/samples/ExampleMvcRazorPagesApp/Program.cs
@@ -1,7 +1,9 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
-using SmartComponents.Inference.OpenAI;
+using Microsoft.Extensions.AI;
+using OpenAI;
+using SmartComponents.Inference;
using SmartComponents.LocalEmbeddings;
var builder = WebApplication.CreateBuilder(args);
@@ -11,10 +13,19 @@
builder.Services.AddControllersWithViews();
builder.Services.AddRazorPages();
builder.Services.AddSmartComponents()
- .WithInferenceBackend<OpenAIInferenceBackend>()
.WithAntiforgeryValidation();
builder.Services.AddSingleton<LocalEmbedder>();
+// Note: the StartupKey value is just there so the app will start up.
+builder.Services.AddSingleton(new OpenAIClient(builder.Configuration["AI:OpenAI:Key"] ?? "StartupKey"));
+builder.Services.AddChatClient(services =>
+{
+ var chatClient = new SmartComponentsChatClient(services.GetRequiredService<OpenAIClient>()
+ .AsChatClient(builder.Configuration["AI:OpenAI:Chat:ModelId"] ?? "gpt-4o-mini"));
+ return chatClient;
+});
+builder.Services.AddEmbeddingGenerator(services =>
+ services.GetRequiredService<OpenAIClient>().AsEmbeddingGenerator(builder.Configuration["AI:OpenAI:Embedding:ModelId"] ?? "text-embedding-3-small"));
var app = builder.Build();
diff --git a/src/SmartComponents.AspNetCore/DefaultSmartComponentsBuilder.cs b/src/SmartComponents.AspNetCore/DefaultSmartComponentsBuilder.cs
index 607b98b..3ae82a6 100644
--- a/src/SmartComponents.AspNetCore/DefaultSmartComponentsBuilder.cs
+++ b/src/SmartComponents.AspNetCore/DefaultSmartComponentsBuilder.cs
@@ -2,21 +2,21 @@
// The .NET Foundation licenses this file to you under the MIT license.
using Microsoft.Extensions.DependencyInjection;
-using SmartComponents.StaticAssets.Inference;
+using Microsoft.Extensions.AI;
namespace Microsoft.AspNetCore.Builder;
internal sealed class DefaultSmartComponentsBuilder(IServiceCollection services) : ISmartComponentsBuilder
{
- public ISmartComponentsBuilder WithInferenceBackend<T>(string? name) where T : class, IInferenceBackend
+ public ISmartComponentsBuilder WithInferenceBackend<T>(string? name) where T : class, IChatClient
{
if (string.IsNullOrEmpty(name))
{
- services.AddSingleton<IInferenceBackend, T>();
+ services.AddSingleton<IChatClient, T>();
}
else
{
- services.AddKeyedSingleton<IInferenceBackend, T>(name);
+ services.AddKeyedSingleton<IChatClient, T>(name);
}
return this;
diff --git a/src/SmartComponents.AspNetCore/ISmartComponentsBuilder.cs b/src/SmartComponents.AspNetCore/ISmartComponentsBuilder.cs
index 4486137..5b9400a 100644
--- a/src/SmartComponents.AspNetCore/ISmartComponentsBuilder.cs
+++ b/src/SmartComponents.AspNetCore/ISmartComponentsBuilder.cs
@@ -1,13 +1,13 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
-using SmartComponents.StaticAssets.Inference;
+using Microsoft.Extensions.AI;
namespace Microsoft.AspNetCore.Builder;
public interface ISmartComponentsBuilder
{
- public ISmartComponentsBuilder WithInferenceBackend<T>(string? name = null) where T : class, IInferenceBackend;
+ public ISmartComponentsBuilder WithInferenceBackend<T>(string? name = null) where T : class, IChatClient;
public ISmartComponentsBuilder WithAntiforgeryValidation();
}
diff --git a/src/SmartComponents.AspNetCore/SmartComponents.AspNetCore.csproj b/src/SmartComponents.AspNetCore/SmartComponents.AspNetCore.csproj
index 7e07d27..d05fa3e 100644
--- a/src/SmartComponents.AspNetCore/SmartComponents.AspNetCore.csproj
+++ b/src/SmartComponents.AspNetCore/SmartComponents.AspNetCore.csproj
@@ -11,6 +11,7 @@
+
diff --git a/src/SmartComponents.AspNetCore/SmartComponentsServiceCollectionExtensions.cs b/src/SmartComponents.AspNetCore/SmartComponentsServiceCollectionExtensions.cs
index 9258818..4b7d1bc 100644
--- a/src/SmartComponents.AspNetCore/SmartComponentsServiceCollectionExtensions.cs
+++ b/src/SmartComponents.AspNetCore/SmartComponentsServiceCollectionExtensions.cs
@@ -7,12 +7,12 @@
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Razor.TagHelpers;
+using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using SmartComponents.AspNetCore;
using SmartComponents.Inference;
using SmartComponents.Infrastructure;
-using SmartComponents.StaticAssets.Inference;
namespace Microsoft.AspNetCore.Builder;
@@ -40,7 +40,7 @@ public Action Configure(Action next) =
builder.UseEndpoints(app =>
{
- var smartPasteEndpoint = app.MapPost("/_smartcomponents/smartpaste", async ([FromServices] IInferenceBackend inference, HttpContext httpContext, [FromServices] IAntiforgery antiforgery, [FromServices] SmartPasteInference smartPasteInference) =>
+ var smartPasteEndpoint = app.MapPost("/_smartcomponents/smartpaste", async ([FromServices] IChatClient inference, HttpContext httpContext, [FromServices] IAntiforgery antiforgery, [FromServices] SmartPasteInference smartPasteInference) =>
{
// The rules about whether antiforgery are enabled by default vary across different
// ASP.NET Core versions. To make it consistent, we disable the default enablement on
@@ -62,7 +62,7 @@ public Action Configure(Action next) =
return result.BadRequest ? Results.BadRequest() : Results.Content(result.Response!);
});
- var smartTextAreaEndpoint = app.MapPost("/_smartcomponents/smarttextarea", async ([FromServices] IInferenceBackend inference, HttpContext httpContext, [FromServices] IAntiforgery antiforgery, [FromServices] SmartTextAreaInference smartTextAreaInference) =>
+ var smartTextAreaEndpoint = app.MapPost("/_smartcomponents/smarttextarea", async ([FromServices] IChatClient inference, HttpContext httpContext, [FromServices] IAntiforgery antiforgery, [FromServices] SmartTextAreaInference smartTextAreaInference) =>
{
if (validateAntiforgery)
{
diff --git a/src/SmartComponents.Inference.OpenAI/ApiConfig.cs b/src/SmartComponents.Inference.OpenAI/ApiConfig.cs
deleted file mode 100644
index 5a2cc86..0000000
--- a/src/SmartComponents.Inference.OpenAI/ApiConfig.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using Microsoft.Extensions.Configuration;
-
-namespace SmartComponents.Inference.OpenAI;
-
-internal class ApiConfig
-{
- public string? ApiKey { get; }
- public string? DeploymentName { get; }
- public Uri? Endpoint { get; }
- public bool SelfHosted { get; }
-
- public ApiConfig(IConfiguration config)
- {
- var configSection = config.GetRequiredSection("SmartComponents");
-
- SelfHosted = configSection.GetValue<bool?>("SelfHosted") ?? false;
-
- if (SelfHosted)
- {
- Endpoint = configSection.GetValue<Uri>("Endpoint")
- ?? throw new InvalidOperationException("Missing required configuration value: SmartComponents:Endpoint. This is required for SelfHosted inference.");
-
- // Ollama uses this, but other self-hosted backends might not, so it's optional.
- DeploymentName = configSection.GetValue<string>("DeploymentName");
-
- // Ollama doesn't use this, but other self-hosted backends might do, so it's optional.
- ApiKey = configSection.GetValue<string>("ApiKey");
- }
- else
- {
- // If set, we assume Azure OpenAI. If not, we assume OpenAI.
- Endpoint = configSection.GetValue<Uri>("Endpoint");
-
- // For Azure OpenAI, it's your deployment name. For OpenAI, it's the model name.
- DeploymentName = configSection.GetValue<string>("DeploymentName")
- ?? throw new InvalidOperationException("Missing required configuration value: SmartComponents:DeploymentName");
-
- ApiKey = configSection.GetValue<string>("ApiKey")
- ?? throw new InvalidOperationException("Missing required configuration value: SmartComponents:ApiKey");
- }
- }
-}
diff --git a/src/SmartComponents.Inference.OpenAI/OpenAIInferenceBackend.cs b/src/SmartComponents.Inference.OpenAI/OpenAIInferenceBackend.cs
deleted file mode 100644
index faa7cf2..0000000
--- a/src/SmartComponents.Inference.OpenAI/OpenAIInferenceBackend.cs
+++ /dev/null
@@ -1,89 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using System.Linq;
-using System.Threading.Tasks;
-using Azure;
-using Azure.AI.OpenAI;
-using Microsoft.Extensions.Configuration;
-using SmartComponents.StaticAssets.Inference;
-
-namespace SmartComponents.Inference.OpenAI;
-
-public class OpenAIInferenceBackend(IConfiguration configuration)
- : IInferenceBackend
-{
- public async Task<string> GetChatResponseAsync(ChatParameters options)
- {
-#if DEBUG
- if (ResponseCache.TryGetCachedResponse(options, out var cachedResponse))
- {
- return cachedResponse!;
- }
-#endif
-
- var apiConfig = new ApiConfig(configuration);
- var client = CreateClient(apiConfig);
- var chatCompletionsOptions = new ChatCompletionsOptions
- {
- DeploymentName = apiConfig.DeploymentName,
- Temperature = options.Temperature ?? 0f,
- NucleusSamplingFactor = options.TopP ?? 1,
- MaxTokens = options.MaxTokens ?? 200,
- FrequencyPenalty = options.FrequencyPenalty ?? 0,
- PresencePenalty = options.PresencePenalty ?? 0,
- ResponseFormat = options.RespondJson ? ChatCompletionsResponseFormat.JsonObject : ChatCompletionsResponseFormat.Text,
- };
-
- foreach (var message in options.Messages ?? Enumerable.Empty<ChatMessage>())
- {
- chatCompletionsOptions.Messages.Add(message.Role switch
- {
- ChatMessageRole.System => new ChatRequestSystemMessage(message.Text),
- ChatMessageRole.User => new ChatRequestUserMessage(message.Text),
- ChatMessageRole.Assistant => new ChatRequestAssistantMessage(message.Text),
- _ => throw new InvalidOperationException($"Unknown chat message role: {message.Role}")
- });
- }
-
- if (options.StopSequences is { } stopSequences)
- {
- foreach (var stopSequence in stopSequences)
- {
- chatCompletionsOptions.StopSequences.Add(stopSequence);
- }
- }
-
- var completionsResponse = await client.GetChatCompletionsAsync(chatCompletionsOptions);
-
- var response = completionsResponse.Value.Choices.FirstOrDefault()?.Message.Content ?? string.Empty;
-
-#if DEBUG
- ResponseCache.SetCachedResponse(options, response);
-#endif
-
- return response;
- }
-
- private static OpenAIClient CreateClient(ApiConfig apiConfig)
- {
- if (apiConfig.SelfHosted)
- {
- var transport = new SelfHostedLlmTransport(apiConfig.Endpoint!);
- return new OpenAIClient(apiConfig.ApiKey, new() { Transport = transport });
- }
- else if (apiConfig.Endpoint is null)
- {
- // OpenAI
- return new OpenAIClient(apiConfig.ApiKey);
- }
- else
- {
- // Azure OpenAI
- return new OpenAIClient(
- apiConfig.Endpoint,
- new AzureKeyCredential(apiConfig.ApiKey!));
- }
- }
-}
diff --git a/src/SmartComponents.Inference.OpenAI/SelfHostedLlmTransport.cs b/src/SmartComponents.Inference.OpenAI/SelfHostedLlmTransport.cs
deleted file mode 100644
index 9c79915..0000000
--- a/src/SmartComponents.Inference.OpenAI/SelfHostedLlmTransport.cs
+++ /dev/null
@@ -1,24 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using System.Threading.Tasks;
-using Azure.Core;
-using Azure.Core.Pipeline;
-
-namespace SmartComponents.Inference.OpenAI;
-
-///
-/// Used to resolve queries using Ollama or anything else that exposes an OpenAI-compatible
-/// endpoint with a scheme/host/port set of your choice.
-///
-internal class SelfHostedLlmTransport(Uri endpoint) : HttpClientTransport
-{
- public override ValueTask ProcessAsync(HttpMessage message)
- {
- message.Request.Uri.Scheme = endpoint.Scheme;
- message.Request.Uri.Host = endpoint.Host;
- message.Request.Uri.Port = endpoint.Port;
- return base.ProcessAsync(message);
- }
-}
diff --git a/src/SmartComponents.Inference.OpenAI/SmartComponents.Inference.OpenAI.csproj b/src/SmartComponents.Inference.OpenAI/SmartComponents.Inference.OpenAI.csproj
deleted file mode 100644
index ed231ad..0000000
--- a/src/SmartComponents.Inference.OpenAI/SmartComponents.Inference.OpenAI.csproj
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-
- netstandard2.0
- latest
- true
- enable
-
-
-
-
-
-
-
-
-
diff --git a/src/SmartComponents.Inference/ChatParameters.cs b/src/SmartComponents.Inference/ChatParameters.cs
index 435ff6c..952da99 100644
--- a/src/SmartComponents.Inference/ChatParameters.cs
+++ b/src/SmartComponents.Inference/ChatParameters.cs
@@ -1,34 +1,17 @@
-// Licensed to the .NET Foundation under one or more agreements.
+// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Collections.Generic;
using System.Text.Json.Serialization;
+using Microsoft.Extensions.AI;
namespace SmartComponents.StaticAssets.Inference;
public class ChatParameters
{
- public IList<ChatMessage>? Messages { get; set; }
- public float? Temperature { get; set; }
- public float? TopP { get; set; }
- public int? MaxTokens { get; set; }
- public float? FrequencyPenalty { get; set; }
- public float? PresencePenalty { get; set; }
- public IList<string>? StopSequences { get; set; }
+ public IList<ChatMessage> Messages { get; set; } = [];
+ public ChatOptions? Options { get; set; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public bool RespondJson { get; set; }
}
-
-public class ChatMessage(ChatMessageRole role, string text)
-{
- public ChatMessageRole Role => role;
- public string Text => text;
-}
-
-public enum ChatMessageRole
-{
- System,
- User,
- Assistant,
-}
diff --git a/src/SmartComponents.Inference/IInferenceBackend.cs b/src/SmartComponents.Inference/IInferenceBackend.cs
deleted file mode 100644
index 0ed0771..0000000
--- a/src/SmartComponents.Inference/IInferenceBackend.cs
+++ /dev/null
@@ -1,11 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Threading.Tasks;
-
-namespace SmartComponents.StaticAssets.Inference;
-
-public interface IInferenceBackend
-{
- Task<string> GetChatResponseAsync(ChatParameters options);
-}
diff --git a/src/SmartComponents.Inference.OpenAI/ResponseCache.cs b/src/SmartComponents.Inference/ResponseCache.cs
similarity index 95%
rename from src/SmartComponents.Inference.OpenAI/ResponseCache.cs
rename to src/SmartComponents.Inference/ResponseCache.cs
index 9435b4d..f3c9159 100644
--- a/src/SmartComponents.Inference.OpenAI/ResponseCache.cs
+++ b/src/SmartComponents.Inference/ResponseCache.cs
@@ -11,7 +11,7 @@
using System.Text.Json;
using SmartComponents.StaticAssets.Inference;
-namespace SmartComponents.Inference.OpenAI;
+namespace SmartComponents.Inference;
// This is primarily so that E2E tests running in CI don't have to call OpenAI for real, so that:
// [1] We don't have to make the API keys available to CI
@@ -20,9 +20,9 @@ namespace SmartComponents.Inference.OpenAI;
internal static class ResponseCache
{
- static bool IsEnabled = Environment.GetEnvironmentVariable("SMARTCOMPONENTS_E2E_TEST") == "true";
+ static readonly bool IsEnabled = Environment.GetEnvironmentVariable("SMARTCOMPONENTS_E2E_TEST") == "true";
- readonly static Lazy CacheDir = new(() =>
+ static readonly Lazy CacheDir = new(() =>
{
var dir = Path.Combine(GetSolutionDirectory(), "test", "CachedResponses");
Directory.CreateDirectory(dir);
diff --git a/src/SmartComponents.Inference/SmartComponents.Inference.csproj b/src/SmartComponents.Inference/SmartComponents.Inference.csproj
index b458e8d..75a67aa 100644
--- a/src/SmartComponents.Inference/SmartComponents.Inference.csproj
+++ b/src/SmartComponents.Inference/SmartComponents.Inference.csproj
@@ -8,6 +8,7 @@
+
diff --git a/src/SmartComponents.Inference/SmartComponentsChatClient.cs b/src/SmartComponents.Inference/SmartComponentsChatClient.cs
new file mode 100644
index 0000000..0bc9ff6
--- /dev/null
+++ b/src/SmartComponents.Inference/SmartComponentsChatClient.cs
@@ -0,0 +1,30 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.AI;
+using SmartComponents.StaticAssets.Inference;
+
+namespace SmartComponents.Inference;
+
+public class SmartComponentsChatClient(IChatClient client) : DelegatingChatClient(client)
+{
+ public override async Task<ChatResponse> GetResponseAsync(IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default)
+ {
+ var chatParameters = new ChatParameters() { Messages = [.. messages], Options = options };
+#if DEBUG
+ if (ResponseCache.TryGetCachedResponse(chatParameters, out var cachedResponse))
+ {
+ return await Task.FromResult(new ChatResponse(new ChatMessage(ChatRole.Assistant, cachedResponse)));
+ }
+#endif
+ var response = await base.GetResponseAsync(messages, options, cancellationToken);
+
+#if DEBUG
+ ResponseCache.SetCachedResponse(chatParameters, response.Text);
+#endif
+ return response;
+ }
+}
diff --git a/src/SmartComponents.Inference/SmartPasteInference.cs b/src/SmartComponents.Inference/SmartPasteInference.cs
index 84b187f..d952fb4 100644
--- a/src/SmartComponents.Inference/SmartPasteInference.cs
+++ b/src/SmartComponents.Inference/SmartPasteInference.cs
@@ -7,13 +7,13 @@
using System.Text.Json;
using System.Threading.Tasks;
using SmartComponents.StaticAssets.Inference;
+using Microsoft.Extensions.AI;
namespace SmartComponents.Inference;
public class SmartPasteInference
{
- private static readonly JsonSerializerOptions jsonSerializerOptions
- = new JsonSerializerOptions(JsonSerializerDefaults.Web);
+ private static readonly JsonSerializerOptions jsonSerializerOptions = new(JsonSerializerDefaults.Web);
public class SmartPasteRequestData
{
@@ -35,7 +35,7 @@ public readonly struct SmartPasteResponseData
public string? Response { get; init; }
}
- public Task<SmartPasteResponseData> GetFormCompletionsAsync(IInferenceBackend inferenceBackend, string dataJson)
+ public Task<SmartPasteResponseData> GetFormCompletionsAsync(IChatClient inferenceBackend, string dataJson)
{
var data = JsonSerializer.Deserialize<SmartPasteRequestData>(dataJson, jsonSerializerOptions)!;
if (data.FormFields is null || data.FormFields.Length == 0 || string.IsNullOrEmpty(data.ClipboardContents))
@@ -65,23 +65,26 @@ Do not explain how the values were determined.
return new ChatParameters
{
Messages = [
- new (ChatMessageRole.System, systemMessage),
- new (ChatMessageRole.User, prompt),
+ new (ChatRole.System, systemMessage),
+ new (ChatRole.User, prompt),
],
- Temperature = 0,
- TopP = 1,
- MaxTokens = 2000,
- FrequencyPenalty = 0.1f,
- PresencePenalty = 0,
+ Options = new ChatOptions
+ {
+ Temperature = 0,
+ TopP = 1,
+ MaxOutputTokens = 2000,
+ FrequencyPenalty = 0.1f,
+ PresencePenalty = 0
+ },
RespondJson = true,
};
}
- public virtual async Task<SmartPasteResponseData> GetFormCompletionsAsync(IInferenceBackend inferenceBackend, SmartPasteRequestData requestData)
+ public virtual async Task<SmartPasteResponseData> GetFormCompletionsAsync(IChatClient inferenceBackend, SmartPasteRequestData requestData)
{
- var chatOptions = BuildPrompt(requestData);
- var completionsResponse = await inferenceBackend.GetChatResponseAsync(chatOptions);
- return new SmartPasteResponseData { Response = completionsResponse };
+ var chatParameters = BuildPrompt(requestData);
+ var completionsResponse = await inferenceBackend.GetResponseAsync(chatParameters.Messages, chatParameters.Options);
+ return new SmartPasteResponseData { Response = completionsResponse.Text };
}
private static string ToFieldOutputExamples(FormField[] fields)
diff --git a/src/SmartComponents.Inference/SmartTextAreaInference.cs b/src/SmartComponents.Inference/SmartTextAreaInference.cs
index cc942f3..40d2a77 100644
--- a/src/SmartComponents.Inference/SmartTextAreaInference.cs
+++ b/src/SmartComponents.Inference/SmartTextAreaInference.cs
@@ -1,10 +1,11 @@
-// Licensed to the .NET Foundation under one or more agreements.
+// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
+using Microsoft.Extensions.AI;
using SmartComponents.Infrastructure;
using SmartComponents.StaticAssets.Inference;
@@ -16,14 +17,14 @@ public virtual ChatParameters BuildPrompt(SmartTextAreaConfig config, string tex
{
var systemMessageBuilder = new StringBuilder();
systemMessageBuilder.Append(@"Predict what text the user in the given ROLE would insert at the cursor position indicated by ^^^.
-Only give predictions for which you have an EXTREMELY high confidence that the user would insert that EXACT text.
-Do not make up new information. If you're not sure, just reply with NO_PREDICTION.
+ Only give predictions for which you have an EXTREMELY high confidence that the user would insert that EXACT text.
+ Do not make up new information. If you're not sure, just reply with NO_PREDICTION.
-RULES:
-1. Reply with OK:, then in square brackets the predicted text, then END_INSERTION, and no other output.
-2. When a specific value or quantity cannot be inferred and would need to be provided, use the word NEED_INFO.
-3. If there isn't enough information to predict any words that the user would type next, just reply with the word NO_PREDICTION.
-4. NEVER invent new information. If you can't be sure what the user is about to type, ALWAYS stop the prediction with END_INSERTION.");
+ RULES:
+ 1. Reply with OK:, then in square brackets the predicted text, then END_INSERTION, and no other output.
+ 2. When a specific value or quantity cannot be inferred and would need to be provided, use the word NEED_INFO.
+ 3. If there isn't enough information to predict any words that the user would type next, just reply with the word NO_PREDICTION.
+ 4. NEVER invent new information. If you can't be sure what the user is about to type, ALWAYS stop the prediction with END_INSERTION.");
if (config.UserPhrases is { Length: > 0 } stockPhrases)
{
@@ -34,68 +35,73 @@ public virtual ChatParameters BuildPrompt(SmartTextAreaConfig config, string tex
}
}
- List<ChatMessage> messages =
- [
- new(ChatMessageRole.System, systemMessageBuilder.ToString()),
+ List<ChatMessage> messages = new()
+ {
+ new(ChatRole.System, systemMessageBuilder.ToString()),
- new(ChatMessageRole.User, @"ROLE: Family member sending a text
-USER_TEXT: Hey, it's a nice day - the weather is ^^^"),
- new(ChatMessageRole.Assistant, @"OK:[great!]END_INSERTION"),
+ new(ChatRole.User, @"ROLE: Family member sending a text
+ USER_TEXT: Hey, it's a nice day - the weather is ^^^"),
+ new(ChatRole.Assistant, @"OK:[great!]END_INSERTION"),
- new(ChatMessageRole.User, @"ROLE: Customer service assistant
-USER_TEXT: You can find more information on^^^
+ new(ChatRole.User, @"ROLE: Customer service assistant
+ USER_TEXT: You can find more information on^^^
-Alternatively, phone us."),
- new(ChatMessageRole.Assistant, @"OK:[ our website at NEED_INFO]END_INSERTION"),
+ Alternatively, phone us."),
+ new(ChatRole.Assistant, @"OK:[ our website at NEED_INFO]END_INSERTION"),
- new(ChatMessageRole.User, @"ROLE: Casual
-USER_TEXT: Oh I see!
+ new(ChatRole.User, @"ROLE: Casual
+ USER_TEXT: Oh I see!
-Well sure thing, we can"),
- new(ChatMessageRole.Assistant, @"OK:[ help you out with that!]END_INSERTION"),
+ Well sure thing, we can"),
+ new(ChatRole.Assistant, @"OK:[ help you out with that!]END_INSERTION"),
- new(ChatMessageRole.User, @"ROLE: Storyteller
-USER_TEXT: Sir Digby Chicken Caesar, also know^^^"),
- new(ChatMessageRole.Assistant, @"OK:[n as NEED_INFO]END_INSERTION"),
+ new(ChatRole.User, @"ROLE: Storyteller
+ USER_TEXT: Sir Digby Chicken Caesar, also know^^^"),
+ new(ChatRole.Assistant, @"OK:[n as NEED_INFO]END_INSERTION"),
- new(ChatMessageRole.User, @"ROLE: Customer support agent
-USER_TEXT: Goodbye for now.^^^"),
- new(ChatMessageRole.Assistant, @"NO_PREDICTION END_INSERTION"),
+ new(ChatRole.User, @"ROLE: Customer support agent
+ USER_TEXT: Goodbye for now.^^^"),
+ new(ChatRole.Assistant, @"NO_PREDICTION END_INSERTION"),
- new(ChatMessageRole.User, @"ROLE: Pirate
-USER_TEXT: Have you found^^^"),
- new(ChatMessageRole.Assistant, @"OK:[ the treasure, me hearties?]END_INSERTION"),
+ new(ChatRole.User, @"ROLE: Pirate
+ USER_TEXT: Have you found^^^"),
+ new(ChatRole.Assistant, @"OK:[ the treasure, me hearties?]END_INSERTION"),
- new(ChatMessageRole.User, @$"ROLE: {config.UserRole}
-USER_TEXT: {textBefore}^^^{textAfter}"),
- ];
+ new(ChatRole.User, @$"ROLE: {config.UserRole}
+ USER_TEXT: {textBefore}^^^{textAfter}"),
+ };
return new ChatParameters
{
Messages = messages,
- Temperature = 0,
- MaxTokens = 400,
- StopSequences = ["END_INSERTION", "NEED_INFO"],
- FrequencyPenalty = 0,
- PresencePenalty = 0,
+ Options = new ChatOptions
+ {
+ Temperature = 0,
+ MaxOutputTokens = 400,
+ StopSequences = ["END_INSERTION", "NEED_INFO"],
+ FrequencyPenalty = 0,
+ PresencePenalty = 0,
+ }
};
}
- public virtual async Task<string> GetInsertionSuggestionAsync(IInferenceBackend inference, SmartTextAreaConfig config, string textBefore, string textAfter)
+ public virtual async Task<string> GetInsertionSuggestionAsync(IChatClient inference, SmartTextAreaConfig config, string textBefore, string textAfter)
{
- var chatOptions = BuildPrompt(config, textBefore, textAfter);
- var response = await inference.GetChatResponseAsync(chatOptions);
- if (response.Length > 5 && response.StartsWith("OK:[", StringComparison.Ordinal))
+ var chatParameters = BuildPrompt(config, textBefore, textAfter);
+ var response = await inference.GetResponseAsync(chatParameters.Messages, chatParameters.Options);
+ var responseText = response.Text;
+
+ if (responseText.Length > 5 && responseText.StartsWith("OK:[", StringComparison.Ordinal))
{
// Avoid returning multiple sentences as it's unlikely to avoid inventing some new train of thought.
- var trimAfter = response.IndexOfAny(['.', '?', '!']);
- if (trimAfter > 0 && response.Length > trimAfter + 1 && response[trimAfter + 1] == ' ')
+ var trimAfter = responseText.IndexOfAny(['.', '?', '!']);
+ if (trimAfter > 0 && responseText.Length > trimAfter + 1 && responseText[trimAfter + 1] == ' ')
{
- response = response.Substring(0, trimAfter + 1);
+ responseText = responseText.Substring(0, trimAfter + 1);
}
// Leave it up to the frontend code to decide whether to add a training space
- var trimmedResponse = response.Substring(4).TrimEnd(']', ' ');
+ var trimmedResponse = responseText.Substring(4).TrimEnd(']', ' ');
// Don't have a leading space on the suggestion if there's already a space right
// before the cursor. The language model normally gets this right anyway (distinguishing
diff --git a/src/SmartComponents.LocalEmbeddings/LocalEmbedder.FindClosest.cs b/src/SmartComponents.LocalEmbeddings/LocalEmbedder.FindClosest.cs
index c828c5f..d32ea05 100644
--- a/src/SmartComponents.LocalEmbeddings/LocalEmbedder.FindClosest.cs
+++ b/src/SmartComponents.LocalEmbeddings/LocalEmbedder.FindClosest.cs
@@ -1,4 +1,4 @@
-// Licensed to the .NET Foundation under one or more agreements.
+// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
diff --git a/src/shared/RepoSharedConfigUtil.cs b/src/shared/RepoSharedConfigUtil.cs
index faf882f..7c964ff 100644
--- a/src/shared/RepoSharedConfigUtil.cs
+++ b/src/shared/RepoSharedConfigUtil.cs
@@ -2,7 +2,6 @@
// The .NET Foundation licenses this file to you under the MIT license.
using System.Reflection;
-using SmartComponents.Inference.OpenAI;
public static class RepoSharedConfigUtil
{
@@ -41,20 +40,22 @@ public static void AddRepoSharedConfig(this IConfigurationBuilder configuration)
public static Exception? GetConfigError(IConfiguration config)
{
- var apiConfigType = typeof(OpenAIInferenceBackend).Assembly
- .GetType("SmartComponents.Inference.OpenAI.ApiConfig", true)!;
- try
- {
- _ = Activator.CreateInstance(apiConfigType, config);
- }
- catch (TargetInvocationException ex) when (ex.InnerException is not null)
- {
- return ex.InnerException;
- }
- catch (Exception ex)
- {
- return ex;
- }
+ // TODO: With the move to Microsoft.Extensions.AI.Abstractions, config validation is backend-specific; consider removing this method (and its callers) or instead validating that an IChatClient is registered.
+
+ //var apiConfigType = typeof(OpenAIInferenceBackend).Assembly
+ // .GetType("SmartComponents.Inference.OpenAI.ApiConfig", true)!;
+ //try
+ //{
+ // _ = Activator.CreateInstance(apiConfigType, config);
+ //}
+ //catch (TargetInvocationException ex) when (ex.InnerException is not null)
+ //{
+ // return ex.InnerException;
+ //}
+ //catch (Exception ex)
+ //{
+ // return ex;
+ //}
return null;
}
diff --git a/test/CachedResponses/12093a4e527fd7ad_USERDATA_____________Hairstyle.response.txt b/test/CachedResponses/12093a4e527fd7ad_USERDATA_____________Hairstyle.response.txt
new file mode 100644
index 0000000..fcc7377
--- /dev/null
+++ b/test/CachedResponses/12093a4e527fd7ad_USERDATA_____________Hairstyle.response.txt
@@ -0,0 +1,7 @@
+{
+ "explicitly-annotated": "Cairns",
+ "labelled-field": "Sad Iron",
+ "inferred-from-nearby-text": "Tonsure",
+ "shoe-size": "55",
+ "philosophy": "Nihilism"
+}
\ No newline at end of file
diff --git a/test/CachedResponses/160b624941a6227d_ROLE_Weather_reporter____USERT.response.txt b/test/CachedResponses/160b624941a6227d_ROLE_Weather_reporter____USERT.response.txt
new file mode 100644
index 0000000..cd864db
--- /dev/null
+++ b/test/CachedResponses/160b624941a6227d_ROLE_Weather_reporter____USERT.response.txt
@@ -0,0 +1 @@
+OK:[itively sweltering!]
\ No newline at end of file
diff --git a/test/CachedResponses/2b2c2ffa8720c384_USERDATA_____________Hairstyle.response.txt b/test/CachedResponses/2b2c2ffa8720c384_USERDATA_____________Hairstyle.response.txt
new file mode 100644
index 0000000..fcc7377
--- /dev/null
+++ b/test/CachedResponses/2b2c2ffa8720c384_USERDATA_____________Hairstyle.response.txt
@@ -0,0 +1,7 @@
+{
+ "explicitly-annotated": "Cairns",
+ "labelled-field": "Sad Iron",
+ "inferred-from-nearby-text": "Tonsure",
+ "shoe-size": "55",
+ "philosophy": "Nihilism"
+}
\ No newline at end of file
diff --git a/test/CachedResponses/3a876d98b76a4111_ROLE_Weather_reporter____USERT.response.txt b/test/CachedResponses/3a876d98b76a4111_ROLE_Weather_reporter____USERT.response.txt
new file mode 100644
index 0000000..451d745
--- /dev/null
+++ b/test/CachedResponses/3a876d98b76a4111_ROLE_Weather_reporter____USERT.response.txt
@@ -0,0 +1 @@
+OK:[I hope you're staying cool out there!]
\ No newline at end of file
diff --git a/test/CachedResponses/4c76eb1253df6168_USERDATA_AI_Artificial_Intelli.response.txt b/test/CachedResponses/4c76eb1253df6168_USERDATA_AI_Artificial_Intelli.response.txt
new file mode 100644
index 0000000..bb9ec92
--- /dev/null
+++ b/test/CachedResponses/4c76eb1253df6168_USERDATA_AI_Artificial_Intelli.response.txt
@@ -0,0 +1,9 @@
+{
+ "movie.title": "AI: Artificial Intelligence",
+ "movie.release_year": 2001,
+ "movie.description": "A sci-fi movie about a robot boy who desperately wants to be human. The tragedy at the heart of the film is star Haley Joel Osment’s immortality. He was designed as a child, but outlives everyone he ever loves.",
+ "movie_genre": "Sci-fi",
+ "movie.for_kids": false,
+ "movie.can_stream": true,
+ "movie.starring": "Haley Joel Osment"
+}
\ No newline at end of file
diff --git a/test/CachedResponses/4cbda80b74b3c063_USERDATA_Rahul_Mandal.response.txt b/test/CachedResponses/4cbda80b74b3c063_USERDATA_Rahul_Mandal.response.txt
new file mode 100644
index 0000000..4922304
--- /dev/null
+++ b/test/CachedResponses/4cbda80b74b3c063_USERDATA_Rahul_Mandal.response.txt
@@ -0,0 +1,5 @@
+{
+ "firstname": "Rahul",
+ "lastname": "Mandal",
+ "unrelated": null
+}
\ No newline at end of file
diff --git a/test/CachedResponses/64f8c9e256903113_USERDATA_Rahul_Mandal.response.txt b/test/CachedResponses/64f8c9e256903113_USERDATA_Rahul_Mandal.response.txt
new file mode 100644
index 0000000..4922304
--- /dev/null
+++ b/test/CachedResponses/64f8c9e256903113_USERDATA_Rahul_Mandal.response.txt
@@ -0,0 +1,5 @@
+{
+ "firstname": "Rahul",
+ "lastname": "Mandal",
+ "unrelated": null
+}
\ No newline at end of file
diff --git a/test/CachedResponses/79ee341c9c0df80e_USERDATA_AI_Artificial_Intelli.response.txt b/test/CachedResponses/79ee341c9c0df80e_USERDATA_AI_Artificial_Intelli.response.txt
new file mode 100644
index 0000000..bb9ec92
--- /dev/null
+++ b/test/CachedResponses/79ee341c9c0df80e_USERDATA_AI_Artificial_Intelli.response.txt
@@ -0,0 +1,9 @@
+{
+ "movie.title": "AI: Artificial Intelligence",
+ "movie.release_year": 2001,
+ "movie.description": "A sci-fi movie about a robot boy who desperately wants to be human. The tragedy at the heart of the film is star Haley Joel Osment’s immortality. He was designed as a child, but outlives everyone he ever loves.",
+ "movie_genre": "Sci-fi",
+ "movie.for_kids": false,
+ "movie.can_stream": true,
+ "movie.starring": "Haley Joel Osment"
+}
\ No newline at end of file
diff --git a/test/CachedResponses/c3728e29a1014e5a_ROLE_Weather_reporter____USERT.response.txt b/test/CachedResponses/c3728e29a1014e5a_ROLE_Weather_reporter____USERT.response.txt
new file mode 100644
index 0000000..77126b4
--- /dev/null
+++ b/test/CachedResponses/c3728e29a1014e5a_ROLE_Weather_reporter____USERT.response.txt
@@ -0,0 +1 @@
+OK:[ the hottest day of the year! That's positively sweltering!]
\ No newline at end of file
diff --git a/test/testassets/TestBlazorApp/Program.cs b/test/testassets/TestBlazorApp/Program.cs
index 6e3a9db..f165ea9 100644
--- a/test/testassets/TestBlazorApp/Program.cs
+++ b/test/testassets/TestBlazorApp/Program.cs
@@ -2,8 +2,9 @@
// The .NET Foundation licenses this file to you under the MIT license.
using E2ETests;
+using Microsoft.Extensions.AI;
+using OpenAI;
using SmartComponents.Inference;
-using SmartComponents.Inference.OpenAI;
using SmartComponents.LocalEmbeddings;
using TestBlazorApp.Components;
@@ -22,9 +23,19 @@ private static void Main(string[] args)
.AddInteractiveWebAssemblyComponents();
builder.Services.AddScoped();
builder.Services.AddSmartComponents()
- .WithInferenceBackend()
.WithAntiforgeryValidation(); // This doesn't benefit most apps, but we'll validate it works in E2E tests
+ // Note: the StartupKey value is just there so the app will start up.
+ builder.Services.AddSingleton(new OpenAIClient(builder.Configuration["AI:OpenAI:Key"] ?? "StartupKey"));
+ builder.Services.AddChatClient(services =>
+ {
+ var chatClient = new SmartComponentsChatClient(services.GetRequiredService()
+ .AsChatClient(builder.Configuration["AI:OpenAI:Chat:ModelId"] ?? "gpt-4o-mini"));
+ return chatClient;
+ });
+ builder.Services.AddEmbeddingGenerator(services =>
+ services.GetRequiredService().AsEmbeddingGenerator(builder.Configuration["AI:OpenAI:Embedding:ModelId"] ?? "text-embedding-3-small"));
+
var app = builder.Build();
// Show we can work with pathbase by enforcing its use
diff --git a/test/testassets/TestBlazorApp/SmartPasteInferenceForTests.cs b/test/testassets/TestBlazorApp/SmartPasteInferenceForTests.cs
index 4347038..9c2e451 100644
--- a/test/testassets/TestBlazorApp/SmartPasteInferenceForTests.cs
+++ b/test/testassets/TestBlazorApp/SmartPasteInferenceForTests.cs
@@ -1,8 +1,9 @@
-// Licensed to the .NET Foundation under one or more agreements.
+// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Globalization;
using System.Text.RegularExpressions;
+using Microsoft.Extensions.AI;
using SmartComponents.Inference;
using SmartComponents.StaticAssets.Inference;
diff --git a/test/testassets/TestBlazorApp/TestBlazorApp.csproj b/test/testassets/TestBlazorApp/TestBlazorApp.csproj
index 18aef1a..ad3adef 100644
--- a/test/testassets/TestBlazorApp/TestBlazorApp.csproj
+++ b/test/testassets/TestBlazorApp/TestBlazorApp.csproj
@@ -8,10 +8,11 @@
-
+
+
diff --git a/test/testassets/TestBlazorServerNet6App/Program.cs b/test/testassets/TestBlazorServerNet6App/Program.cs
index afb5f10..e93e4a5 100644
--- a/test/testassets/TestBlazorServerNet6App/Program.cs
+++ b/test/testassets/TestBlazorServerNet6App/Program.cs
@@ -2,8 +2,9 @@
// The .NET Foundation licenses this file to you under the MIT license.
using E2ETests;
+using Microsoft.Extensions.AI;
+using OpenAI;
using SmartComponents.Inference;
-using SmartComponents.Inference.OpenAI;
namespace TestBlazorServerNet6App;
@@ -17,9 +18,20 @@ private static void Main(string[] args)
builder.Services.AddScoped();
builder.Services.AddRazorPages();
builder.Services.AddServerSideBlazor();
- builder.Services.AddSmartComponents().WithInferenceBackend();
+ builder.Services.AddSmartComponents();
builder.Configuration.AddRepoSharedConfig();
+ // Note: the StartupKey value is just there so the app will start up.
+ builder.Services.AddSingleton(new OpenAIClient(builder.Configuration["AI:OpenAI:Key"] ?? "StartupKey"));
+ builder.Services.AddChatClient(services =>
+ {
+ var chatClient = new SmartComponentsChatClient(services.GetRequiredService()
+ .AsChatClient(builder.Configuration["AI:OpenAI:Chat:ModelId"] ?? "gpt-4o-mini"));
+ return chatClient;
+ });
+ builder.Services.AddEmbeddingGenerator(services =>
+ services.GetRequiredService().AsEmbeddingGenerator(builder.Configuration["AI:OpenAI:Embedding:ModelId"] ?? "text-embedding-3-small"));
+
var app = builder.Build();
// Show we can work with pathbase by enforcing its use
diff --git a/test/testassets/TestBlazorServerNet6App/TestBlazorServerNet6App.csproj b/test/testassets/TestBlazorServerNet6App/TestBlazorServerNet6App.csproj
index 58ef720..53ca60e 100644
--- a/test/testassets/TestBlazorServerNet6App/TestBlazorServerNet6App.csproj
+++ b/test/testassets/TestBlazorServerNet6App/TestBlazorServerNet6App.csproj
@@ -8,10 +8,14 @@
-
+
+
+
+
+
diff --git a/test/testassets/TestMvcApp/Program.cs b/test/testassets/TestMvcApp/Program.cs
index 97c3447..9450940 100644
--- a/test/testassets/TestMvcApp/Program.cs
+++ b/test/testassets/TestMvcApp/Program.cs
@@ -2,8 +2,9 @@
// The .NET Foundation licenses this file to you under the MIT license.
using E2ETests;
+using Microsoft.Extensions.AI;
+using OpenAI;
using SmartComponents.Inference;
-using SmartComponents.Inference.OpenAI;
using SmartComponents.LocalEmbeddings;
namespace TestMvcApp;
@@ -19,9 +20,19 @@ private static void Main(string[] args)
builder.Services.AddControllersWithViews();
builder.Services.AddScoped();
builder.Services.AddSmartComponents()
- .WithInferenceBackend()
.WithAntiforgeryValidation(); // This doesn't benefit most apps, but we'll validate it works in E2E tests
+ // Note: the StartupKey value is just there so the app will start up.
+ builder.Services.AddSingleton(new OpenAIClient(builder.Configuration["AI:OpenAI:Key"] ?? "StartupKey"));
+ builder.Services.AddChatClient(services =>
+ {
+ var chatClient = new SmartComponentsChatClient(services.GetRequiredService()
+ .AsChatClient(builder.Configuration["AI:OpenAI:Chat:ModelId"] ?? "gpt-4o-mini"));
+ return chatClient;
+ });
+ builder.Services.AddEmbeddingGenerator(services =>
+ services.GetRequiredService().AsEmbeddingGenerator(builder.Configuration["AI:OpenAI:Embedding:ModelId"] ?? "text-embedding-3-small"));
+
var app = builder.Build();
// Show we can work with pathbase by enforcing its use
diff --git a/test/testassets/TestMvcApp/TestMvcApp.csproj b/test/testassets/TestMvcApp/TestMvcApp.csproj
index 4248717..5a0a579 100644
--- a/test/testassets/TestMvcApp/TestMvcApp.csproj
+++ b/test/testassets/TestMvcApp/TestMvcApp.csproj
@@ -9,7 +9,6 @@
-
@@ -19,6 +18,11 @@
+
+
+
+
+