-
Notifications
You must be signed in to change notification settings - Fork 3.5k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
.Net Simplify configuration by ServiceId on Multi Model Scenarios. (#…
…6416) ### Motivation and Context Setting multiple execution settings is not simple and demands creating a dictionary on the caller side to set directly into the `ExecutionSettings` setter property. This change adds a `ServiceId` property to the execution settings, which is used during initialization and deserialization to set the expected `Key` in the dictionary, as well as for filtering and executing a service-specific function invocation. This change also adds new constructors for `PromptTemplateConfig` and `KernelArguments` that accept multiple `PromptExecutionSettings`, as well as new overloads for `Kernel.CreateFunctionFromPrompt` and `KernelFunctionFromPrompt.Create`. ### ServiceId Settings Before: ```csharp KernelArguments arguments = []; arguments.ExecutionSettings = new Dictionary<string, PromptExecutionSettings>() { { serviceId, new PromptExecutionSettings() } }; var result = await kernel.InvokePromptAsync(prompt, arguments); ``` After: ```csharp var result = await kernel.InvokePromptAsync(prompt, new(new PromptExecutionSettings { ServiceId = serviceId })); ``` ### ModelIds Settings Before: ```csharp string[] modelIds = ["model1", "model2", ...]; var modelSettings = new Dictionary<string, PromptExecutionSettings>(); foreach (var modelId in modelIds) { modelSettings.Add(modelId, new PromptExecutionSettings() { ModelId = modelId }); } var promptConfig = new PromptTemplateConfig(prompt) { Name = "HelloAI", ExecutionSettings = modelSettings }; var function = kernel.CreateFunctionFromPrompt(promptConfig); ``` After: ```csharp string[] modelIds = ["model1", "model2", ...]; var function = kernel.CreateFunctionFromPrompt(prompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })); ``` The same can be done for ServiceId settings: ```csharp string[] serviceIds = ["service1", "service2"... 
]; var function = kernel.CreateFunctionFromPrompt(prompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })); ``` --------- Co-authored-by: Mark Wallace <[email protected]>
- Loading branch information
1 parent
8d7845d
commit f9a53a1
Showing
13 changed files
with
734 additions
and
58 deletions.
There are no files selected for viewing
197 changes: 150 additions & 47 deletions
197
dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,82 +1,185 @@ | ||
// Copyright (c) Microsoft. All rights reserved. | ||
|
||
using Microsoft.SemanticKernel; | ||
using xRetry; | ||
|
||
namespace ChatCompletion; | ||
|
||
public class Connectors_WithMultipleLLMs(ITestOutputHelper output) : BaseTest(output) | ||
{ | ||
/// <summary> | ||
/// Show how to run a prompt function and specify a specific service to use. | ||
/// </summary> | ||
[RetryFact(typeof(HttpOperationException))] | ||
public async Task RunAsync() | ||
private const string ChatPrompt = "Hello AI, what can you do for me?"; | ||
|
||
private static Kernel BuildKernel() | ||
{ | ||
Kernel kernel = Kernel.CreateBuilder() | ||
.AddAzureOpenAIChatCompletion( | ||
deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, | ||
endpoint: TestConfiguration.AzureOpenAI.Endpoint, | ||
apiKey: TestConfiguration.AzureOpenAI.ApiKey, | ||
serviceId: "AzureOpenAIChat", | ||
modelId: TestConfiguration.AzureOpenAI.ChatModelId) | ||
.AddOpenAIChatCompletion( | ||
modelId: TestConfiguration.OpenAI.ChatModelId, | ||
apiKey: TestConfiguration.OpenAI.ApiKey, | ||
serviceId: "OpenAIChat") | ||
.Build(); | ||
|
||
await RunByServiceIdAsync(kernel, "AzureOpenAIChat"); | ||
await RunByModelIdAsync(kernel, TestConfiguration.OpenAI.ChatModelId); | ||
await RunByFirstModelIdAsync(kernel, "gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId); | ||
return Kernel.CreateBuilder() | ||
.AddAzureOpenAIChatCompletion( | ||
deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, | ||
endpoint: TestConfiguration.AzureOpenAI.Endpoint, | ||
apiKey: TestConfiguration.AzureOpenAI.ApiKey, | ||
serviceId: "AzureOpenAIChat", | ||
modelId: TestConfiguration.AzureOpenAI.ChatModelId) | ||
.AddOpenAIChatCompletion( | ||
modelId: TestConfiguration.OpenAI.ChatModelId, | ||
apiKey: TestConfiguration.OpenAI.ApiKey, | ||
serviceId: "OpenAIChat") | ||
.Build(); | ||
} | ||
|
||
private async Task RunByServiceIdAsync(Kernel kernel, string serviceId) | ||
/// <summary> | ||
/// Shows how to invoke a prompt and specify the service id of the preferred AI service. When the prompt is executed the AI Service with the matching service id will be selected. | ||
/// </summary> | ||
/// <param name="serviceId">Service Id</param> | ||
[Theory] | ||
[InlineData("AzureOpenAIChat")] | ||
public async Task InvokePromptByServiceIdAsync(string serviceId) | ||
{ | ||
var kernel = BuildKernel(); | ||
Console.WriteLine($"======== Service Id: {serviceId} ========"); | ||
|
||
var prompt = "Hello AI, what can you do for me?"; | ||
var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId })); | ||
|
||
KernelArguments arguments = []; | ||
arguments.ExecutionSettings = new Dictionary<string, PromptExecutionSettings>() | ||
{ | ||
{ serviceId, new PromptExecutionSettings() } | ||
}; | ||
var result = await kernel.InvokePromptAsync(prompt, arguments); | ||
Console.WriteLine(result.GetValue<string>()); | ||
} | ||
|
||
private async Task RunByModelIdAsync(Kernel kernel, string modelId) | ||
/// <summary> | ||
/// Shows how to invoke a prompt and specify the model id of the preferred AI service. When the prompt is executed the AI Service with the matching model id will be selected. | ||
/// </summary> | ||
[Fact] | ||
private async Task InvokePromptByModelIdAsync() | ||
{ | ||
var modelId = TestConfiguration.OpenAI.ChatModelId; | ||
var kernel = BuildKernel(); | ||
Console.WriteLine($"======== Model Id: {modelId} ========"); | ||
|
||
var prompt = "Hello AI, what can you do for me?"; | ||
var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings() { ModelId = modelId })); | ||
|
||
var result = await kernel.InvokePromptAsync( | ||
prompt, | ||
new(new PromptExecutionSettings() | ||
{ | ||
ModelId = modelId | ||
})); | ||
Console.WriteLine(result.GetValue<string>()); | ||
} | ||
|
||
private async Task RunByFirstModelIdAsync(Kernel kernel, params string[] modelIds) | ||
/// <summary> | ||
/// Shows how to invoke a prompt and specify the service ids of the preferred AI services. | ||
/// When the prompt is executed the AI Service will be selected based on the order of the provided service ids. | ||
/// </summary> | ||
[Fact] | ||
public async Task InvokePromptFunctionWithFirstMatchingServiceIdAsync() | ||
{ | ||
string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"]; | ||
var kernel = BuildKernel(); | ||
Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========"); | ||
|
||
var result = await kernel.InvokePromptAsync(ChatPrompt, new(serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId }))); | ||
|
||
Console.WriteLine(result.GetValue<string>()); | ||
} | ||
|
||
/// <summary> | ||
/// Shows how to invoke a prompt and specify the model ids of the preferred AI services. | ||
/// When the prompt is executed the AI Service will be selected based on the order of the provided model ids. | ||
/// </summary> | ||
[Fact] | ||
public async Task InvokePromptFunctionWithFirstMatchingModelIdAsync() | ||
{ | ||
string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId]; | ||
var kernel = BuildKernel(); | ||
Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========"); | ||
|
||
var prompt = "Hello AI, what can you do for me?"; | ||
var result = await kernel.InvokePromptAsync(ChatPrompt, new(modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId }))); | ||
|
||
var modelSettings = new Dictionary<string, PromptExecutionSettings>(); | ||
foreach (var modelId in modelIds) | ||
{ | ||
modelSettings.Add(modelId, new PromptExecutionSettings() { ModelId = modelId }); | ||
} | ||
var promptConfig = new PromptTemplateConfig(prompt) { Name = "HelloAI", ExecutionSettings = modelSettings }; | ||
Console.WriteLine(result.GetValue<string>()); | ||
} | ||
|
||
/// <summary> | ||
/// Shows how to create a KernelFunction from a prompt and specify the service ids of the preferred AI services. | ||
/// When the function is invoked the AI Service will be selected based on the order of the provided service ids. | ||
/// </summary> | ||
[Fact] | ||
public async Task InvokePreconfiguredFunctionWithFirstMatchingServiceIdAsync() | ||
{ | ||
string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"]; | ||
var kernel = BuildKernel(); | ||
Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========"); | ||
|
||
var function = kernel.CreateFunctionFromPrompt(ChatPrompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })); | ||
var result = await kernel.InvokeAsync(function); | ||
|
||
var function = kernel.CreateFunctionFromPrompt(promptConfig); | ||
Console.WriteLine(result.GetValue<string>()); | ||
} | ||
|
||
/// <summary> | ||
/// Shows how to create a KernelFunction from a prompt and specify the model ids of the preferred AI services. | ||
/// When the function is invoked the AI Service will be selected based on the order of the provided model ids. | ||
/// </summary> | ||
[Fact] | ||
public async Task InvokePreconfiguredFunctionWithFirstMatchingModelIdAsync() | ||
{ | ||
string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId]; | ||
var kernel = BuildKernel(); | ||
|
||
Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========"); | ||
|
||
var function = kernel.CreateFunctionFromPrompt(ChatPrompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })); | ||
var result = await kernel.InvokeAsync(function); | ||
|
||
Console.WriteLine(result.GetValue<string>()); | ||
} | ||
|
||
/// <summary> | ||
/// Shows how to invoke a KernelFunction and specify the model id of the AI Service the function will use. | ||
/// </summary> | ||
[Fact] | ||
public async Task InvokePreconfiguredFunctionByModelIdAsync() | ||
{ | ||
var modelId = TestConfiguration.OpenAI.ChatModelId; | ||
var kernel = BuildKernel(); | ||
Console.WriteLine($"======== Model Id: {modelId} ========"); | ||
|
||
var function = kernel.CreateFunctionFromPrompt(ChatPrompt); | ||
var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ModelId = modelId })); | ||
|
||
Console.WriteLine(result.GetValue<string>()); | ||
} | ||
|
||
/// <summary> | ||
/// Shows how to invoke a KernelFunction and specify the service id of the AI Service the function will use. | ||
/// </summary> | ||
/// <param name="serviceId">Service Id</param> | ||
[Theory] | ||
[InlineData("AzureOpenAIChat")] | ||
public async Task InvokePreconfiguredFunctionByServiceIdAsync(string serviceId) | ||
{ | ||
var kernel = BuildKernel(); | ||
Console.WriteLine($"======== Service Id: {serviceId} ========"); | ||
|
||
var function = kernel.CreateFunctionFromPrompt(ChatPrompt); | ||
var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ServiceId = serviceId })); | ||
|
||
Console.WriteLine(result.GetValue<string>()); | ||
} | ||
|
||
/// <summary> | ||
/// Shows when specifying a non-existent ServiceId the kernel throws an exception. | ||
/// </summary> | ||
/// <param name="serviceId">Service Id</param> | ||
[Theory] | ||
[InlineData("NotFound")] | ||
public async Task InvokePromptByNonExistingServiceIdThrowsExceptionAsync(string serviceId) | ||
{ | ||
var kernel = BuildKernel(); | ||
Console.WriteLine($"======== Service Id: {serviceId} ========"); | ||
|
||
await Assert.ThrowsAsync<KernelException>(async () => await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId }))); | ||
} | ||
|
||
/// <summary> | ||
/// Shows how in the execution settings when no model id is found it falls back to the default service. | ||
/// </summary> | ||
/// <param name="modelId">Model Id</param> | ||
[Theory] | ||
[InlineData("NotFound")] | ||
public async Task InvokePromptByNonExistingModelIdUsesDefaultServiceAsync(string modelId) | ||
{ | ||
var kernel = BuildKernel(); | ||
Console.WriteLine($"======== Model Id: {modelId} ========"); | ||
|
||
await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ModelId = modelId })); | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.