From 1022444803a67561032abcec5e0da8d3f82a17de Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Mon, 27 May 2024 16:37:39 +0100 Subject: [PATCH 01/15] Service Id on Execution Settings --- .../Orchestration.Flow/FlowOrchestrator.cs | 2 +- .../AI/PromptExecutionSettings.cs | 17 +++ .../Functions/KernelArguments.cs | 54 +++++++- .../PromptTemplate/PromptTemplateConfig.cs | 19 +++ .../Functions/KernelFunctionFactory.cs | 40 +++++- .../SemanticKernel.Core/KernelExtensions.cs | 36 +++++ .../KernelFunctionFromPromptTests.cs | 54 ++++++++ .../PromptTemplateConfigTests.cs | 130 ++++++++++++++++++ 8 files changed, 342 insertions(+), 10 deletions(-) diff --git a/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs b/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs index d86c1681b96e..67abae8ef61c 100644 --- a/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs +++ b/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs @@ -73,6 +73,6 @@ public async Task ExecuteFlowAsync( } var executor = new FlowExecutor(this._kernelBuilder, this._flowStatusProvider, this._globalPluginCollection, this._config); - return await executor.ExecuteFlowAsync(flow, sessionId, input, kernelArguments ?? new KernelArguments(null)).ConfigureAwait(false); + return await executor.ExecuteFlowAsync(flow, sessionId, input, kernelArguments ?? new KernelArguments()).ConfigureAwait(false); } } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs index bce11b356e0f..3a7966ae271a 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs @@ -27,6 +27,22 @@ public class PromptExecutionSettings /// public static string DefaultServiceId => "default"; + /// + /// Service identifier. 
+ /// This identifies the service these settings are configured for e.g., openai, ollama, huggingface, etc. + /// + [JsonPropertyName("service_id")] + public string? ServiceId + { + get => this._serviceId; + + set + { + this.ThrowIfFrozen(); + this._serviceId = value; + } + } + /// /// Model identifier. /// This identifies the AI model these settings are configured for e.g., gpt-4, gpt-3.5-turbo @@ -113,6 +129,7 @@ protected void ThrowIfFrozen() private string? _modelId; private IDictionary? _extensionData; + private string? _serviceId; #endregion } diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs index d7776f83f24a..c51aa988169a 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs @@ -21,6 +21,7 @@ public sealed class KernelArguments : IDictionary, IReadOnlyDic { /// Dictionary of name/values for all the arguments in the instance. private readonly Dictionary _arguments; + private IReadOnlyDictionary? _executionSettings; /// /// Initializes a new instance of the class with the specified AI execution settings. @@ -36,12 +37,32 @@ public KernelArguments() /// /// The prompt execution settings. public KernelArguments(PromptExecutionSettings? executionSettings) + : this(executionSettings is null ? null : [executionSettings]) { - this._arguments = new(StringComparer.OrdinalIgnoreCase); + } + /// + /// Initializes a new instance of the class with the specified AI execution settings. + /// + /// The prompt execution settings. + public KernelArguments(IReadOnlyCollection? 
executionSettings) + { + this._arguments = new(StringComparer.OrdinalIgnoreCase); if (executionSettings is not null) { - this.ExecutionSettings = new Dictionary() { { PromptExecutionSettings.DefaultServiceId, executionSettings } }; + var newExecutionSettings = new Dictionary(executionSettings.Count); + foreach (var settings in executionSettings) + { + var targetServiceId = settings.ServiceId ?? PromptExecutionSettings.DefaultServiceId; + if (newExecutionSettings.ContainsKey(targetServiceId)) + { + throw new ArgumentException("When adding multiple execution settings, the service id needs to be provided and be unique for each."); + } + + newExecutionSettings[targetServiceId] = settings; + } + + this.ExecutionSettings = newExecutionSettings; } } @@ -65,7 +86,34 @@ public KernelArguments(IDictionary source, Dictionary /// Gets or sets the prompt execution settings. /// - public IReadOnlyDictionary? ExecutionSettings { get; set; } + public IReadOnlyDictionary? ExecutionSettings + { + get => this._executionSettings; + set + { + this._executionSettings = value; + + if (this._executionSettings is null || + this._executionSettings.Count == 0) + { + return; + } + + foreach (var kv in this._executionSettings) + { + // Ensures that if a service id is not specified and is not default, it is set to the current service id. + if (kv.Key != kv.Value.ServiceId) + { + if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId)) + { + throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings)); + } + + kv.Value.ServiceId = kv.Key; + } + } + } + } /// /// Gets the number of arguments contained in the . 
diff --git a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs index 1a55cbbff837..fa628b1ce4f8 100644 --- a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs +++ b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs @@ -187,6 +187,25 @@ public Dictionary ExecutionSettings { Verify.NotNull(value); this._executionSettings = value; + + if (value.Count == 0) + { + return; + } + + foreach (var kv in value) + { + // Ensures that if a service id is not specified and is not default, it is set to the current service id. + if (kv.Key != kv.Value.ServiceId) + { + if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId)) + { + throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings)); + } + + kv.Value.ServiceId = kv.Key; + } + } } } diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs index 25d384d51351..717880b01c30 100644 --- a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; +using System.Linq; using System.Reflection; using Microsoft.Extensions.Logging; @@ -107,6 +108,37 @@ public static KernelFunction CreateFromPrompt( string? templateFormat = null, IPromptTemplateFactory? promptTemplateFactory = null, ILoggerFactory? loggerFactory = null) => + KernelFunctionFromPrompt.Create( + promptTemplate, + CreateSettingsDictionary(executionSettings is null ? null : [executionSettings]), + functionName, + description, + templateFormat, + promptTemplateFactory, + loggerFactory); + + /// + /// Creates a instance for a prompt specified via a prompt template. 
+ /// + /// Prompt template for the function. + /// Default execution settings to use when invoking this prompt function. + /// The name to use for the function. If null, it will default to a randomly generated name. + /// The description to use for the function. + /// The template format of . This must be provided if is not null. + /// + /// The to use when interpreting the into a . + /// If null, a default factory will be used. + /// + /// The to use for logging. If null, no logging will be performed. + /// The created for invoking the prompt. + public static KernelFunction CreateFromPrompt( + string promptTemplate, + IReadOnlyCollection? executionSettings, + string? functionName = null, + string? description = null, + string? templateFormat = null, + IPromptTemplateFactory? promptTemplateFactory = null, + ILoggerFactory? loggerFactory = null) => KernelFunctionFromPrompt.Create(promptTemplate, CreateSettingsDictionary(executionSettings), functionName, description, templateFormat, promptTemplateFactory, loggerFactory); /// @@ -141,10 +173,6 @@ public static KernelFunction CreateFromPrompt( /// Wraps the specified settings into a dictionary with the default service ID as the key. /// [return: NotNullIfNotNull(nameof(settings))] - private static Dictionary? CreateSettingsDictionary(PromptExecutionSettings? settings) => - settings is null ? null : - new Dictionary(1) - { - { PromptExecutionSettings.DefaultServiceId, settings }, - }; + private static Dictionary? CreateSettingsDictionary(IReadOnlyCollection? settings) => + settings?.ToDictionary(s => s.ServiceId ?? 
PromptExecutionSettings.DefaultServiceId, s => s); } diff --git a/dotnet/src/SemanticKernel.Core/KernelExtensions.cs b/dotnet/src/SemanticKernel.Core/KernelExtensions.cs index a05340a64775..1d8dfec045a3 100644 --- a/dotnet/src/SemanticKernel.Core/KernelExtensions.cs +++ b/dotnet/src/SemanticKernel.Core/KernelExtensions.cs @@ -109,6 +109,42 @@ public static KernelFunction CreateFunctionFromPrompt( kernel.LoggerFactory); } + /// + /// Creates a instance for a prompt specified via a prompt template. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Prompt template for the function. + /// List of execution settings to use when invoking this prompt function. + /// The name to use for the function. If null, it will default to a randomly generated name. + /// The description to use for the function. + /// The template format of . This must be provided if is not null. + /// + /// The to use when interpreting the into a . + /// If null, a default factory will be used. + /// + /// The created for invoking the prompt. + public static KernelFunction CreateFunctionFromPrompt( + this Kernel kernel, + string promptTemplate, + IReadOnlyCollection? executionSettings, + string? functionName = null, + string? description = null, + string? templateFormat = null, + IPromptTemplateFactory? promptTemplateFactory = null) + { + Verify.NotNull(kernel); + Verify.NotNull(promptTemplate); + + return KernelFunctionFactory.CreateFromPrompt( + promptTemplate, + executionSettings, + functionName, + description, + templateFormat, + promptTemplateFactory, + kernel.LoggerFactory); + } + /// /// Creates a instance for a prompt specified via a prompt template configuration. 
/// diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromPromptTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromPromptTests.cs index ae9838e77414..eb1a71d7d67b 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromPromptTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromPromptTests.cs @@ -116,6 +116,60 @@ public async Task ItUsesServiceIdWhenProvidedAsync() mockTextGeneration2.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Never()); } + [Fact] + public async Task ItUsesServiceIdWhenProvidedInMethodAsync() + { + // Arrange + var mockTextGeneration1 = new Mock(); + var mockTextGeneration2 = new Mock(); + var fakeTextContent = new TextContent("llmResult"); + + mockTextGeneration1.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync([fakeTextContent]); + mockTextGeneration2.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync([fakeTextContent]); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", mockTextGeneration1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextGeneration2.Object); + Kernel kernel = builder.Build(); + + var func = kernel.CreateFunctionFromPrompt("my prompt", [new PromptExecutionSettings { ServiceId = "service2" }]); + + // Act + await kernel.InvokeAsync(func); + + // Assert + mockTextGeneration1.Verify(a => a.GetTextContentsAsync("my prompt", It.IsAny(), It.IsAny(), It.IsAny()), Times.Never()); + mockTextGeneration2.Verify(a => a.GetTextContentsAsync("my prompt", It.IsAny(), It.IsAny(), It.IsAny()), Times.Once()); + } + + [Fact] + public async Task ItUsesChatServiceIdWhenProvidedInMethodAsync() + { + // Arrange + var mockTextGeneration1 = new Mock(); + var mockTextGeneration2 = new Mock(); + var fakeTextContent = new TextContent("llmResult"); + var 
fakeChatContent = new ChatMessageContent(AuthorRole.User, "content"); + + mockTextGeneration1.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync([fakeTextContent]); + mockTextGeneration2.Setup(c => c.GetChatMessageContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync([fakeChatContent]); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", mockTextGeneration1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextGeneration2.Object); + builder.Services.AddKeyedSingleton("service3", mockTextGeneration1.Object); + Kernel kernel = builder.Build(); + + var func = kernel.CreateFunctionFromPrompt("my prompt", [new PromptExecutionSettings { ServiceId = "service2" }]); + + // Act + await kernel.InvokeAsync(func); + + // Assert + mockTextGeneration1.Verify(a => a.GetTextContentsAsync("my prompt", It.IsAny(), It.IsAny(), It.IsAny()), Times.Never()); + mockTextGeneration2.Verify(a => a.GetChatMessageContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Once()); + } + [Fact] public async Task ItFailsIfInvalidServiceIdIsProvidedAsync() { diff --git a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs index 3285ed6b819f..c4b160572be5 100644 --- a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs @@ -105,6 +105,136 @@ public void DeserializingExpectMultipleModels() Assert.Equal(2, promptTemplateConfig.ExecutionSettings.Count); } + [Fact] + public void DeserializingAutoSetServiceIdWhenNotProvided() + { + // Arrange + string configPayload = """ + { + "schema": 1, + "description": "", + "execution_settings": + { + "service1": { + "model_id": "gpt-4", + "max_tokens": 200, + "temperature": 0.2, + "top_p": 0.0, + 
"presence_penalty": 0.0, + "frequency_penalty": 0.0, + "stop_sequences": + [ + "Human", + "AI" + ] + }, + "service2": { + "model_id": "gpt-3.5_turbo", + "max_tokens": 256, + "temperature": 0.3, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "stop_sequences": + [ + "Human", + "AI" + ] + } + } + } + """; + + // Act + var promptTemplateConfig = JsonSerializer.Deserialize(configPayload); + + // Assert + Assert.NotNull(promptTemplateConfig); + Assert.Equal("service1", promptTemplateConfig.ExecutionSettings["service1"].ServiceId); + Assert.Equal("service2", promptTemplateConfig.ExecutionSettings["service2"].ServiceId); + } + + [Fact] + public void DeserializingAutoSetServiceIdWhenDefault() + { + // Arrange + string configPayload = """ + { + "schema": 1, + "description": "", + "execution_settings": + { + "default": { + "model_id": "gpt-4", + "max_tokens": 200, + "temperature": 0.2, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "stop_sequences": + [ + "Human", + "AI" + ] + } + } + } + """; + + // Act + var promptTemplateConfig = JsonSerializer.Deserialize(configPayload); + + // Assert + Assert.NotNull(promptTemplateConfig); + Assert.NotNull(promptTemplateConfig.DefaultExecutionSettings); + Assert.Equal(PromptExecutionSettings.DefaultServiceId, promptTemplateConfig.DefaultExecutionSettings?.ServiceId); + } + + [Fact] + public void DeserializingServiceIdUnmatchingIndexShouldThrow() + { + // Arrange + string configPayload = """ + { + "schema": 1, + "description": "", + "execution_settings": + { + "service1": { + "model_id": "gpt-4", + "max_tokens": 200, + "temperature": 0.2, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "stop_sequences": + [ + "Human", + "AI" + ] + }, + "service2": { + "service_id": "service3", + "model_id": "gpt-3.5_turbo", + "max_tokens": 256, + "temperature": 0.3, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "stop_sequences": + [ + "Human", + "AI" + 
] + } + } + } + """; + + // Act & Assert + var exception = Assert.Throws(() => JsonSerializer.Deserialize(configPayload)); + } + [Fact] public void DeserializingExpectCompletion() { From 8f4fdd182bb9682cd49cc78bffdd9e5d771d52e0 Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Mon, 27 May 2024 21:57:21 +0100 Subject: [PATCH 02/15] Unit Tests Added, Sample Added --- .../Connectors_WithMultipleLLMs.cs | 35 ++++--- .../AI/PromptExecutionSettings.cs | 1 + .../PromptTemplate/PromptTemplateConfig.cs | 10 +- .../Functions/KernelFunctionFactory.cs | 4 +- .../SemanticKernel.Core/KernelExtensions.cs | 2 +- .../AI/PromptExecutionSettingsTests.cs | 32 ++++++ .../Functions/KernelArgumentsTests.cs | 39 ++++++++ .../Functions/KernelExtensionsTests.cs | 37 +++++++ .../PromptTemplateConfigTests.cs | 97 +++++++++++++++++++ 9 files changed, 236 insertions(+), 21 deletions(-) diff --git a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs index 592146da6799..e2751590033b 100644 --- a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs +++ b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs @@ -28,7 +28,8 @@ public async Task RunAsync() await RunByServiceIdAsync(kernel, "AzureOpenAIChat"); await RunByModelIdAsync(kernel, TestConfiguration.OpenAI.ChatModelId); - await RunByFirstModelIdAsync(kernel, "gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId); + await RunByFirstModelIdAsync(kernel, ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId]); + await RunByFirstServiceIdAsync(kernel, ["NotFound", "AzureOpenAIChat", "OpenAIChat"]); } private async Task RunByServiceIdAsync(Kernel kernel, string serviceId) @@ -37,12 +38,21 @@ private async Task RunByServiceIdAsync(Kernel kernel, string serviceId) var prompt = 
"Hello AI, what can you do for me?"; - KernelArguments arguments = []; - arguments.ExecutionSettings = new Dictionary() - { - { serviceId, new PromptExecutionSettings() } - }; - var result = await kernel.InvokePromptAsync(prompt, arguments); + var result = await kernel.InvokePromptAsync(prompt, new(new PromptExecutionSettings { ServiceId = serviceId })); + + Console.WriteLine(result.GetValue()); + } + + private async Task RunByFirstServiceIdAsync(Kernel kernel, string[] serviceIds) + { + Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========"); + + var prompt = "Hello AI, what can you do for me?"; + + var function = kernel.CreateFunctionFromPrompt(prompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })); + + var result = await kernel.InvokeAsync(function); + Console.WriteLine(result.GetValue()); } @@ -61,20 +71,13 @@ private async Task RunByModelIdAsync(Kernel kernel, string modelId) Console.WriteLine(result.GetValue()); } - private async Task RunByFirstModelIdAsync(Kernel kernel, params string[] modelIds) + private async Task RunByFirstModelIdAsync(Kernel kernel, string[] modelIds) { Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========"); var prompt = "Hello AI, what can you do for me?"; - var modelSettings = new Dictionary(); - foreach (var modelId in modelIds) - { - modelSettings.Add(modelId, new PromptExecutionSettings() { ModelId = modelId }); - } - var promptConfig = new PromptTemplateConfig(prompt) { Name = "HelloAI", ExecutionSettings = modelSettings }; - - var function = kernel.CreateFunctionFromPrompt(promptConfig); + var function = kernel.CreateFunctionFromPrompt(prompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })); var result = await kernel.InvokeAsync(function); Console.WriteLine(result.GetValue()); diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs 
b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs index 3a7966ae271a..25555c0824f2 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs @@ -109,6 +109,7 @@ public virtual PromptExecutionSettings Clone() return new() { ModelId = this.ModelId, + ServiceId = this.ServiceId, ExtensionData = this.ExtensionData is not null ? new Dictionary(this.ExtensionData) : null }; } diff --git a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs index fa628b1ce4f8..2a3431dfed0c 100644 --- a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs +++ b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs @@ -243,13 +243,19 @@ public void AddExecutionSettings(PromptExecutionSettings settings, string? servi { Verify.NotNull(settings); - var key = serviceId ?? PromptExecutionSettings.DefaultServiceId; + var key = serviceId ?? settings.ServiceId ?? PromptExecutionSettings.DefaultServiceId; + + // To avoid any reference changes to the settings object, clone it before changing service id. + var clonedSettings = settings.Clone(); + + // Overwrite the service id if provided in the method. 
+ clonedSettings.ServiceId = key; if (this.ExecutionSettings.ContainsKey(key)) { throw new ArgumentException($"Execution settings for service id '{key}' already exists.", nameof(serviceId)); } - this.ExecutionSettings[key] = settings; + this.ExecutionSettings[key] = clonedSettings; } /// diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs index 717880b01c30..f6f0a805f4a6 100644 --- a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs @@ -133,7 +133,7 @@ public static KernelFunction CreateFromPrompt( /// The created for invoking the prompt. public static KernelFunction CreateFromPrompt( string promptTemplate, - IReadOnlyCollection? executionSettings, + IEnumerable? executionSettings, string? functionName = null, string? description = null, string? templateFormat = null, @@ -173,6 +173,6 @@ public static KernelFunction CreateFromPrompt( /// Wraps the specified settings into a dictionary with the default service ID as the key. /// [return: NotNullIfNotNull(nameof(settings))] - private static Dictionary? CreateSettingsDictionary(IReadOnlyCollection? settings) => + private static Dictionary? CreateSettingsDictionary(IEnumerable? settings) => settings?.ToDictionary(s => s.ServiceId ?? PromptExecutionSettings.DefaultServiceId, s => s); } diff --git a/dotnet/src/SemanticKernel.Core/KernelExtensions.cs b/dotnet/src/SemanticKernel.Core/KernelExtensions.cs index 1d8dfec045a3..6a96395cedea 100644 --- a/dotnet/src/SemanticKernel.Core/KernelExtensions.cs +++ b/dotnet/src/SemanticKernel.Core/KernelExtensions.cs @@ -126,7 +126,7 @@ public static KernelFunction CreateFunctionFromPrompt( public static KernelFunction CreateFunctionFromPrompt( this Kernel kernel, string promptTemplate, - IReadOnlyCollection? executionSettings, + IEnumerable? executionSettings, string? functionName = null, string? 
description = null, string? templateFormat = null, diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs index 83257b701112..dd822a091175 100644 --- a/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs @@ -14,6 +14,8 @@ public void PromptExecutionSettingsCloneWorksAsExpected() // Arrange string configPayload = """ { + "model_id": "gpt-3", + "service_id": "service-1", "max_tokens": 60, "temperature": 0.5, "top_p": 0.0, @@ -30,6 +32,36 @@ public void PromptExecutionSettingsCloneWorksAsExpected() Assert.NotNull(clone); Assert.Equal(executionSettings.ModelId, clone.ModelId); Assert.Equivalent(executionSettings.ExtensionData, clone.ExtensionData); + Assert.Equal(executionSettings.ServiceId, clone.ServiceId); + } + + [Fact] + public void PromptExecutionSettingsSerializationWorksAsExpected() + { + // Arrange + string configPayload = """ + { + "model_id": "gpt-3", + "service_id": "service-1", + "max_tokens": 60, + "temperature": 0.5, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0 + } + """; + + // Act + var executionSettings = JsonSerializer.Deserialize(configPayload); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal("gpt-3", executionSettings.ModelId); + Assert.Equal("service-1", executionSettings.ServiceId); + Assert.Equal(60, ((JsonElement)executionSettings.ExtensionData!["max_tokens"]).GetInt32()); + Assert.Equal(0.5, ((JsonElement)executionSettings.ExtensionData!["temperature"]).GetDouble()); + Assert.Equal(0.0, ((JsonElement)executionSettings.ExtensionData!["top_p"]).GetDouble()); + Assert.Equal(0.0, ((JsonElement)executionSettings.ExtensionData!["presence_penalty"]).GetDouble()); } [Fact] diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelArgumentsTests.cs 
b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelArgumentsTests.cs index a9d1625e79e7..8899668fd573 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelArgumentsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelArgumentsTests.cs @@ -1,6 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; +using System.Linq; using Microsoft.SemanticKernel; using Xunit; @@ -62,6 +64,43 @@ public void ItCanBeCreatedWithBothExecutionSettingsAndArguments() Assert.Equal("fake-value", argument.Value); } + [Fact] + public void ItCanBeCreatedWithMultipleExecutionSettingsAndArguments() + { + // Arrange + var executionSettings1 = new PromptExecutionSettings(); + var executionSettings2 = new PromptExecutionSettings() { ServiceId = "service-2" }; + var executionSettings3 = new PromptExecutionSettings() { ServiceId = "service-3" }; + + // Act + KernelArguments sut = new([executionSettings1, executionSettings2, executionSettings3]) { { "fake-key", "fake-value" } }; + + // Assert + Assert.Same(executionSettings1, sut.ExecutionSettings?[PromptExecutionSettings.DefaultServiceId]); + Assert.Same(executionSettings2, sut.ExecutionSettings?["service-2"]); + Assert.Same(executionSettings3, sut.ExecutionSettings?["service-3"]); + + var argument = Assert.Single(sut); + Assert.Equal("fake-key", argument.Key); + Assert.Equal("fake-value", argument.Value); + } + + [Theory] + [InlineData(null, null)] + [InlineData("default", null)] + [InlineData(null, "default")] + [InlineData("service1", null, "service1")] + [InlineData(null, "service2", "service2")] + [InlineData("service1", "service2", "service3", null, "service1")] + public void ItCannotBeCreatedWithMultipleExecutionSettingsWithClashingServiceIdOrWithoutServiceIdSet(params string?[] serviceIds) + { + // Arrange + var executionSettingsList = serviceIds?.Select(serviceId => new PromptExecutionSettings() { ServiceId = serviceId }).ToList(); + + // Act & Assert + 
Assert.Throws(() => new KernelArguments(executionSettingsList) { { "fake-key", "fake-value" } }); + } + [Fact] public void ItCanPerformCaseInsensitiveSearch() { diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelExtensionsTests.cs index ea36d8864d17..57010c640b91 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelExtensionsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelExtensionsTests.cs @@ -1,6 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.TextGeneration; +using Moq; using Xunit; namespace SemanticKernel.UnitTests.Functions; @@ -65,6 +71,37 @@ public void CreatePluginFromDescriptionAndFunctions() Assert.True(plugin.Contains("Function2")); } + [Fact] + public async Task CreateFunctionFromPromptWithMultipleSettingsUseCorrectServiceAsync() + { + // Arrange + var mockTextGeneration1 = new Mock(); + var mockTextGeneration2 = new Mock(); + var fakeTextContent = new TextContent("llmResult"); + var fakeChatContent = new ChatMessageContent(AuthorRole.User, "content"); + + mockTextGeneration1.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync([fakeTextContent]); + mockTextGeneration2.Setup(c => c.GetChatMessageContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync([fakeChatContent]); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", mockTextGeneration1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextGeneration2.Object); + builder.Services.AddKeyedSingleton("service3", mockTextGeneration1.Object); + Kernel kernel = builder.Build(); + + KernelFunction function = 
kernel.CreateFunctionFromPrompt("coolfunction", [ + new PromptExecutionSettings { ServiceId = "service5" }, // Should ignore this as service5 is not registered + new PromptExecutionSettings { ServiceId = "service2" }, + ]); + + // Act + await kernel.InvokeAsync(function); + + // Assert + mockTextGeneration1.Verify(a => a.GetTextContentsAsync("coolfunction", It.IsAny(), It.IsAny(), It.IsAny()), Times.Never()); + mockTextGeneration2.Verify(a => a.GetChatMessageContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Once()); + } + [Fact] public void ImportPluginFromFunctions() { diff --git a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs index c4b160572be5..26e53513bd73 100644 --- a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs @@ -235,6 +235,103 @@ public void DeserializingServiceIdUnmatchingIndexShouldThrow() var exception = Assert.Throws(() => JsonSerializer.Deserialize(configPayload)); } + [Fact] + public void ItCannotAddExecutionSettingsWithSameServiceId() + { + // Arrange + var settings = new PromptTemplateConfig(); + settings.AddExecutionSettings(new PromptExecutionSettings(), "service1"); + + // Act & Assert + Assert.Throws(() => settings.AddExecutionSettings(new PromptExecutionSettings(), "service1")); + } + + [Fact] + public void ItAddExecutionSettingsAndOverwriteServiceIdAsExpected() + { + // Arrange + var promptTemplateConfig = new PromptTemplateConfig(); + var settings1 = new PromptExecutionSettings { ModelId = "model-service-3", ServiceId = "should not override" }; + + // Act + promptTemplateConfig.AddExecutionSettings(new PromptExecutionSettings { ModelId = "model1" }); + promptTemplateConfig.AddExecutionSettings(new PromptExecutionSettings { ModelId = "model2" }, "service1"); + 
promptTemplateConfig.AddExecutionSettings(new PromptExecutionSettings { ServiceId = "service2", ModelId = "model-service-2" }, "override"); + promptTemplateConfig.AddExecutionSettings(new PromptExecutionSettings { ServiceId = "service3", ModelId = "model-service-3" }); + promptTemplateConfig.AddExecutionSettings(settings1, "service4"); + + // Assert + // Ovewrite with Default + Assert.Equal("model1", promptTemplateConfig.ExecutionSettings["default"].ModelId); + Assert.Equal("default", promptTemplateConfig.ExecutionSettings["default"].ServiceId); + + // Ovewrite with ServiceId from Argument (Not Defaulting) + Assert.Equal("model2", promptTemplateConfig.ExecutionSettings["service1"].ModelId); + Assert.Equal("service1", promptTemplateConfig.ExecutionSettings["service1"].ServiceId); + + // Ovewrite with ServiceId from Argument + Assert.Equal("model-service-2", promptTemplateConfig.ExecutionSettings["override"].ModelId); + Assert.Equal("override", promptTemplateConfig.ExecutionSettings["override"].ServiceId); + + // Don't override from argument and use ServiceId from ExecutionSettings + Assert.Equal("model-service-3", promptTemplateConfig.ExecutionSettings["service3"].ModelId); + Assert.Equal("service3", promptTemplateConfig.ExecutionSettings["service3"].ServiceId); + + // Don't change settings by reference (it was cloned) + Assert.Equal("should not override", settings1.ServiceId); + } + + [Fact] + public void DeserializingServiceIdSameIndexKeepsLast() + { + // Arrange + string configPayload = """ + { + "schema": 1, + "description": "", + "execution_settings": + { + "service1": { + "model_id": "gpt-4", + "max_tokens": 200, + "temperature": 0.2, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "stop_sequences": + [ + "Human", + "AI" + ] + }, + "service1": { + "model_id": "gpt-3.5_turbo", + "max_tokens": 256, + "temperature": 0.3, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "stop_sequences": + [ + "Human", + "AI" + ] + 
} + } + } + """; + + // Act + var promptTemplate = JsonSerializer.Deserialize(configPayload); + + // Assert + Assert.NotNull(promptTemplate); + Assert.NotNull(promptTemplate.ExecutionSettings); + Assert.Single(promptTemplate.ExecutionSettings); + Assert.Equal("service1", promptTemplate.ExecutionSettings["service1"].ServiceId); + Assert.Equal("gpt-3.5_turbo", promptTemplate.ExecutionSettings["service1"].ModelId); + } + [Fact] public void DeserializingExpectCompletion() { From b900aac6f0356e5608f082e836f2684df4e4b324 Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Mon, 27 May 2024 22:34:42 +0100 Subject: [PATCH 03/15] Adjust UT --- .../Functions/OrderedAIServiceSelectorTests.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs index 15b001c13c99..1ddb34c819ba 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs @@ -75,8 +75,8 @@ public void ItGetsAIServiceConfigurationForTextGenerationByServiceId() Kernel kernel = builder.Build(); var promptConfig = new PromptTemplateConfig() { Template = "Hello AI" }; - var executionSettings = new PromptExecutionSettings(); - promptConfig.AddExecutionSettings(executionSettings, "service2"); + promptConfig.AddExecutionSettings(new(), "service2"); + var executionSettings = promptConfig.ExecutionSettings["service2"]; var function = kernel.CreateFunctionFromPrompt(promptConfig); var serviceSelector = new OrderedAIServiceSelector(); From 33cba2943c0104f7336558ca23a28967bea8321f Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Mon, 27 May 2024 22:35:36 +0100 Subject: [PATCH 04/15] Fix typos --- 
.../PromptTemplate/PromptTemplateConfigTests.cs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs index 26e53513bd73..b6bd83abb5e1 100644 --- a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs @@ -261,15 +261,15 @@ public void ItAddExecutionSettingsAndOverwriteServiceIdAsExpected() promptTemplateConfig.AddExecutionSettings(settings1, "service4"); // Assert - // Ovewrite with Default + // Overwrite with Default Assert.Equal("model1", promptTemplateConfig.ExecutionSettings["default"].ModelId); Assert.Equal("default", promptTemplateConfig.ExecutionSettings["default"].ServiceId); - // Ovewrite with ServiceId from Argument (Not Defaulting) + // Overwrite with ServiceId from Argument (Not Defaulting) Assert.Equal("model2", promptTemplateConfig.ExecutionSettings["service1"].ModelId); Assert.Equal("service1", promptTemplateConfig.ExecutionSettings["service1"].ServiceId); - // Ovewrite with ServiceId from Argument + // Overwrite with ServiceId from Argument Assert.Equal("model-service-2", promptTemplateConfig.ExecutionSettings["override"].ModelId); Assert.Equal("override", promptTemplateConfig.ExecutionSettings["override"].ServiceId); From f702bb783852ecc41f5ad6b3239aaa44c654778d Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Wed, 29 May 2024 09:30:43 +0100 Subject: [PATCH 05/15] Address PR Feedback --- .../AI/PromptExecutionSettings.cs | 2 +- .../Functions/KernelArguments.cs | 39 ++++++++++++------- .../PromptTemplate/PromptTemplateConfig.cs | 26 ++++++------- .../OrderedAIServiceSelectorTests.cs | 4 +- 4 files changed, 40 insertions(+), 31 deletions(-) diff --git 
a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs index 25555c0824f2..e973c6f24755 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs @@ -29,7 +29,7 @@ public class PromptExecutionSettings /// /// Service identifier. - /// This identifies the service these settings are configured for e.g., openai, ollama, huggingface, etc. + /// This identifies the service these settings are configured for e.g., azure_openai_eastus, openai, ollama, huggingface, etc. /// [JsonPropertyName("service_id")] public string? ServiceId diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs index c51aa988169a..b59d5b8be9ea 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs @@ -3,6 +3,7 @@ using System; using System.Collections; using System.Collections.Generic; +using System.Linq; using System.Text.Json.Serialization; #pragma warning disable CA1710 // Identifiers should have correct suffix @@ -56,7 +57,11 @@ public KernelArguments(IReadOnlyCollection? executionSe var targetServiceId = settings.ServiceId ?? PromptExecutionSettings.DefaultServiceId; if (newExecutionSettings.ContainsKey(targetServiceId)) { - throw new ArgumentException("When adding multiple execution settings, the service id needs to be provided and be unique for each."); + var exceptionMessage = (targetServiceId == PromptExecutionSettings.DefaultServiceId) + ? $"Default service id '{PromptExecutionSettings.DefaultServiceId}' must not be duplicated." 
+ : $"Service id '{settings.ServiceId}' must not be duplicated and should match the key '{targetServiceId}'."; + + throw new ArgumentException(exceptionMessage, nameof(executionSettings)); } newExecutionSettings[targetServiceId] = settings; @@ -86,30 +91,34 @@ public KernelArguments(IDictionary source, Dictionary /// Gets or sets the prompt execution settings. /// + /// + /// The settings dictionary is keyed by the service ID, or for the default execution settings. + /// When setting, the service id of each must match the key in the dictionary. + /// public IReadOnlyDictionary? ExecutionSettings { get => this._executionSettings; set { - this._executionSettings = value; - - if (this._executionSettings is null || - this._executionSettings.Count == 0) - { - return; - } + // Clone the settings to avoid reference changes. + this._executionSettings = value is IDictionary dictionary + ? new Dictionary(dictionary) + : (IReadOnlyDictionary?)(value?.ToDictionary(kv => kv.Key, kv => kv.Value)); - foreach (var kv in this._executionSettings) + if (this._executionSettings is not null && this._executionSettings.Count != 0) { - // Ensures that if a service id is not specified and is not default, it is set to the current service id. - if (kv.Key != kv.Value.ServiceId) + foreach (var kv in this._executionSettings!) { - if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId)) + // Ensures that if a service id is not specified and is not default, it is set to the current service id. 
+ if (kv.Key != kv.Value.ServiceId) { - throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings)); - } + if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId)) + { + throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings)); + } - kv.Value.ServiceId = kv.Key; + kv.Value.ServiceId = kv.Key; + } } } } diff --git a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs index 2a3431dfed0c..32212fab94f8 100644 --- a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs +++ b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs @@ -178,6 +178,7 @@ public List InputVariables /// /// /// The settings dictionary is keyed by the service ID, or for the default execution settings. + /// When setting, the service id of each must match the key in the dictionary. /// [JsonPropertyName("execution_settings")] public Dictionary ExecutionSettings @@ -186,24 +187,23 @@ public Dictionary ExecutionSettings set { Verify.NotNull(value); - this._executionSettings = value; - if (value.Count == 0) + // Clone the settings to avoid reference changes. + this._executionSettings = new(value); + if (this._executionSettings.Count != 0) { - return; - } - - foreach (var kv in value) - { - // Ensures that if a service id is not specified and is not default, it is set to the current service id. - if (kv.Key != kv.Value.ServiceId) + foreach (var kv in this._executionSettings) { - if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId)) + // Ensures that if a service id is not specified and is not default, it is set to the current service id. 
+ if (kv.Key != kv.Value.ServiceId) { - throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings)); - } + if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId)) + { + throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings)); + } - kv.Value.ServiceId = kv.Key; + kv.Value.ServiceId = kv.Key; + } } } } diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs index 1ddb34c819ba..400b725db483 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs @@ -75,8 +75,8 @@ public void ItGetsAIServiceConfigurationForTextGenerationByServiceId() Kernel kernel = builder.Build(); var promptConfig = new PromptTemplateConfig() { Template = "Hello AI" }; - promptConfig.AddExecutionSettings(new(), "service2"); - var executionSettings = promptConfig.ExecutionSettings["service2"]; + var executionSettings = new PromptExecutionSettings() { ServiceId = "service2" }; + promptConfig.AddExecutionSettings(executionSettings); var function = kernel.CreateFunctionFromPrompt(promptConfig); var serviceSelector = new OrderedAIServiceSelector(); From 14f768bfbba74833365ffe6a2c02fd86d87b89d1 Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Wed, 29 May 2024 12:44:51 +0100 Subject: [PATCH 06/15] Added Invoke and PreConfigured samples, simplified code --- .../Connectors_WithMultipleLLMs.cs | 81 +++++++++++++------ .../Functions/KernelArguments.cs | 4 +- 2 files changed, 59 insertions(+), 26 deletions(-) diff --git a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs index e2751590033b..bcb5d729529c 
100644 --- a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs +++ b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs @@ -7,6 +7,7 @@ namespace ChatCompletion; public class Connectors_WithMultipleLLMs(ITestOutputHelper output) : BaseTest(output) { + private const string ChatPrompt = "Hello AI, what can you do for me?"; /// /// Show how to run a prompt function and specify a specific service to use. /// @@ -26,60 +27,92 @@ public async Task RunAsync() serviceId: "OpenAIChat") .Build(); - await RunByServiceIdAsync(kernel, "AzureOpenAIChat"); - await RunByModelIdAsync(kernel, TestConfiguration.OpenAI.ChatModelId); - await RunByFirstModelIdAsync(kernel, ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId]); - await RunByFirstServiceIdAsync(kernel, ["NotFound", "AzureOpenAIChat", "OpenAIChat"]); + // Preconfigured function settings + await PreconfiguredFunctionSettingsByFirstModelIdAsync(kernel, ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId]); + await PreconfiguredFunctionSettingsByFirstServiceIdAsync(kernel, ["NotFound", "AzureOpenAIChat", "OpenAIChat"]); + await PreconfiguredFunctionSettingsByModelIdAsync(kernel, TestConfiguration.OpenAI.ChatModelId); + await PreconfiguredFunctionSettingsByServiceIdAsync(kernel, "AzureOpenAIChat"); + + // Per invocation settings + await InvocationSettingsByServiceIdAsync(kernel, "AzureOpenAIChat"); + await InvocationSettingsByFirstServiceIdAsync(kernel, ["NotFound", "AzureOpenAIChat", "OpenAIChat"]); + await InvocationSettingsByModelIdAsync(kernel, TestConfiguration.OpenAI.ChatModelId); + await InvocationSettingsByFirstModelIdAsync(kernel, ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId]); } - private async Task RunByServiceIdAsync(Kernel kernel, string serviceId) + private async Task InvocationSettingsByServiceIdAsync(Kernel kernel, 
string serviceId) { Console.WriteLine($"======== Service Id: {serviceId} ========"); - var prompt = "Hello AI, what can you do for me?"; - - var result = await kernel.InvokePromptAsync(prompt, new(new PromptExecutionSettings { ServiceId = serviceId })); + var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId })); Console.WriteLine(result.GetValue()); } - private async Task RunByFirstServiceIdAsync(Kernel kernel, string[] serviceIds) + private async Task InvocationSettingsByFirstServiceIdAsync(Kernel kernel, string[] serviceIds) { Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========"); - var prompt = "Hello AI, what can you do for me?"; + var result = await kernel.InvokePromptAsync(ChatPrompt, new(serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId }))); - var function = kernel.CreateFunctionFromPrompt(prompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })); + Console.WriteLine(result.GetValue()); + } - var result = await kernel.InvokeAsync(function); + private async Task InvocationSettingsByFirstModelIdAsync(Kernel kernel, string[] modelIds) + { + Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========"); + + var result = await kernel.InvokePromptAsync(ChatPrompt, new(modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId }))); Console.WriteLine(result.GetValue()); } - private async Task RunByModelIdAsync(Kernel kernel, string modelId) + private async Task InvocationSettingsByModelIdAsync(Kernel kernel, string modelId) { Console.WriteLine($"======== Model Id: {modelId} ========"); - var prompt = "Hello AI, what can you do for me?"; + var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings() { ModelId = modelId })); - var result = await kernel.InvokePromptAsync( - prompt, - new(new 
PromptExecutionSettings() - { - ModelId = modelId - })); Console.WriteLine(result.GetValue()); } - private async Task RunByFirstModelIdAsync(Kernel kernel, string[] modelIds) + private async Task PreconfiguredFunctionSettingsByFirstServiceIdAsync(Kernel kernel, string[] serviceIds) { - Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========"); + Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========"); - var prompt = "Hello AI, what can you do for me?"; + var function = kernel.CreateFunctionFromPrompt(ChatPrompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })); + var result = await kernel.InvokeAsync(function); - var function = kernel.CreateFunctionFromPrompt(prompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })); + Console.WriteLine(result.GetValue()); + } + + private async Task PreconfiguredFunctionSettingsByFirstModelIdAsync(Kernel kernel, string[] modelIds) + { + Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========"); + var function = kernel.CreateFunctionFromPrompt(ChatPrompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })); var result = await kernel.InvokeAsync(function); + + Console.WriteLine(result.GetValue()); + } + + private async Task PreconfiguredFunctionSettingsByModelIdAsync(Kernel kernel, string modelId) + { + Console.WriteLine($"======== Model Id: {modelId} ========"); + + var function = kernel.CreateFunctionFromPrompt(ChatPrompt); + var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ModelId = modelId })); + + Console.WriteLine(result.GetValue()); + } + + private async Task PreconfiguredFunctionSettingsByServiceIdAsync(Kernel kernel, string serviceId) + { + Console.WriteLine($"======== Service Id: {serviceId} ========"); + + var function = 
kernel.CreateFunctionFromPrompt(ChatPrompt); + var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ServiceId = serviceId })); + Console.WriteLine(result.GetValue()); } } diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs index b59d5b8be9ea..794b6c7664f7 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs @@ -46,12 +46,12 @@ public KernelArguments(PromptExecutionSettings? executionSettings) /// Initializes a new instance of the class with the specified AI execution settings. /// /// The prompt execution settings. - public KernelArguments(IReadOnlyCollection? executionSettings) + public KernelArguments(IEnumerable? executionSettings) { this._arguments = new(StringComparer.OrdinalIgnoreCase); if (executionSettings is not null) { - var newExecutionSettings = new Dictionary(executionSettings.Count); + var newExecutionSettings = new Dictionary(); foreach (var settings in executionSettings) { var targetServiceId = settings.ServiceId ?? 
PromptExecutionSettings.DefaultServiceId; From 7845415c5e062b1d6b2fa01e79be63d740ddda7e Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Wed, 29 May 2024 12:50:08 +0100 Subject: [PATCH 07/15] Fix warnings --- .../SemanticKernel.Abstractions/Functions/KernelArguments.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs index 794b6c7664f7..555c53c26422 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs @@ -22,7 +22,7 @@ public sealed class KernelArguments : IDictionary, IReadOnlyDic { /// Dictionary of name/values for all the arguments in the instance. private readonly Dictionary _arguments; - private IReadOnlyDictionary? _executionSettings; + private Dictionary? _executionSettings; /// /// Initializes a new instance of the class with the specified AI execution settings. @@ -103,7 +103,7 @@ public IReadOnlyDictionary? ExecutionSettings // Clone the settings to avoid reference changes. this._executionSettings = value is IDictionary dictionary ? 
new Dictionary(dictionary) - : (IReadOnlyDictionary?)(value?.ToDictionary(kv => kv.Key, kv => kv.Value)); + : value?.ToDictionary(kv => kv.Key, kv => kv.Value); if (this._executionSettings is not null && this._executionSettings.Count != 0) { From 285782a7f6600893893179399457b138519b7e4e Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Thu, 30 May 2024 10:52:17 +0100 Subject: [PATCH 08/15] Address PR Comments --- .../Connectors_WithMultipleLLMs.cs | 126 ++++++++++++------ .../Functions/KernelArguments.cs | 2 +- 2 files changed, 83 insertions(+), 45 deletions(-) diff --git a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs index bcb5d729529c..830264ff41f9 100644 --- a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs +++ b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs @@ -1,47 +1,38 @@ // Copyright (c) Microsoft. All rights reserved. using Microsoft.SemanticKernel; -using xRetry; namespace ChatCompletion; public class Connectors_WithMultipleLLMs(ITestOutputHelper output) : BaseTest(output) { private const string ChatPrompt = "Hello AI, what can you do for me?"; - /// - /// Show how to run a prompt function and specify a specific service to use. 
- /// - [RetryFact(typeof(HttpOperationException))] - public async Task RunAsync() + + private static Kernel BuildKernel() { - Kernel kernel = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - serviceId: "AzureOpenAIChat", - modelId: TestConfiguration.AzureOpenAI.ChatModelId) - .AddOpenAIChatCompletion( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey, - serviceId: "OpenAIChat") - .Build(); - - // Preconfigured function settings - await PreconfiguredFunctionSettingsByFirstModelIdAsync(kernel, ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId]); - await PreconfiguredFunctionSettingsByFirstServiceIdAsync(kernel, ["NotFound", "AzureOpenAIChat", "OpenAIChat"]); - await PreconfiguredFunctionSettingsByModelIdAsync(kernel, TestConfiguration.OpenAI.ChatModelId); - await PreconfiguredFunctionSettingsByServiceIdAsync(kernel, "AzureOpenAIChat"); - - // Per invocation settings - await InvocationSettingsByServiceIdAsync(kernel, "AzureOpenAIChat"); - await InvocationSettingsByFirstServiceIdAsync(kernel, ["NotFound", "AzureOpenAIChat", "OpenAIChat"]); - await InvocationSettingsByModelIdAsync(kernel, TestConfiguration.OpenAI.ChatModelId); - await InvocationSettingsByFirstModelIdAsync(kernel, ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId]); + return Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + serviceId: "AzureOpenAIChat", + modelId: TestConfiguration.AzureOpenAI.ChatModelId) + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: 
TestConfiguration.OpenAI.ApiKey, + serviceId: "OpenAIChat") + .Build(); } - private async Task InvocationSettingsByServiceIdAsync(Kernel kernel, string serviceId) + /// + /// Invoke the prompt function to run for a specific service id. + /// + /// Service Id + [Theory] + [InlineData("AzureOpenAIChat")] + public async Task InvokePromptByServiceIdAsync(string serviceId) { + var kernel = BuildKernel(); Console.WriteLine($"======== Service Id: {serviceId} ========"); var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId })); @@ -49,35 +40,61 @@ private async Task InvocationSettingsByServiceIdAsync(Kernel kernel, string serv Console.WriteLine(result.GetValue()); } - private async Task InvocationSettingsByFirstServiceIdAsync(Kernel kernel, string[] serviceIds) + /// + /// Invoke the prompt function to run for a specific model id. + /// + [Fact] + private async Task InvokePromptByModelIdAsync() { - Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========"); + var modelId = TestConfiguration.OpenAI.ChatModelId; + var kernel = BuildKernel(); + Console.WriteLine($"======== Model Id: {modelId} ========"); - var result = await kernel.InvokePromptAsync(ChatPrompt, new(serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId }))); + var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings() { ModelId = modelId })); Console.WriteLine(result.GetValue()); } - private async Task InvocationSettingsByFirstModelIdAsync(Kernel kernel, string[] modelIds) + /// + /// Invoke the prompt function to preferably run for a list of specific service ids where the + /// first service id that is found respecting the order of the options provided will be used. 
+ /// + [Fact] + public async Task InvokePromptFunctionWithFirstMatchingServiceIdAsync() { - Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========"); + string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"]; + var kernel = BuildKernel(); + Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========"); - var result = await kernel.InvokePromptAsync(ChatPrompt, new(modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId }))); + var result = await kernel.InvokePromptAsync(ChatPrompt, new(serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId }))); Console.WriteLine(result.GetValue()); } - private async Task InvocationSettingsByModelIdAsync(Kernel kernel, string modelId) + /// + /// Invoke the prompt function to preferably run for a list of specific model ids where the + /// first model id that is found respecting the order of the options provided will be used. + /// + [Fact] + public async Task InvokePromptFunctionWithFirstMatchingModelIdAsync() { - Console.WriteLine($"======== Model Id: {modelId} ========"); + string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId]; + var kernel = BuildKernel(); + Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========"); - var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings() { ModelId = modelId })); + var result = await kernel.InvokePromptAsync(ChatPrompt, new(modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId }))); Console.WriteLine(result.GetValue()); } - private async Task PreconfiguredFunctionSettingsByFirstServiceIdAsync(Kernel kernel, string[] serviceIds) + /// + /// Create a function with a predefined configuration and invoke at later moment. 
+ /// + [Fact] + public async Task InvokePreconfiguredFunctionWithFirstMatchingServiceIdAsync() { + string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"]; + var kernel = BuildKernel(); Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========"); var function = kernel.CreateFunctionFromPrompt(ChatPrompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })); @@ -86,8 +103,16 @@ private async Task PreconfiguredFunctionSettingsByFirstServiceIdAsync(Kernel ker Console.WriteLine(result.GetValue()); } - private async Task PreconfiguredFunctionSettingsByFirstModelIdAsync(Kernel kernel, string[] modelIds) + /// + /// Create a function with a predefined configuration to preferably run for a list of specific model ids where the + /// first model id that is found respecting the order of the options provided will be used. + /// + [Fact] + public async Task InvokePreconfiguredFunctionWithFirstMatchingModelIdAsync() { + string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId]; + var kernel = BuildKernel(); + Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========"); var function = kernel.CreateFunctionFromPrompt(ChatPrompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })); @@ -96,8 +121,14 @@ private async Task PreconfiguredFunctionSettingsByFirstModelIdAsync(Kernel kerne Console.WriteLine(result.GetValue()); } - private async Task PreconfiguredFunctionSettingsByModelIdAsync(Kernel kernel, string modelId) + /// + /// Create a function with a predefined configuration to run for a specific model id. 
+ /// + [Fact] + public async Task InvokePreconfiguredFunctionByModelIdAsync() { + var modelId = TestConfiguration.OpenAI.ChatModelId; + var kernel = BuildKernel(); Console.WriteLine($"======== Model Id: {modelId} ========"); var function = kernel.CreateFunctionFromPrompt(ChatPrompt); @@ -106,8 +137,15 @@ private async Task PreconfiguredFunctionSettingsByModelIdAsync(Kernel kernel, st Console.WriteLine(result.GetValue()); } - private async Task PreconfiguredFunctionSettingsByServiceIdAsync(Kernel kernel, string serviceId) + /// + /// Create a function with a predefined configuration to run for a specific service id. + /// + /// Service Id + [Theory] + [InlineData("AzureOpenAIChat")] + public async Task InvokePreconfiguredFunctionByServiceIdAsync(string serviceId) { + var kernel = BuildKernel(); Console.WriteLine($"======== Service Id: {serviceId} ========"); var function = kernel.CreateFunctionFromPrompt(ChatPrompt); diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs index 555c53c26422..0c8a4298cc54 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs @@ -105,7 +105,7 @@ public IReadOnlyDictionary? ExecutionSettings ? new Dictionary(dictionary) : value?.ToDictionary(kv => kv.Key, kv => kv.Value); - if (this._executionSettings is not null && this._executionSettings.Count != 0) + if (this._executionSettings is not { Count: 0 }) { foreach (var kv in this._executionSettings!) 
{ From f707927ebae03ef1eae5902d27da69747c9c71b8 Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Thu, 30 May 2024 11:36:51 +0100 Subject: [PATCH 09/15] is not, inverted --- .../SemanticKernel.Abstractions/Functions/KernelArguments.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs index 0c8a4298cc54..b33954df85fb 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs @@ -105,7 +105,7 @@ public IReadOnlyDictionary? ExecutionSettings ? new Dictionary(dictionary) : value?.ToDictionary(kv => kv.Key, kv => kv.Value); - if (this._executionSettings is not { Count: 0 }) + if (this._executionSettings is { Count: > 0 }) { foreach (var kv in this._executionSettings!) { From 1b8cf016b8a60a8ce5df22fce142c9d520a6aafa Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Thu, 30 May 2024 11:40:36 +0100 Subject: [PATCH 10/15] Adding experimental tag --- .../SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs index e973c6f24755..83ad21f5fcc3 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Collections.ObjectModel; +using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.TextGeneration; @@ -31,6 +32,7 @@ public class PromptExecutionSettings /// Service identifier. 
/// This identifies the service these settings are configured for e.g., azure_openai_eastus, openai, ollama, huggingface, etc. /// + [Experimental("SKEXP0001")] [JsonPropertyName("service_id")] public string? ServiceId { From b0256630536169fa0c357ea9614b7fd3cea05b66 Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Tue, 4 Jun 2024 13:39:20 +0100 Subject: [PATCH 11/15] Update serviceId to be optional and not be auto-settable --- .../AI/PromptExecutionSettings.cs | 3 ++ .../Functions/KernelArguments.cs | 20 +++------ .../PromptTemplate/PromptTemplateConfig.cs | 31 ++++++------- .../OrderedAIServiceSelectorTests.cs | 4 +- .../PromptTemplateConfigTests.cs | 44 +++++++++++-------- 5 files changed, 50 insertions(+), 52 deletions(-) diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs index 83ad21f5fcc3..a396033b770d 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs @@ -32,6 +32,9 @@ public class PromptExecutionSettings /// Service identifier. /// This identifies the service these settings are configured for e.g., azure_openai_eastus, openai, ollama, huggingface, etc. /// + /// + /// When provided, this service identifier will be the key in a dictionary collection of execution settings for both and . + /// [Experimental("SKEXP0001")] [JsonPropertyName("service_id")] public string? 
ServiceId diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs index b33954df85fb..1bff0b90c2a7 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs @@ -22,7 +22,7 @@ public sealed class KernelArguments : IDictionary, IReadOnlyDic { /// Dictionary of name/values for all the arguments in the instance. private readonly Dictionary _arguments; - private Dictionary? _executionSettings; + private IReadOnlyDictionary? _executionSettings; /// /// Initializes a new instance of the class with the specified AI execution settings. @@ -100,27 +100,19 @@ public IReadOnlyDictionary? ExecutionSettings get => this._executionSettings; set { - // Clone the settings to avoid reference changes. - this._executionSettings = value is IDictionary dictionary - ? new Dictionary(dictionary) - : value?.ToDictionary(kv => kv.Key, kv => kv.Value); - if (this._executionSettings is { Count: > 0 }) { foreach (var kv in this._executionSettings!) { - // Ensures that if a service id is not specified and is not default, it is set to the current service id. - if (kv.Key != kv.Value.ServiceId) + // Ensures that if a service id is specified it needs to match to the current key in the dictionary. 
+ if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId) && kv.Key != kv.Value.ServiceId) { - if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId)) - { - throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings)); - } - - kv.Value.ServiceId = kv.Key; + throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings)); } } } + + this._executionSettings = value; } } diff --git a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs index 32212fab94f8..1cce254ec1a8 100644 --- a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs +++ b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs @@ -188,24 +188,19 @@ public Dictionary ExecutionSettings { Verify.NotNull(value); - // Clone the settings to avoid reference changes. - this._executionSettings = new(value); - if (this._executionSettings.Count != 0) + if (value.Count != 0) { - foreach (var kv in this._executionSettings) + foreach (var kv in value) { - // Ensures that if a service id is not specified and is not default, it is set to the current service id. - if (kv.Key != kv.Value.ServiceId) + // Ensures that if a service id is provided it must match the key in the dictionary. 
+ if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId) && kv.Key != kv.Value.ServiceId) { - if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId)) - { - throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings)); - } - - kv.Value.ServiceId = kv.Key; + throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings)); } } } + + this._executionSettings = value; } } @@ -243,19 +238,19 @@ public void AddExecutionSettings(PromptExecutionSettings settings, string? servi { Verify.NotNull(settings); - var key = serviceId ?? settings.ServiceId ?? PromptExecutionSettings.DefaultServiceId; + if (!string.IsNullOrWhiteSpace(serviceId) && !string.IsNullOrWhiteSpace(settings.ServiceId)) + { + throw new ArgumentException($"Service id must not be passed when '{nameof(settings.ServiceId)}' is already provided in execution settings.", nameof(serviceId)); + } - // To avoid any reference changes to the settings object, clone it before changing service id. - var clonedSettings = settings.Clone(); + var key = serviceId ?? settings.ServiceId ?? PromptExecutionSettings.DefaultServiceId; - // Overwrite the service id if provided in the method. 
- clonedSettings.ServiceId = key; if (this.ExecutionSettings.ContainsKey(key)) { throw new ArgumentException($"Execution settings for service id '{key}' already exists.", nameof(serviceId)); } - this.ExecutionSettings[key] = clonedSettings; + this.ExecutionSettings[key] = settings; } /// diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs index 400b725db483..15b001c13c99 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs @@ -75,8 +75,8 @@ public void ItGetsAIServiceConfigurationForTextGenerationByServiceId() Kernel kernel = builder.Build(); var promptConfig = new PromptTemplateConfig() { Template = "Hello AI" }; - var executionSettings = new PromptExecutionSettings() { ServiceId = "service2" }; - promptConfig.AddExecutionSettings(executionSettings); + var executionSettings = new PromptExecutionSettings(); + promptConfig.AddExecutionSettings(executionSettings, "service2"); var function = kernel.CreateFunctionFromPrompt(promptConfig); var serviceSelector = new OrderedAIServiceSelector(); diff --git a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs index b6bd83abb5e1..5fecdf71b8c3 100644 --- a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs @@ -106,7 +106,7 @@ public void DeserializingExpectMultipleModels() } [Fact] - public void DeserializingAutoSetServiceIdWhenNotProvided() + public void DeserializingDoesNotAutoSetServiceIdWhenNotProvided() { // Arrange string configPayload = """ @@ -150,12 +150,12 @@ public void DeserializingAutoSetServiceIdWhenNotProvided() // Assert 
Assert.NotNull(promptTemplateConfig); - Assert.Equal("service1", promptTemplateConfig.ExecutionSettings["service1"].ServiceId); - Assert.Equal("service2", promptTemplateConfig.ExecutionSettings["service2"].ServiceId); + Assert.Null(promptTemplateConfig.ExecutionSettings["service1"].ServiceId); + Assert.Null(promptTemplateConfig.ExecutionSettings["service2"].ServiceId); } [Fact] - public void DeserializingAutoSetServiceIdWhenDefault() + public void DeserializingDoesNotAutoSetServiceIdWhenDefault() { // Arrange string configPayload = """ @@ -187,7 +187,7 @@ public void DeserializingAutoSetServiceIdWhenDefault() // Assert Assert.NotNull(promptTemplateConfig); Assert.NotNull(promptTemplateConfig.DefaultExecutionSettings); - Assert.Equal(PromptExecutionSettings.DefaultServiceId, promptTemplateConfig.DefaultExecutionSettings?.ServiceId); + Assert.Null(promptTemplateConfig.DefaultExecutionSettings?.ServiceId); } [Fact] @@ -247,7 +247,7 @@ public void ItCannotAddExecutionSettingsWithSameServiceId() } [Fact] - public void ItAddExecutionSettingsAndOverwriteServiceIdAsExpected() + public void ItAddExecutionSettingsAndNeverOverwriteServiceId() { // Arrange var promptTemplateConfig = new PromptTemplateConfig(); @@ -256,29 +256,37 @@ public void ItAddExecutionSettingsAndOverwriteServiceIdAsExpected() // Act promptTemplateConfig.AddExecutionSettings(new PromptExecutionSettings { ModelId = "model1" }); promptTemplateConfig.AddExecutionSettings(new PromptExecutionSettings { ModelId = "model2" }, "service1"); - promptTemplateConfig.AddExecutionSettings(new PromptExecutionSettings { ServiceId = "service2", ModelId = "model-service-2" }, "override"); + promptTemplateConfig.AddExecutionSettings(new PromptExecutionSettings { ServiceId = "service2", ModelId = "model-service-2" }); promptTemplateConfig.AddExecutionSettings(new PromptExecutionSettings { ServiceId = "service3", ModelId = "model-service-3" }); - promptTemplateConfig.AddExecutionSettings(settings1, "service4"); + 
promptTemplateConfig.AddExecutionSettings(settings1); // Assert - // Overwrite with Default Assert.Equal("model1", promptTemplateConfig.ExecutionSettings["default"].ModelId); - Assert.Equal("default", promptTemplateConfig.ExecutionSettings["default"].ServiceId); + Assert.Null(promptTemplateConfig.ExecutionSettings["default"].ServiceId); - // Overwrite with ServiceId from Argument (Not Defaulting) Assert.Equal("model2", promptTemplateConfig.ExecutionSettings["service1"].ModelId); - Assert.Equal("service1", promptTemplateConfig.ExecutionSettings["service1"].ServiceId); + Assert.Null(promptTemplateConfig.ExecutionSettings["service1"].ServiceId); - // Overwrite with ServiceId from Argument - Assert.Equal("model-service-2", promptTemplateConfig.ExecutionSettings["override"].ModelId); - Assert.Equal("override", promptTemplateConfig.ExecutionSettings["override"].ServiceId); + Assert.Equal("model-service-2", promptTemplateConfig.ExecutionSettings["service2"].ModelId); + Assert.Equal("service2", promptTemplateConfig.ExecutionSettings["service2"].ServiceId); - // Don't override from argument and use ServiceId from ExecutionSettings Assert.Equal("model-service-3", promptTemplateConfig.ExecutionSettings["service3"].ModelId); Assert.Equal("service3", promptTemplateConfig.ExecutionSettings["service3"].ServiceId); - // Don't change settings by reference (it was cloned) + // Never changes settings id Assert.Equal("should not override", settings1.ServiceId); + Assert.True(promptTemplateConfig.ExecutionSettings.ContainsKey("should not override")); + } + + [Fact] + public void ItThrowsWhenServiceIdIsProvidedAndExecutionSettingsAlreadyHasAServiceIdPropertySet() + { + // Arrange + var promptTemplateConfig = new PromptTemplateConfig(); + var settings = new PromptExecutionSettings { ModelId = "model-service-3", ServiceId = "service2" }; + + // Act & Assert + Assert.Throws(() => promptTemplateConfig.AddExecutionSettings(settings, "service1")); } [Fact] @@ -328,7 +336,7 @@ public void 
DeserializingServiceIdSameIndexKeepsLast() Assert.NotNull(promptTemplate); Assert.NotNull(promptTemplate.ExecutionSettings); Assert.Single(promptTemplate.ExecutionSettings); - Assert.Equal("service1", promptTemplate.ExecutionSettings["service1"].ServiceId); + Assert.Null(promptTemplate.ExecutionSettings["service1"].ServiceId); Assert.Equal("gpt-3.5_turbo", promptTemplate.ExecutionSettings["service1"].ModelId); } From 2488dffbc176aacc52d98e0c8e6417195ad1e6c6 Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Tue, 4 Jun 2024 13:46:43 +0100 Subject: [PATCH 12/15] Fix warnings --- .../src/SemanticKernel.Abstractions/Functions/KernelArguments.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs index 1bff0b90c2a7..937143e98022 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs @@ -3,7 +3,6 @@ using System; using System.Collections; using System.Collections.Generic; -using System.Linq; using System.Text.Json.Serialization; #pragma warning disable CA1710 // Identifiers should have correct suffix From dbdb4afc1a0ba8cd59a9f360f6eadb434e714897 Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Tue, 4 Jun 2024 18:45:45 +0100 Subject: [PATCH 13/15] Address PR comments --- .../SemanticKernel.Abstractions/Functions/KernelArguments.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs index 937143e98022..d8e151a75eac 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs @@ -99,9 +99,9 @@ public 
IReadOnlyDictionary? ExecutionSettings get => this._executionSettings; set { - if (this._executionSettings is { Count: > 0 }) + if (value is { Count: > 0 }) { - foreach (var kv in this._executionSettings!) + foreach (var kv in value!) { // Ensures that if a service id is specified it needs to match to the current key in the dictionary. if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId) && kv.Key != kv.Value.ServiceId) From 423d24ee297c8d3ec4c650acb98a06fc8bff13bb Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Wed, 19 Jun 2024 11:57:01 +0100 Subject: [PATCH 14/15] Address PR Comments --- .../Connectors_WithMultipleLLMs.cs | 51 +++++++++++++++---- .../AI/PromptExecutionSettings.cs | 1 + .../Functions/KernelArguments.cs | 4 +- .../OrderedAIServiceSelectorTests.cs | 20 ++++++++ 4 files changed, 63 insertions(+), 13 deletions(-) diff --git a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs index 830264ff41f9..fbd3f6da549b 100644 --- a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs +++ b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs @@ -25,7 +25,7 @@ private static Kernel BuildKernel() } /// - /// Invoke the prompt function to run for a specific service id. + /// Shows how to invoke a prompt and specify the service id of the preferred AI service. When the prompt is executed the AI Service with the matching service id will be selected. /// /// Service Id [Theory] @@ -41,7 +41,7 @@ public async Task InvokePromptByServiceIdAsync(string serviceId) } /// - /// Invoke the prompt function to run for a specific model id. + /// Shows how to invoke a prompt and specify the model id of the preferred AI service. When the prompt is executed the AI Service with the matching model id will be selected. 
/// [Fact] private async Task InvokePromptByModelIdAsync() @@ -56,8 +56,8 @@ private async Task InvokePromptByModelIdAsync() } /// - /// Invoke the prompt function to preferably run for a list specific service ids where the - /// first service id that is found respecting the order of the options provided will be used. + /// Shows how to invoke a prompt and specify the service ids of the preferred AI services. + /// When the prompt is executed the AI Service will be selected based on the order of the provided service ids. /// [Fact] public async Task InvokePromptFunctionWithFirstMatchingServiceIdAsync() @@ -72,8 +72,8 @@ public async Task InvokePromptFunctionWithFirstMatchingServiceIdAsync() } /// - /// Invoke the prompt function to preferably run for a list of specific model ids where the - /// first model id that is found respecting the order of the options provided will be used. + /// Shows how to invoke a prompt and specify the model ids of the preferred AI services. + /// When the prompt is executed the AI Service will be selected based on the order of the provided model ids. /// [Fact] public async Task InvokePromptFunctionWithFirstMatchingModelIdAsync() @@ -88,7 +88,8 @@ public async Task InvokePromptFunctionWithFirstMatchingModelIdAsync() } /// - /// Create a function with a predefined configuration and invoke at later moment. + /// Shows how to create a KernelFunction from a prompt and specify the service ids of the preferred AI services. + /// When the function is invoked the AI Service will be selected based on the order of the provided service ids. /// [Fact] public async Task InvokePreconfiguredFunctionWithFirstMatchingServiceIdAsync() @@ -104,8 +105,8 @@ public async Task InvokePreconfiguredFunctionWithFirstMatchingServiceIdAsync() } /// - /// Create a function with a predefined configuration to preferably run for a list specific model ids where the - /// first model id that is found respecting the order of the options provided will be used. 
+ /// Shows how to create a KernelFunction from a prompt and specify the model ids of the preferred AI services. + /// When the function is invoked the AI Service will be selected based on the order of the provided model ids. /// [Fact] public async Task InvokePreconfiguredFunctionWithFirstMatchingModelIdAsync() @@ -122,7 +123,7 @@ public async Task InvokePreconfiguredFunctionWithFirstMatchingModelIdAsync() } /// - /// Create a function with a predefined configuration to run for a specific model id. + /// Shows how to invoke a KernelFunction and specify the model id of the AI Service the function will use. /// [Fact] public async Task InvokePreconfiguredFunctionByModelIdAsync() @@ -138,7 +139,7 @@ public async Task InvokePreconfiguredFunctionByModelIdAsync() } /// - /// Create a function with a predefined configuration to run for a specific service id. + /// Shows how to invoke a KernelFunction and specify the service id of the AI Service the function will use. /// /// Service Id [Theory] @@ -153,4 +154,32 @@ public async Task InvokePreconfiguredFunctionByServiceIdAsync(string serviceId) Console.WriteLine(result.GetValue()); } + + /// + /// Shows when specifying a non-existant ServiceId the kernel throws an exception. + /// + /// Service Id + [Theory] + [InlineData("NotFound")] + public async Task InvokePromptByNonExistingServiceIdThowsExceptionAsync(string serviceId) + { + var kernel = BuildKernel(); + Console.WriteLine($"======== Service Id: {serviceId} ========"); + + await Assert.ThrowsAsync(async () => await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId }))); + } + + /// + /// Shows how in the execution settings when no model id is found it falls back to the default service. 
+ /// + /// Model Id + [Theory] + [InlineData("NotFound")] + public async Task InvokePromptByNonExistingModelIdUsesDefaultServiceAsync(string modelId) + { + var kernel = BuildKernel(); + Console.WriteLine($"======== Model Id: {modelId} ========"); + + await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ModelId = modelId })); + } } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs index a396033b770d..f10ccaa3ff39 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs @@ -34,6 +34,7 @@ public class PromptExecutionSettings /// /// /// When provided, this service identifier will be the key in a dictionary collection of execution settings for both and . + /// If not provided the service identifier will be the default value in . /// [Experimental("SKEXP0001")] [JsonPropertyName("service_id")] diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs index d8e151a75eac..eda736b3f583 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs @@ -57,8 +57,8 @@ public KernelArguments(IEnumerable? executionSettings) if (newExecutionSettings.ContainsKey(targetServiceId)) { var exceptionMessage = (targetServiceId == PromptExecutionSettings.DefaultServiceId) - ? $"Default service id '{PromptExecutionSettings.DefaultServiceId}' must not be duplicated." - : $"Service id '{settings.ServiceId}' must not be duplicated and should match the key '{targetServiceId}'."; + ? $"Multiple prompt execution settings with the default service id '{PromptExecutionSettings.DefaultServiceId}' or no service id have been provided. 
Specify a single default prompt execution settings and provide a unique service id for all other instances." + : $"Multiple prompt execution settings with the service id '{targetServiceId}' have been provided. Provide a unique service id for all instances."; throw new ArgumentException(exceptionMessage, nameof(executionSettings)); } diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs index 15b001c13c99..eafac8ac5ca3 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs @@ -109,6 +109,26 @@ public void ItThrowsAKernelExceptionForNotFoundService() Assert.Throws(() => serviceSelector.SelectAIService(kernel, function, [])); } + [Fact] + public void ItGetsDefaultServiceForNotFoundModel() + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", new TextGenerationService("model_id_1")); + builder.Services.AddKeyedSingleton("service2", new TextGenerationService("model_id_2")); + Kernel kernel = builder.Build(); + + var promptConfig = new PromptTemplateConfig() { Template = "Hello AI" }; + promptConfig.AddExecutionSettings(new PromptExecutionSettings { ModelId = "notfound" }); + var function = kernel.CreateFunctionFromPrompt(promptConfig); + var serviceSelector = new OrderedAIServiceSelector(); + + // Act + // Assert + (var aiService, var defaultExecutionSettings) = serviceSelector.SelectAIService(kernel, function, []); + Assert.Equal(kernel.GetRequiredService("service2"), aiService); + } + [Fact] public void ItUsesDefaultServiceForNoExecutionSettings() { From 182b9bb6a11188dfa758c6259fa4754dc1728bd6 Mon Sep 17 00:00:00 2001 From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Date: Wed, 19 Jun 2024 12:08:34 +0100 Subject: [PATCH 15/15] Typo fix --- 
.../Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs index fbd3f6da549b..81fbc2492d4a 100644 --- a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs +++ b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs @@ -156,12 +156,12 @@ public async Task InvokePreconfiguredFunctionByServiceIdAsync(string serviceId) } /// - /// Shows when specifying a non-existant ServiceId the kernel throws an exception. + /// Shows when specifying a non-existent ServiceId the kernel throws an exception. /// /// Service Id [Theory] [InlineData("NotFound")] - public async Task InvokePromptByNonExistingServiceIdThowsExceptionAsync(string serviceId) + public async Task InvokePromptByNonExistingServiceIdThrowsExceptionAsync(string serviceId) { var kernel = BuildKernel(); Console.WriteLine($"======== Service Id: {serviceId} ========");