From d4ac6c404d1194b2cbd64ff9c34a5ea10bc5c542 Mon Sep 17 00:00:00 2001 From: kavin <115390646+singhk97@users.noreply.github.com> Date: Thu, 17 Oct 2024 09:35:40 -0700 Subject: [PATCH] [C#] feat: O1 model support (#2111) ## Linked issues closes: #2103 ## Details Added support for `o1-preview` and `o1-mini` models. * Bumped `OpenAI` and `Azure.AI.OpenAI` to `2.1.0-beta.1`. * Tested o1 support with light bot sample with monologue augmentation. * Updated `teamsChefBot-streaming` to use deployed `Microsoft.Teams.AI` nuget package. * Fixed `LayoutSection` bug with incorrect ordering of sections. ## Attestation Checklist - [x] My code follows the style guidelines of this project - I have checked for/fixed spelling, linting, and other errors - I have commented my code for clarity - I have made corresponding changes to the documentation (updating the doc strings in the code is sufficient) - My changes generate no new warnings - I have added tests that validate my changes, and provide sufficient test coverage. 
I have tested with: - Local testing - E2E testing in Teams - New and existing unit tests pass locally with my changes --- .../AITests/AssistantMessageTests.cs | 2 +- .../AITests/ChatMessageTests.cs | 12 +-- .../Models/ChatCompletionToolCallTests.cs | 2 +- .../Models/ChatMessageExtensionsTests.cs | 10 +-- .../AITests/Models/OpenAIModelTests.cs | 8 +- .../AITests/OpenAIEmbeddingsTests.cs | 4 +- .../Microsoft.Teams.AI.Tests.csproj | 4 +- .../TestUtils/OpenAIModelFactory.cs | 74 ++++------------ .../TestUtils/TestAssistantsOpenAIClient.cs | 14 +-- .../Microsoft.TeamsAI/AI/Clients/LLMClient.cs | 2 +- .../AI/Embeddings/OpenAIEmbeddings.cs | 8 +- .../AI/Models/AssistantsMessage.cs | 11 +-- .../AI/Models/ChatCompletionToolCall.cs | 6 +- .../AI/Models/ChatMessage.cs | 30 +++---- .../AI/Models/MessageContext.cs | 10 ++- .../AI/Models/OpenAIModel.cs | 86 +++++++++++-------- .../AI/Planners/ActionPlanner.cs | 2 +- .../AI/Planners/AssistantsPlanner.cs | 28 +++--- .../AI/Prompts/PromptManager.cs | 2 +- .../AI/Prompts/PromptTemplate.cs | 6 +- .../AI/Prompts/Sections/LayoutSection.cs | 4 +- .../AI/Tokenizers/GPTTokenizer.cs | 4 +- .../Application/TeamsAttachmentDownloader.cs | 4 +- .../Microsoft.Teams.AI.csproj | 8 +- .../TeamsChefBot.csproj | 17 +--- .../06.assistants.a.mathBot/MathBot.csproj | 2 +- .../06.assistants.b.orderBot/OrderBot.csproj | 4 +- .../06.assistants.b.orderBot/Program.cs | 14 +-- getting-started/CONCEPTS/STREAMING.md | 1 + 29 files changed, 172 insertions(+), 207 deletions(-) diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AssistantMessageTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AssistantMessageTests.cs index 9d2cb6595..31725bc8a 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AssistantMessageTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AssistantMessageTests.cs @@ -15,7 +15,7 @@ public void Test_Constructor() { // Arrange 
MessageContent content = OpenAIModelFactory.CreateMessageContent("message", "fileId"); - Mock fileClientMock = new Mock(); + Mock fileClientMock = new Mock(); fileClientMock.Setup(fileClient => fileClient.DownloadFileAsync("fileId", It.IsAny())).Returns(() => { return Task.FromResult(ClientResult.FromValue(BinaryData.FromString("test"), new Mock().Object)); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ChatMessageTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ChatMessageTests.cs index 4c7b9a0fa..ab37f6541 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ChatMessageTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ChatMessageTests.cs @@ -48,7 +48,7 @@ public void Test_Initialization_From_OpenAISdk_ChatMessage() ""citations"": [ {{ ""title"": ""test-title"", - ""url"": ""test-url"", + ""url"": ""https://www.test-uri.com/"", ""content"": ""test-content"" }} ] @@ -69,7 +69,7 @@ public void Test_Initialization_From_OpenAISdk_ChatMessage() Assert.NotNull(context); Assert.Single(context.Citations); Assert.Equal("test-title", context.Citations[0].Title); - Assert.Equal("test-url", context.Citations[0].Url); + Assert.Equal("https://www.test-uri.com/", context.Citations[0].Url); Assert.Equal("test-content", context.Citations[0].Content); } @@ -179,10 +179,10 @@ public void Test_AssistantRole_ToOpenAISdkChatMessage_FunctionCall() // Assert var assistantMessage = result as AssistantChatMessage; Assert.NotNull(assistantMessage); - Assert.Equal("test-content", assistantMessage.Content[0].Text); + Assert.Empty(assistantMessage.Content); // TODO: Uncomment when participant name issue is resolved. 
//Assert.Equal("test-name", assistantMessage.ParticipantName); - Assert.Equal("test-arg1", assistantMessage.FunctionCall.FunctionArguments); + Assert.Equal("test-arg1", assistantMessage.FunctionCall.FunctionArguments.ToString()); Assert.Equal("test-name", assistantMessage.FunctionCall.FunctionName); } @@ -206,7 +206,7 @@ public void Test_AssistantRole_ToOpenAISdkChatMessage_ActionCall() // Assert var assistantMessage = result as AssistantChatMessage; Assert.NotNull(assistantMessage); - Assert.Equal("test-content", assistantMessage.Content[0].Text); + Assert.Empty(assistantMessage.Content); // TODO: Uncomment when participant name issue is resolved. //Assert.Equal("test-name", assistantMessage.ParticipantName); @@ -215,7 +215,7 @@ public void Test_AssistantRole_ToOpenAISdkChatMessage_ActionCall() Assert.NotNull(toolCall); Assert.Equal("test-id", toolCall.Id); Assert.Equal("test-tool-name", toolCall.FunctionName); - Assert.Equal("test-tool-arg1", toolCall.FunctionArguments); + Assert.Equal("test-tool-arg1", toolCall.FunctionArguments.ToString()); } [Fact] diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatCompletionToolCallTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatCompletionToolCallTests.cs index 0455a759e..7105c8693 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatCompletionToolCallTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatCompletionToolCallTests.cs @@ -11,7 +11,7 @@ public sealed class ChatCompletionToolCallTests public void Test_ChatCompletionsToolCall_ToFunctionToolCall() { // Arrange - var functionToolCall = ChatToolCall.CreateFunctionToolCall("test-id", "test-name", "test-arg1"); + var functionToolCall = ChatToolCall.CreateFunctionToolCall("test-id", "test-name", BinaryData.FromString("test-arg1")); // Act var azureSdkFunctionToolCall = 
ChatCompletionsToolCall.FromChatToolCall(functionToolCall); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs index ac7618e2c..a05156a69 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs @@ -86,10 +86,10 @@ public void Test_AssistantRole_ToOpenAISdkChatMessage_FunctionCall() // Assert var assistantMessage = result as AssistantChatMessage; Assert.NotNull(assistantMessage); - Assert.Equal("test-content", assistantMessage.Content[0].Text); + Assert.Empty(assistantMessage.Content); // TODO: Uncomment when participant name issue is resolved. //Assert.Equal("test-name", assistantMessage.ParticipantName); - Assert.Equal("test-arg1", assistantMessage.FunctionCall.FunctionArguments); + Assert.Equal("test-arg1", assistantMessage.FunctionCall.FunctionArguments.ToString()); Assert.Equal("test-name", assistantMessage.FunctionCall.FunctionName); } @@ -113,14 +113,14 @@ public void Test_AssistantRole_ToOpenAISdkChatMessage_ToolCall() // Assert var assistantMessage = result as AssistantChatMessage; Assert.NotNull(assistantMessage); - Assert.Equal("test-content", assistantMessage.Content[0].Text); + Assert.Empty(assistantMessage.Content); Assert.Single(assistantMessage.ToolCalls); ChatToolCall toolCall = assistantMessage.ToolCalls[0]; Assert.NotNull(toolCall); Assert.Equal("test-id", toolCall.Id); Assert.Equal("test-tool-name", toolCall.FunctionName); - Assert.Equal("test-tool-arg1", toolCall.FunctionArguments); + Assert.Equal("test-tool-arg1", toolCall.FunctionArguments.ToString()); } [Fact] @@ -198,7 +198,7 @@ public void Test_ChatCompletionsToolCall_ToFunctionToolCall() Assert.NotNull(chatToolCall); Assert.Equal("test-id", 
chatToolCall.Id); Assert.Equal("test-name", chatToolCall.FunctionName); - Assert.Equal("test-arg1", chatToolCall.FunctionArguments); + Assert.Equal("test-arg1", chatToolCall.FunctionArguments.ToString()); } [Fact] diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs index 1e9e1bb47..2b5b98f27 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs @@ -40,7 +40,7 @@ public void Test_Constructor_AzureOpenAI_InvalidAzureApiVersion() var options = new AzureOpenAIModelOptions("test-key", "test-deployment", "https://test.openai.azure.com/"); var versions = new List { - "2024-04-01-preview", "2024-05-01-preview", "2024-06-01" + "2024-06-01", "2024-08-01-preview", "2024-10-01-preview" }; // Act @@ -279,8 +279,8 @@ public async Task Test_CompletePromptAsync_AzureOpenAI_Chat_WithTools() Assert.NotNull(result.Message.ActionCalls); Assert.Single(result.Message.ActionCalls); - Assert.Equal("testAction", result.Message.ActionCalls[0].Function.Name); - + Assert.Equal("testAction", result.Message.ActionCalls[0].Function!.Name); + Assert.Null(result.Error); Assert.Equal(ChatRole.Assistant, result.Message.Role); Assert.Null(result.Message.Content); @@ -326,7 +326,7 @@ public async Task Test_CompletePromptAsync_AzureOpenAI_Streaming() ] }}")); - TestAsyncResultCollection updates = new(update!, Mock.Of()); + TestAsyncCollectionResult updates = new(update!, Mock.Of()); var response = new TestResponse(200, string.Empty); clientMock.Setup((client) => diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/OpenAIEmbeddingsTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/OpenAIEmbeddingsTests.cs index b7d84bc51..46a0a8ddc 100644 --- 
a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/OpenAIEmbeddingsTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/OpenAIEmbeddingsTests.cs @@ -37,7 +37,7 @@ public async Task Test_OpenAI_CreateEmbeddings_ReturnEmbeddings() IList inputs = new List { "test" }; var clientMock = new Mock(new ApiKeyCredential(apiKey), It.IsAny()); var response = new TestResponse(200, string.Empty); - var embeddingCollection = ModelReaderWriter.Read(BinaryData.FromString(@"{ + var embeddingCollection = ModelReaderWriter.Read(BinaryData.FromString(@"{ ""data"": [ { ""object"": ""embedding"", @@ -76,7 +76,7 @@ public async Task Test_AzureOpenAI_CreateEmbeddings_ReturnEmbeddings() IList inputs = new List { "test" }; var clientMock = new Mock(new ApiKeyCredential(apiKey), It.IsAny()); var response = new TestResponse(200, string.Empty); - var embeddingCollection = ModelReaderWriter.Read(BinaryData.FromString(@"{ + var embeddingCollection = ModelReaderWriter.Read(BinaryData.FromString(@"{ ""data"": [ { ""object"": ""embedding"", diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj index e5e48327f..422955daf 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj @@ -11,13 +11,13 @@ - + - + diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/OpenAIModelFactory.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/OpenAIModelFactory.cs index fdff163f4..644611a5e 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/OpenAIModelFactory.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/OpenAIModelFactory.cs @@ -1,7 +1,7 @@ using OpenAI.Assistants; -using OpenAI.Files; 
using System.ClientModel; using System.ClientModel.Primitives; +using OAI = OpenAI; namespace Microsoft.Teams.AI.Tests.TestUtils { @@ -89,7 +89,7 @@ public static MessageContent CreateMessageContent(string message, string fileId) return threadMessage.Content[0]; } - public static OpenAIFileInfo CreateOpenAIFileInfo(string fileId) + public static OAI.Files.OpenAIFile CreateOpenAIFileInfo(string fileId) { var json = @$"{{ ""id"": ""{fileId}"", @@ -100,7 +100,7 @@ public static OpenAIFileInfo CreateOpenAIFileInfo(string fileId) ""purpose"": ""assistants"" }}"; - var fileInfo = ModelReaderWriter.Read(BinaryData.FromString(json))!; + var fileInfo = ModelReaderWriter.Read(BinaryData.FromString(json))!; return fileInfo; } @@ -160,82 +160,40 @@ public TestRequiredAction(string toolCallId, string functionName, string functio } } - internal sealed class TestAsyncPageCollection : AsyncPageCollection where T : class + internal sealed class TestAsyncCollectionResult : AsyncCollectionResult where T : class { public List Items; internal PipelineResponse _pipelineResponse; - private IAsyncEnumerator> _enumerator; - public TestAsyncPageCollection(List items, PipelineResponse response) + public TestAsyncCollectionResult(List items, PipelineResponse response) { Items = items; _pipelineResponse = response; - _enumerator = new TestAsyncEnumerator(items, response); } - protected override IAsyncEnumerator> GetAsyncEnumeratorCore(CancellationToken cancellationToken = default) + public TestAsyncCollectionResult(T item, PipelineResponse response) { - return _enumerator; - } - - protected override Task> GetCurrentPageAsyncCore() - { - return Task.FromResult(_enumerator.Current); - } - } - - internal sealed class TestAsyncEnumerator : IAsyncEnumerator> where T : class - { - private readonly List _items; - private readonly PipelineResponse _pipelineResponse; - private bool _movedOnToNext; - - public TestAsyncEnumerator(List items, PipelineResponse response) - { - _items = items; + Items = 
new() { item }; _pipelineResponse = response; - _movedOnToNext = false; } - public PageResult Current => PageResult.Create(_items, ContinuationToken.FromBytes(BinaryData.FromString("")), null, _pipelineResponse); - - public ValueTask DisposeAsync() + public override ContinuationToken? GetContinuationToken(ClientResult page) { - return new ValueTask(); + return ContinuationToken.FromBytes(BinaryData.FromString("")); } - public ValueTask MoveNextAsync() + public async override IAsyncEnumerable GetRawPagesAsync() { - if (!_movedOnToNext) - { - return new ValueTask(true); - } - else - { - _movedOnToNext = true; - return new ValueTask(false); - } - + yield return await Task.FromResult(ClientResult.FromValue(Items, _pipelineResponse)); } - } - - internal sealed class TestAsyncResultCollection : AsyncCollectionResult where T : class - { - public List Items = new(); - internal PipelineResponse _pipelineResponse; - - public TestAsyncResultCollection(T item, PipelineResponse response) + protected async override IAsyncEnumerable GetValuesFromPageAsync(ClientResult page) { - Items.Add(item); - _pipelineResponse = response; - } + foreach (T item in Items) + { + yield return await Task.FromResult(item); + } -#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - public override async IAsyncEnumerator GetAsyncEnumerator(CancellationToken cancellationToken = default) -#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously - { - yield return FromValue(Items[0], _pipelineResponse); } } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/TestAssistantsOpenAIClient.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/TestAssistantsOpenAIClient.cs index 19d53a52e..bf908cc2c 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/TestAssistantsOpenAIClient.cs +++ 
b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/TestAssistantsOpenAIClient.cs @@ -76,7 +76,7 @@ private ThreadMessage _CreateMessage(string threadId, string message) return newMessage; } - public override AsyncPageCollection GetMessagesAsync(string threadId, MessageCollectionOptions options, CancellationToken cancellationToken = default) + public override AsyncCollectionResult GetMessagesAsync(string threadId, MessageCollectionOptions options, CancellationToken cancellationToken = default) { while (RemainingMessages.Count > 0) { @@ -86,12 +86,12 @@ public override AsyncPageCollection GetMessagesAsync(string threa // Sorted by oldest first List messages = Messages[threadId].ToList(); - if (options != null && options.Order != null && options.Order.Value == ListOrder.NewestFirst) + if (options != null && options.Order != null && options.Order.Value == MessageCollectionOrder.Descending) { messages.Reverse(); } - return new TestAsyncPageCollection(messages, Mock.Of()); + return new TestAsyncCollectionResult(messages, Mock.Of()); } public override Task> CreateRunAsync(string threadId, string assistantId, RunCreationOptions createRunOptions, CancellationToken cancellationToken = default) @@ -152,14 +152,14 @@ public override Task> GetRunAsync(string threadId, strin return runWithUpdatedStatus; } - public override AsyncPageCollection GetRunsAsync(string threadId, RunCollectionOptions? options = null, CancellationToken cancellationToken = default) + public override AsyncCollectionResult GetRunsAsync(string threadId, RunCollectionOptions? options = null, CancellationToken cancellationToken = default) { - AsyncPageCollection response; + AsyncCollectionResult response; // AssistantsPlanner only needs the get the latest. 
if (Runs[threadId].Count() == 0) { - response = new TestAsyncPageCollection(new List(), Mock.Of()); + response = new TestAsyncCollectionResult(new List(), Mock.Of()); return response; } @@ -167,7 +167,7 @@ public override AsyncPageCollection GetRunsAsync(string threadId, Run ThreadRun run = Runs[threadId][lastIndex]; ThreadRun runWithUpdatedStatus = _GetRun(threadId, run.Id)!; - response = new TestAsyncPageCollection(new List() { runWithUpdatedStatus }, Mock.Of()); + response = new TestAsyncCollectionResult(new List() { runWithUpdatedStatus }, Mock.Of()); return response; } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs index e0097d935..a77e789c2 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs @@ -368,7 +368,7 @@ CancellationToken cancellationToken repairTemplate.Prompt = new(new() { this.Options.Template.Prompt, - new ConversationHistorySection($"{this.Options.HistoryVariable}-repair") + new ConversationHistorySection($"{this.Options.HistoryVariable}-repair", -1) }); if (this.Options.LogRepairs) diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Embeddings/OpenAIEmbeddings.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Embeddings/OpenAIEmbeddings.cs index 63e2b7b93..fb2c3a2ea 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Embeddings/OpenAIEmbeddings.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Embeddings/OpenAIEmbeddings.cs @@ -129,8 +129,8 @@ public async Task CreateEmbeddingsAsync(IList inputs try { DateTime startTime = DateTime.Now; - ClientResult response = await embeddingsClient.GenerateEmbeddingsAsync(inputs); - List> embeddingItems = response.Value.OrderBy(item => item.Index).Select(item => item.Vector).ToList(); + ClientResult response = await 
embeddingsClient.GenerateEmbeddingsAsync(inputs); + List> embeddingItems = response.Value.OrderBy(item => item.Index).Select(item => item.ToFloats()).ToList(); if (_options.LogRequests!.Value) { @@ -170,9 +170,9 @@ public async Task CreateEmbeddingsAsync(IList inputs { return apiVersion switch { - "2024-04-01-preview" => ServiceVersion.V2024_04_01_Preview, - "2024-05-01-preview" => ServiceVersion.V2024_05_01_Preview, "2024-06-01" => ServiceVersion.V2024_06_01, + "2024-08-01-preview" => ServiceVersion.V2024_08_01_Preview, + "2024-10-01-preview" => ServiceVersion.V2024_10_01_Preview, _ => null, }; } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/AssistantsMessage.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/AssistantsMessage.cs index 13a52336e..0d09229da 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/AssistantsMessage.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/AssistantsMessage.cs @@ -2,6 +2,7 @@ using Microsoft.Bot.Schema; using OpenAI.Assistants; using OpenAI.Files; +using OAI = OpenAI; namespace Microsoft.Teams.AI.AI.Models @@ -26,7 +27,7 @@ public class AssistantsMessage : ChatMessage /// /// The Assistants API thread message. /// The OpenAI File client. - public AssistantsMessage(MessageContent content, FileClient? fileClient = null) : base(ChatRole.Assistant) + public AssistantsMessage(MessageContent content, OpenAIFileClient? fileClient = null) : base(ChatRole.Assistant) { this.MessageContent = content; @@ -39,7 +40,7 @@ public AssistantsMessage(MessageContent content, FileClient? fileClient = null) MessageContext context = new(); List>> fileContentDownloadTasks = new(); - List>> fileInfoDownloadTasks = new(); + List>> fileInfoDownloadTasks = new(); for (int i = 0; i < content.TextAnnotations.Count; i++) { @@ -73,7 +74,7 @@ public AssistantsMessage(MessageContent content, FileClient? 
fileClient = null) // Create attachments out of these downloaded files // Wait for tasks to complete ClientResult[] downloadedFileContent = fileContentDownloadTasks.Select((task) => task.Result).ToArray(); - ClientResult[] downloadedFileInfo = fileInfoDownloadTasks.Select((task) => task.Result).ToArray(); + ClientResult[] downloadedFileInfo = fileInfoDownloadTasks.Select((task) => task.Result).ToArray(); for (int i = 0; i < downloadedFileContent.Length; i++) { @@ -128,7 +129,7 @@ public class OpenAIFile /// /// Represents an OpenAI File information /// - public OpenAIFileInfo FileInfo; + public OAI.Files.OpenAIFile FileInfo; /// /// Represents the contents of an OpenAI File @@ -173,7 +174,7 @@ public class OpenAIFile /// /// The OpenAI File /// The OpenAI File contents - public OpenAIFile(OpenAIFileInfo fileInfo, BinaryData fileContent) + public OpenAIFile(OAI.Files.OpenAIFile fileInfo, BinaryData fileContent) { FileInfo = fileInfo; FileContent = fileContent; diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatCompletionToolCall.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatCompletionToolCall.cs index d3a484d8c..1532893fc 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatCompletionToolCall.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatCompletionToolCall.cs @@ -44,7 +44,7 @@ internal ChatToolCall ToChatToolCall() if (this.Type == ToolType.Function) { ChatCompletionsFunctionToolCall functionToolCall = (ChatCompletionsFunctionToolCall)this; - return ChatToolCall.CreateFunctionToolCall(functionToolCall.Id, functionToolCall.Name, functionToolCall.Arguments); + return ChatToolCall.CreateFunctionToolCall(functionToolCall.Id, functionToolCall.Name, BinaryData.FromString(functionToolCall.Arguments)); } throw new TeamsAIException($"Invalid tool type: {this.Type}"); @@ -60,7 +60,7 @@ internal static ChatCompletionsToolCall FromChatToolCall(ChatToolCall toolCall) { if 
(toolCall.Kind == ChatToolCallKind.Function) { - return new ChatCompletionsFunctionToolCall(toolCall.Id, toolCall.FunctionName, toolCall.FunctionArguments); + return new ChatCompletionsFunctionToolCall(toolCall.Id, toolCall.FunctionName, toolCall.FunctionArguments.ToString()); } throw new TeamsAIException($"Invalid ChatCompletionsToolCall type: {toolCall.GetType().Name}"); @@ -70,7 +70,7 @@ internal static ChatCompletionsToolCall FromStreamingChatToolCall(StreamingChatT { if (toolCall.Kind == ChatToolCallKind.Function) { - return new ChatCompletionsFunctionToolCall(toolCall.Id, toolCall.FunctionName, toolCall.FunctionArgumentsUpdate); + return new ChatCompletionsFunctionToolCall(toolCall.ToolCallId, toolCall.FunctionName, toolCall.FunctionArgumentsUpdate.ToString()); } throw new TeamsAIException($"Invalid ChatCompletionsToolCall type: {toolCall.GetType().Name}"); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs index 871ddfc49..1f0cc4d4f 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs @@ -1,6 +1,4 @@ -using System.Diagnostics; -using Azure.AI.OpenAI; -using Azure.AI.OpenAI.Chat; +using Azure.AI.OpenAI.Chat; using Microsoft.Bot.Schema; using Microsoft.Teams.AI.Exceptions; using Microsoft.Teams.AI.Utilities; @@ -113,7 +111,7 @@ internal ChatMessage(ChatCompletion chatCompletion) if (chatCompletion.FunctionCall != null && chatCompletion.FunctionCall.FunctionName != string.Empty) { this.Name = chatCompletion.FunctionCall.FunctionName; - this.FunctionCall = new FunctionCall(chatCompletion.FunctionCall.FunctionName, chatCompletion.FunctionCall.FunctionArguments); + this.FunctionCall = new FunctionCall(chatCompletion.FunctionCall.FunctionName, chatCompletion.FunctionCall.FunctionArguments.ToString()); } if (chatCompletion.ToolCalls 
!= null && chatCompletion.ToolCalls.Count > 0) @@ -127,7 +125,7 @@ internal ChatMessage(ChatCompletion chatCompletion) } #pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. - AzureChatMessageContext? azureContext = chatCompletion.GetAzureMessageContext(); + ChatMessageContext? azureContext = chatCompletion.GetMessageContext(); #pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. if (azureContext != null) { @@ -155,7 +153,7 @@ internal ChatMessage(StreamingChatCompletionUpdate streamingChatCompletionUpdate if (streamingChatCompletionUpdate.FunctionCallUpdate != null && streamingChatCompletionUpdate.FunctionCallUpdate.FunctionName != string.Empty) { this.Name = streamingChatCompletionUpdate.FunctionCallUpdate.FunctionName; - this.FunctionCall = new FunctionCall(streamingChatCompletionUpdate.FunctionCallUpdate.FunctionName, streamingChatCompletionUpdate.FunctionCallUpdate.FunctionArgumentsUpdate); + this.FunctionCall = new FunctionCall(streamingChatCompletionUpdate.FunctionCallUpdate.FunctionName, streamingChatCompletionUpdate.FunctionCallUpdate.FunctionArgumentsUpdate.ToString()); } if (streamingChatCompletionUpdate.ToolCallUpdates != null && streamingChatCompletionUpdate.ToolCallUpdates.Count > 0) @@ -168,7 +166,7 @@ internal ChatMessage(StreamingChatCompletionUpdate streamingChatCompletionUpdate } #pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. - AzureChatMessageContext? azureContext = streamingChatCompletionUpdate.GetAzureMessageContext(); + ChatMessageContext? 
azureContext = streamingChatCompletionUpdate.GetMessageContext(); #pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. if (azureContext != null) { @@ -204,12 +202,12 @@ internal OAI.Chat.ChatMessage ToOpenAIChatMessage() { if (contentPart is TextContentPart textPart) { - contentItems.Add(ChatMessageContentPart.CreateTextMessageContentPart(textPart.Text)); + contentItems.Add(ChatMessageContentPart.CreateTextPart(textPart.Text)); textContentBuilder.AppendLine(textPart.Text); } else if (contentPart is ImageContentPart imagePart) { - contentItems.Add(ChatMessageContentPart.CreateImageMessageContentPart(new Uri(imagePart.ImageUrl))); + contentItems.Add(ChatMessageContentPart.CreateImagePart(new Uri(imagePart.ImageUrl))); } } } @@ -245,8 +243,8 @@ internal OAI.Chat.ChatMessage ToOpenAIChatMessage() if (this.FunctionCall != null) { - ChatFunctionCall functionCall = new(this.FunctionCall.Name ?? "", this.FunctionCall.Arguments ?? ""); - assistantMessage = new AssistantChatMessage(functionCall, textContent); + ChatFunctionCall functionCall = new(this.FunctionCall.Name ?? "", BinaryData.FromString(this.FunctionCall.Arguments ?? 
"")); + assistantMessage = new AssistantChatMessage(functionCall); } else if (this.ActionCalls != null) { @@ -255,7 +253,7 @@ internal OAI.Chat.ChatMessage ToOpenAIChatMessage() { toolCalls.Add(actionCall.ToChatToolCall()); } - assistantMessage = new AssistantChatMessage(toolCalls, textContent); + assistantMessage = new AssistantChatMessage(toolCalls); } else { @@ -394,7 +392,7 @@ public ActionCall(ChatToolCall toolCall) } Id = toolCall.Id; - Function = new ActionFunction(toolCall.FunctionName, toolCall.FunctionArguments); + Function = new ActionFunction(toolCall.FunctionName, toolCall.FunctionArguments.ToString()); } /// @@ -409,15 +407,15 @@ public ActionCall(StreamingChatToolCallUpdate toolCall) throw new TeamsAIException($"Invalid ActionCall type: {toolCall.GetType().Name}"); } - Id = toolCall.Id; - Function = new ActionFunction(toolCall.FunctionName, toolCall.FunctionArgumentsUpdate); + Id = toolCall.ToolCallId; + Function = new ActionFunction(toolCall.FunctionName, toolCall.FunctionArgumentsUpdate.ToString()); } internal ChatToolCall ToChatToolCall() { if (this.Type == ActionCallType.Function) { - return ChatToolCall.CreateFunctionToolCall(Id, Function!.Name, Function.Arguments); + return ChatToolCall.CreateFunctionToolCall(Id, Function!.Name, BinaryData.FromString(Function.Arguments)); } throw new TeamsAIException($"Invalid tool type: {this.Type}"); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/MessageContext.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/MessageContext.cs index 10f808f4b..2b7c89534 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/MessageContext.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/MessageContext.cs @@ -27,14 +27,18 @@ public MessageContext() { } /// Creates a MessageContext using OpenAI.Chat.AzureChatMessageContext. 
/// /// - internal MessageContext(AzureChatMessageContext azureContext) +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + internal MessageContext(ChatMessageContext azureContext) +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. { if (azureContext.Citations != null) { - foreach (AzureChatCitation citation in azureContext.Citations) +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + foreach (ChatCitation citation in azureContext.Citations) { - this.Citations.Add(new Citation(citation.Content, citation.Title, citation.Url)); + this.Citations.Add(new Citation(citation.Content, citation.Title, citation.Uri.ToString())); } +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. } this.Intent = azureContext.Intent; diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs index b3fbad3c0..be7274c9d 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs @@ -18,14 +18,33 @@ using ServiceVersion = Azure.AI.OpenAI.AzureOpenAIClientOptions.ServiceVersion; using Azure.AI.OpenAI.Chat; using OpenAI.Chat; -using Microsoft.Recognizers.Text.NumberWithUnit.Dutch; using Microsoft.Teams.AI.Application; +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
namespace Microsoft.Teams.AI.AI.Models { /// - /// A `PromptCompletionModel` for calling OpenAI and Azure OpenAI hosted models. + /// A `IPromptCompletionModel` for calling OpenAI and Azure OpenAI hosted models. /// + /// + /// The model has been updated to support calling OpenAI's new o1 family of models. That currently + /// comes with a few constraints. These constraints are mostly handled for you but are worth noting: + /// + /// * The o1 models introduce a new `max_completion_tokens` parameter and they've deprecated the + /// `max_tokens` parameter. The model will automatically convert the incoming `max_tokens` parameter + /// to `max_completion_tokens` for you. But you should be aware that o1 has hidden token usage and costs + /// that aren't constrained by the `max_completion_tokens` parameter. This means that you may see an + /// increase in token usage and costs when using the o1 models. + /// + /// * The o1 models do not currently support the sending of system messages which just means that the + /// `useSystemMessages` parameter is ignored when calling the o1 models. + /// + /// * The o1 models do not currently support setting the `temperature`, `top_p`, and `presence_penalty` + /// parameters so they will be ignored. + /// + /// * The o1 models do not currently support the use of tools so you will need to use the "monologue" + /// augmentation to call actions. 
+ /// public class OpenAIModel : IPromptCompletionStreamingModel { private readonly BaseOpenAIModelOptions _options; @@ -161,19 +180,6 @@ public async Task CompletePromptAsync(ITurnContext turnContext, Events.OnBeforeCompletion(beforeCompletionEventArgs); } - // Setup tools if enabled - bool isToolsAugmentation = promptTemplate.Configuration.Augmentation.Type == Augmentations.AugmentationType.Tools; - List tools = new(); - - // If tools is enabled, reformat actions to schema - if (isToolsAugmentation && promptTemplate.Actions.Count > 0) - { - foreach (ChatCompletionAction action in promptTemplate.Actions) - { - tools.Add(action.ToChatTool()); - } - } - // Render prompt RenderedPromptSection> prompt = await promptTemplate.Prompt.RenderAsMessagesAsync(turnContext, memory, promptFunctions, tokenizer, maxInputTokens, cancellationToken); if (prompt.TooLong) @@ -185,7 +191,11 @@ public async Task CompletePromptAsync(ITurnContext turnContext, }; } - if (!_options.UseSystemMessages!.Value && prompt.Output.Count > 0 && prompt.Output[0].Role == ChatRole.System) + // Get the model to use. + string model = promptTemplate.Configuration.Completion.Model ?? 
_deploymentName; + bool isO1Model = model.StartsWith("o1-"); + bool useSystemMessages = !isO1Model && _options.UseSystemMessages.GetValueOrDefault(false); + if (!useSystemMessages && prompt.Output.Count > 0 && prompt.Output[0].Role == ChatRole.System) { prompt.Output[0].Role = ChatRole.User; } @@ -196,42 +206,48 @@ public async Task CompletePromptAsync(ITurnContext turnContext, _logger.LogTrace(JsonSerializer.Serialize(prompt.Output, _serializerOptions)); } - // Render prompt template + // Map to OpenAI ChatMessage IEnumerable chatMessages = prompt.Output.Select(chatMessage => chatMessage.ToOpenAIChatMessage()); ChatCompletionOptions chatCompletionOptions = new() { - MaxTokens = completion.MaxTokens, + MaxOutputTokenCount = completion.MaxTokens, Temperature = (float)completion.Temperature, TopP = (float)completion.TopP, PresencePenalty = (float)completion.PresencePenalty, FrequencyPenalty = (float)completion.FrequencyPenalty, }; - if (isToolsAugmentation) + if (isO1Model) { - chatCompletionOptions.ToolChoice = completion.GetOpenAIChatToolChoice(); - chatCompletionOptions.ParallelToolCallsEnabled = completion.ParallelToolCalls; + chatCompletionOptions.Temperature = 1; + chatCompletionOptions.TopP = 1; + chatCompletionOptions.PresencePenalty = 0; } - foreach (ChatTool tool in tools) + // Set tools configurations + bool isToolsAugmentation = promptTemplate.Configuration.Augmentation.Type == Augmentations.AugmentationType.Tools; + if (isToolsAugmentation) { - chatCompletionOptions.Tools.Add(tool); - } - + chatCompletionOptions.ToolChoice = completion.GetOpenAIChatToolChoice(); + chatCompletionOptions.AllowParallelToolCalls = completion.ParallelToolCalls; - if (chatCompletionOptions == null) - { - throw new TeamsAIException("Failed to create chat completions options"); + if (promptTemplate.Actions.Count > 0) + { + foreach (ChatCompletionAction action in promptTemplate.Actions) + { + chatCompletionOptions.Tools.Add(action.ToChatTool()); + } + } } + // Add Azure chat 
extension configurations IDictionary? additionalData = promptTemplate.Configuration.Completion.AdditionalData; if (_useAzure) { AddAzureChatExtensionConfigurations(chatCompletionOptions, additionalData); } - string model = promptTemplate.Configuration.Completion.Model ?? _deploymentName; PipelineResponse? rawResponse = null; ClientResult? chatCompletionsResponse = null; @@ -319,7 +335,6 @@ public async Task CompletePromptAsync(ITurnContext turnContext, if (_options.LogRequests!.Value) { - // TODO: Colorize _logger.LogTrace("RESPONSE:"); _logger.LogTrace($"duration {(DateTime.UtcNow - startTime).TotalMilliseconds} ms"); if (promptResponse.Status == PromptResponseStatus.Success && chatCompletionsResponse != null) @@ -409,9 +424,9 @@ public async Task CompletePromptAsync(ITurnContext turnContext, { return apiVersion switch { - "2024-04-01-preview" => ServiceVersion.V2024_04_01_Preview, - "2024-05-01-preview" => ServiceVersion.V2024_05_01_Preview, "2024-06-01" => ServiceVersion.V2024_06_01, + "2024-08-01-preview" => ServiceVersion.V2024_08_01_Preview, + "2024-10-01-preview" => ServiceVersion.V2024_10_01_Preview, _ => null, }; } @@ -430,12 +445,10 @@ private void AddAzureChatExtensionConfigurations(ChatCompletionOptions options, { try { - AzureChatDataSource? dataSource = ModelReaderWriter.Read(BinaryData.FromObjectAsJson(item)); + ChatDataSource? dataSource = ModelReaderWriter.Read(BinaryData.FromObjectAsJson(item)); if (dataSource != null) { -#pragma warning disable AOAI001 options.AddDataSource(dataSource); -#pragma warning restore AOAI001 } } catch (Exception ex) @@ -446,4 +459,5 @@ private void AddAzureChatExtensionConfigurations(ChatCompletionOptions options, } } } -} \ No newline at end of file +} +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs index f3751f6b4..8fb1432c6 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs @@ -108,7 +108,7 @@ public async Task ContinueTaskAsync(ITurnContext context, TState state, AI if (response.Status != PromptResponseStatus.Success) { - throw new Exception(response.Error?.Message ?? "[Action Planner]: an error has occurred"); + throw new Exception(response.Error?.Message ?? "[Action Planner]: an error has occurred", response.Error); } // Check to see if we have a response diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/AssistantsPlanner.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/AssistantsPlanner.cs index d352c3c26..b2f94fb24 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/AssistantsPlanner.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/AssistantsPlanner.cs @@ -8,7 +8,7 @@ using Microsoft.Teams.AI.Exceptions; using Microsoft.Teams.AI.State; using Microsoft.Teams.AI.Utilities; -using OpenAI; +using OAI = OpenAI; using OpenAI.Assistants; using OpenAI.Files; using System.ClientModel; @@ -32,7 +32,7 @@ public class AssistantsPlanner : IPlanner private readonly AssistantsPlannerOptions _options; private readonly AssistantClient _client; - private readonly FileClient _fileClient; + private readonly OpenAIFileClient _fileClient; // TODO: Write trace logs #pragma warning disable IDE0052 // Remove unread private members @@ -189,14 +189,14 @@ private async Task _BlockOnInProgressRunsAsync(string threadId, CancellationToke // Loop until the last run is completed while (true) { - AsyncPageCollection? 
runs = _client.GetRunsAsync(threadId, new() { Order = ListOrder.NewestFirst }, cancellationToken); + AsyncCollectionResult? runs = _client.GetRunsAsync(threadId, new() { Order = RunCollectionOrder.Descending }, cancellationToken); if (runs == null) { return; } - ThreadRun? run = runs.GetAllValuesAsync().GetAsyncEnumerator().Current; + ThreadRun? run = runs.GetAsyncEnumerator().Current; if (run == null || _IsRunCompleted(run)) { return; @@ -210,9 +210,9 @@ private async Task _BlockOnInProgressRunsAsync(string threadId, CancellationToke private async Task _GeneratePlanFromMessagesAsync(string threadId, string lastMessageId, CancellationToken cancellationToken) { // Find the new messages - AsyncPageCollection messages = _client.GetMessagesAsync(threadId, new() { Order = ListOrder.NewestFirst }, cancellationToken); + AsyncCollectionResult messages = _client.GetMessagesAsync(threadId, new() { Order = MessageCollectionOrder.Descending }, cancellationToken); List newMessages = new(); - await foreach (ThreadMessage message in messages.GetAllValuesAsync()) + await foreach (ThreadMessage message in messages) { if (string.Equals(message.Id, lastMessageId)) { @@ -380,7 +380,7 @@ internal static AssistantClient _CreateClient(TokenCredential tokenCredential, s return azureOpenAI.GetAssistantClient(); } - internal FileClient _CreateFileClient(string apiKey, string? endpoint = null) + internal OpenAIFileClient _CreateFileClient(string apiKey, string? endpoint = null) { Verify.ParamNotNull(apiKey); @@ -388,22 +388,22 @@ internal FileClient _CreateFileClient(string apiKey, string? 
endpoint = null) { // Azure OpenAI AzureOpenAIClient azureOpenAI = new(new Uri(endpoint), new ApiKeyCredential(apiKey)); - return azureOpenAI.GetFileClient(); + return azureOpenAI.GetOpenAIFileClient(); } else { // OpenAI - return new FileClient(apiKey); + return new OpenAIFileClient(apiKey); } } - internal FileClient _CreateFileClient(TokenCredential tokenCredential, string endpoint) + internal OpenAIFileClient _CreateFileClient(TokenCredential tokenCredential, string endpoint) { Verify.ParamNotNull(tokenCredential); Verify.ParamNotNull(endpoint); AzureOpenAIClient azureOpenAI = new(new Uri(endpoint), tokenCredential); - return azureOpenAI.GetFileClient(); + return azureOpenAI.GetOpenAIFileClient(); } private async Task _CreateUserThreadMessageAsync(string threadId, TState state, CancellationToken cancellationToken) @@ -417,16 +417,16 @@ private async Task _CreateUserThreadMessageAsync(string threadId, IList? inputFiles = state.Temp?.InputFiles.Where((file) => file.Filename != null && file.Filename != string.Empty).ToList(); if (inputFiles != null && inputFiles.Count > 0) { - List>> fileUploadTasks = new(); + List>> fileUploadTasks = new(); foreach (InputFile file in inputFiles) { fileUploadTasks.Add(_fileClient.UploadFileAsync(file.Content, file.Filename!, FileUploadPurpose.Assistants)); } - ClientResult[] uploadedFiles = await Task.WhenAll(fileUploadTasks); + ClientResult[] uploadedFiles = await Task.WhenAll(fileUploadTasks); for (int i = 0; i < uploadedFiles.Count(); i++) { - OpenAIFileInfo file = uploadedFiles[i]; + OAI.Files.OpenAIFile file = uploadedFiles[i]; if (inputFiles[i].ContentType.StartsWith("image/")) { messages.Add(MessageContent.FromImageFileId(file.Id, MessageImageDetail.Auto)); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs index d7b8a6a8c..e495e4745 100644 --- 
a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs @@ -128,7 +128,7 @@ public PromptTemplate GetPrompt(string name) template.Prompt.Sections = new List() { // The "1" place holder is to make this a fixed section so it is rendered in the correct order. // TODO: When implementing the new layout engine class refactor this. - new GroupSection(ChatRole.System, template.Prompt.Sections, 1) + new GroupSection(ChatRole.System, template.Prompt.Sections, 1) }; if (template.Configuration.Completion.IncludeHistory) diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs index 8796b651b..7d342db1b 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs @@ -314,9 +314,9 @@ internal OAI.Chat.ChatToolChoice GetOpenAIChatToolChoice() { return ToolChoice switch { - ChatToolChoice.Auto => OAI.Chat.ChatToolChoice.Auto, - ChatToolChoice.Required => OAI.Chat.ChatToolChoice.Required, - ChatToolChoice.None => OAI.Chat.ChatToolChoice.None, + ChatToolChoice.Auto => OAI.Chat.ChatToolChoice.CreateAutoChoice(), + ChatToolChoice.Required => OAI.Chat.ChatToolChoice.CreateRequiredChoice(), + ChatToolChoice.None => OAI.Chat.ChatToolChoice.CreateNoneChoice(), _ => throw new InvalidOperationException($"Unknown ChatToolChoice: {ToolChoice}"), }; } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/LayoutSection.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/LayoutSection.cs index edaeb4603..2e8f43fc4 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/LayoutSection.cs +++ 
b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/LayoutSection.cs @@ -20,7 +20,7 @@ private List _fixedSections { get { - return this.Sections.Where(s => s.Tokens > -1).OrderBy(s => s.Required).ToList(); + return this.Sections.Where(s => s.Tokens > -1).OrderBy(s => !s.Required).ToList(); } } @@ -28,7 +28,7 @@ private List _autoSections { get { - return this.Sections.Where(s => s.Tokens == -1).OrderBy(s => s.Required).ToList(); + return this.Sections.Where(s => s.Tokens == -1).OrderBy(s => !s.Required).ToList(); } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Tokenizers/GPTTokenizer.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Tokenizers/GPTTokenizer.cs index f24a07993..72b6e52cd 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Tokenizers/GPTTokenizer.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Tokenizers/GPTTokenizer.cs @@ -12,7 +12,7 @@ public class GPTTokenizer : ITokenizer /// /// Creates an instance of `GPTTokenizer` using "gpt-4" model name by default which is using the `cl100k_base` encoding /// - public GPTTokenizer() => _encoding = Tokenizer.CreateTiktokenForModel("gpt-4"); + public GPTTokenizer() => _encoding = TiktokenTokenizer.CreateForModel("gpt-4"); /// /// Creates an instance of `GPTTokenizer` @@ -24,7 +24,7 @@ public class GPTTokenizer : ITokenizer /// Creates an instance of `GPTTokenizer` /// /// model to encode/decode for - public GPTTokenizer(string model) => this._encoding = Tokenizer.CreateTiktokenForModel(model); + public GPTTokenizer(string model) => this._encoding = TiktokenTokenizer.CreateForModel("gpt-4"); /// /// Encode diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAttachmentDownloader.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAttachmentDownloader.cs index 887bce6af..9b46b0a07 100644 --- 
a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAttachmentDownloader.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAttachmentDownloader.cs @@ -39,8 +39,8 @@ public TeamsAttachmentDownloader(TeamsAttachmentDownloaderOptions options, HttpC public async Task> DownloadFilesAsync(ITurnContext turnContext, TState turnState, CancellationToken cancellationToken = default) { // Filter out HTML attachments - IEnumerable attachments = turnContext.Activity.Attachments.Where((a) => !a.ContentType.StartsWith("text/html")); - if (!attachments.Any()) + IEnumerable? attachments = turnContext.Activity.Attachments?.Where((a) => !a.ContentType.StartsWith("text/html")); + if (attachments == null || !attachments.Any()) { return new List(); } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj index e0681fff3..bec3a1d80 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj @@ -37,7 +37,7 @@ - + @@ -45,9 +45,9 @@ - - - + + + diff --git a/dotnet/samples/04.ai.g.teamsChefBot-streaming/TeamsChefBot.csproj b/dotnet/samples/04.ai.g.teamsChefBot-streaming/TeamsChefBot.csproj index 447ddbeac..5f8fa3c35 100644 --- a/dotnet/samples/04.ai.g.teamsChefBot-streaming/TeamsChefBot.csproj +++ b/dotnet/samples/04.ai.g.teamsChefBot-streaming/TeamsChefBot.csproj @@ -14,20 +14,9 @@ - - - - - - - - - - - - - - + + + diff --git a/dotnet/samples/06.assistants.a.mathBot/MathBot.csproj b/dotnet/samples/06.assistants.a.mathBot/MathBot.csproj index 38a3405b4..30d0fc8d3 100644 --- a/dotnet/samples/06.assistants.a.mathBot/MathBot.csproj +++ b/dotnet/samples/06.assistants.a.mathBot/MathBot.csproj @@ -16,7 +16,7 @@ - + diff --git a/dotnet/samples/06.assistants.b.orderBot/OrderBot.csproj 
b/dotnet/samples/06.assistants.b.orderBot/OrderBot.csproj index 5c27d115c..bb37ee383 100644 --- a/dotnet/samples/06.assistants.b.orderBot/OrderBot.csproj +++ b/dotnet/samples/06.assistants.b.orderBot/OrderBot.csproj @@ -15,8 +15,8 @@ - - + + diff --git a/dotnet/samples/06.assistants.b.orderBot/Program.cs b/dotnet/samples/06.assistants.b.orderBot/Program.cs index 7f6c501bc..f13db0669 100644 --- a/dotnet/samples/06.assistants.b.orderBot/Program.cs +++ b/dotnet/samples/06.assistants.b.orderBot/Program.cs @@ -10,7 +10,6 @@ using OpenAI.Assistants; using Azure.Core; using Azure.Identity; -using System.Runtime.CompilerServices; using Microsoft.Teams.AI.Application; using OpenAI.Files; using OpenAI.VectorStores; @@ -86,14 +85,14 @@ // Create Vector Store var storeClient = client.GetVectorStoreClient(); - store = storeClient.CreateVectorStore(new VectorStoreCreationOptions()); + var storeCreationOperation = storeClient.CreateVectorStore(true); // Upload file. - var fileClient = client.GetFileClient(); + var fileClient = client.GetOpenAIFileClient(); var uploadedFile = fileClient.UploadFile("./assets/menu.pdf", FileUploadPurpose.Assistants); // Attach file to vector store - var fileAssociation = storeClient.AddFileToVectorStore(store, uploadedFile); + var fileAssociation = storeClient.AddFileToVectorStore(store.Id, uploadedFile.Value.Id, true); // Poll vector store until file is uploaded var maxPollCount = 5; @@ -113,10 +112,11 @@ } catch (Exception e) { - throw new Exception("Failed to upload file to vector store.", e); + throw new Exception("Failed to upload file to vector store.", e.InnerException); } - + var fileSearchTool = new FileSearchToolResources(); + fileSearchTool.VectorStoreIds.Add(store.Id); AssistantCreationOptions assistantCreationOptions = new() { Name = "Order Bot", @@ -129,7 +129,7 @@ }), ToolResources = new ToolResources() { - FileSearch = new FileSearchToolResources() { VectorStoreIds = new List() { store.Id } } + FileSearch = fileSearchTool } }; diff 
--git a/getting-started/CONCEPTS/STREAMING.md b/getting-started/CONCEPTS/STREAMING.md index c99ff131e..b96221ec0 100644 --- a/getting-started/CONCEPTS/STREAMING.md +++ b/getting-started/CONCEPTS/STREAMING.md @@ -60,6 +60,7 @@ Once `endStream()` is called, the stream is considered ended and no further upda - The informative message is rendered only at the beginning of each message returned from the LLM. - Attachments can only be sent in the final streamed chunk. - Streaming is not available in conjunction with AI SDK's function calls yet. +- Streaming does not work with OpenAI's `o1` models. ### Setup Instructions: