diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
index 608573e05f6..6bdd924b87a 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
@@ -87,6 +87,11 @@ public async Task<ChatCompletion> CompleteAsync(IList<ChatMessage> chatMessages,
             JsonContext.Default.OllamaChatRequest,
             cancellationToken).ConfigureAwait(false);
 
+        if (!httpResponse.IsSuccessStatusCode)
+        {
+            await OllamaUtilities.ThrowUnsuccessfulOllamaResponseAsync(httpResponse, cancellationToken).ConfigureAwait(false);
+        }
+
         var response = (await httpResponse.Content.ReadFromJsonAsync(
             JsonContext.Default.OllamaChatResponse,
             cancellationToken).ConfigureAwait(false))!;
@@ -117,6 +122,12 @@ public async IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAs
             Content = JsonContent.Create(ToOllamaChatRequest(chatMessages, options, stream: true), JsonContext.Default.OllamaChatRequest)
         };
         using var httpResponse = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
+
+        if (!httpResponse.IsSuccessStatusCode)
+        {
+            await OllamaUtilities.ThrowUnsuccessfulOllamaResponseAsync(httpResponse, cancellationToken).ConfigureAwait(false);
+        }
+
         using var httpResponseStream = await httpResponse.Content
 #if NET
             .ReadAsStreamAsync(cancellationToken)
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingGenerator.cs
index 5377b5f7092..ac5bd608dc7 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingGenerator.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingGenerator.cs
@@ -110,6 +110,11 @@ public async Task<GeneratedEmbeddings<Embedding<float>>> GenerateAsync(
             JsonContext.Default.OllamaEmbeddingRequest,
             cancellationToken).ConfigureAwait(false);
 
+        if (!httpResponse.IsSuccessStatusCode)
+        {
+            await OllamaUtilities.ThrowUnsuccessfulOllamaResponseAsync(httpResponse, cancellationToken).ConfigureAwait(false);
+        }
+
         var response = (await httpResponse.Content.ReadFromJsonAsync(
             JsonContext.Default.OllamaEmbeddingResponse,
             cancellationToken).ConfigureAwait(false))!;
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaUtilities.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaUtilities.cs
index d7db10e5a04..ea2625bd50e 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaUtilities.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaUtilities.cs
@@ -2,8 +2,12 @@
 // The .NET Foundation licenses this file to you under the MIT license.
 
 using System;
+using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
 using System.Net.Http;
+using System.Text.Json;
 using System.Threading;
+using System.Threading.Tasks;
 
 namespace Microsoft.Extensions.AI;
 
@@ -31,4 +35,38 @@ public static void TransferNanosecondsTime<TResponse>(TResponse response, Func<T
+        ex = await Assert.ThrowsAsync<InvalidOperationException>(() => chatClient.CompleteAsync("Hello, world!"));
+        Assert.Contains("inexistent-model", ex.Message);
+
+        ex = await Assert.ThrowsAsync<InvalidOperationException>(() => chatClient.CompleteStreamingAsync("Hello, world!").ToChatCompletionAsync());
+        Assert.Contains("inexistent-model", ex.Message);
+    }
+
     private sealed class AssertNoToolsDefinedChatClient(IChatClient innerClient) : DelegatingChatClient(innerClient)
     {
         public override Task<ChatCompletion> CompleteAsync(
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorIntegrationTests.cs
index 4333cbde636..493c0bf0333 100644
--- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorIntegrationTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorIntegrationTests.cs
@@ -2,6 +2,9 @@
 // The .NET Foundation licenses this file to you under the MIT license.
 
 using System;
+using System.Threading.Tasks;
+using Microsoft.TestUtilities;
+using Xunit;
 
 namespace Microsoft.Extensions.AI;
 
@@ -11,4 +14,19 @@ public class OllamaEmbeddingGeneratorIntegrationTests : EmbeddingGeneratorIntegr
         IntegrationTestHelpers.GetOllamaUri() is Uri endpoint ?
             new OllamaEmbeddingGenerator(endpoint, "all-minilm") :
             null;
+
+    [ConditionalFact]
+    public async Task InvalidModelParameter_ThrowsInvalidOperationException()
+    {
+        SkipIfNotEnabled();
+
+        var endpoint = IntegrationTestHelpers.GetOllamaUri();
+        Assert.NotNull(endpoint);
+
+        using var generator = new OllamaEmbeddingGenerator(endpoint, modelId: "inexistent-model");
+
+        InvalidOperationException ex;
+        ex = await Assert.ThrowsAsync<InvalidOperationException>(() => generator.GenerateAsync(["Hello, world!"]));
+        Assert.Contains("inexistent-model", ex.Message);
+    }
 }
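
The body of the new `OllamaUtilities.ThrowUnsuccessfulOllamaResponseAsync` helper that the hunks above call into is not visible here. The following is only a minimal sketch of what such a helper could look like, assuming Ollama reports failures as a JSON body of the form `{"error":"..."}` and that the helper surfaces that text as an `InvalidOperationException` (which is what the new integration tests expect); the class name `OllamaErrorHelper` and the exact message format are placeholders, not the actual implementation in this diff.

```csharp
// Hypothetical sketch only; the real helper lives on OllamaUtilities and may differ.
using System;
using System.Diagnostics;
using System.Net.Http;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;

internal static class OllamaErrorHelper
{
    public static async Task ThrowUnsuccessfulOllamaResponseAsync(HttpResponseMessage response, CancellationToken cancellationToken)
    {
        Debug.Assert(!response.IsSuccessStatusCode, "Should only be called for unsuccessful responses.");

        // Read the raw body; for errors Ollama typically returns {"error":"model \"foo\" not found"}.
        string errorContent =
#if NET
            await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
#else
            await response.Content.ReadAsStringAsync().ConfigureAwait(false);
#endif

        string? errorMessage = null;
        try
        {
            using JsonDocument doc = JsonDocument.Parse(errorContent);
            if (doc.RootElement.TryGetProperty("error", out JsonElement error))
            {
                errorMessage = error.GetString();
            }
        }
        catch (JsonException)
        {
            // Body wasn't JSON; fall back to the raw content below.
        }

        throw new InvalidOperationException($"Ollama error: {errorMessage ?? errorContent}");
    }
}
```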
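
For callers, the practical effect is that a non-success HTTP status from Ollama (for example, a request against an unknown model) is now reported explicitly before the response body is handed to response parsing. A consumer-side sketch, with the endpoint and model name as placeholders:

```csharp
// Consumer-side sketch; endpoint and model name are placeholders.
using System;
using Microsoft.Extensions.AI;

using IChatClient client = new OllamaChatClient(new Uri("http://localhost:11434"), modelId: "llama3.1");

try
{
    ChatCompletion completion = await client.CompleteAsync("Hello, world!");
    Console.WriteLine(completion);
}
catch (InvalidOperationException ex)
{
    // With this change, the exception message carries the error text returned by the
    // Ollama server (e.g. a "model not found" message for an invalid model id).
    Console.Error.WriteLine($"Ollama request failed: {ex.Message}");
}
```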