From 87ea6928ab57835fdf84c46bf416510b47b8c0de Mon Sep 17 00:00:00 2001
From: Mark Waddle
Date: Tue, 26 Nov 2024 21:12:53 +0000
Subject: [PATCH] Fixes service_type discriminators

To use Literal types
---
 .../assistant/chat.py                             |  6 +++---
 .../assistant/config.py                           | 16 +++++++---------
 .../python/openai-client/openai_client/config.py  |  4 ++--
 3 files changed, 12 insertions(+), 14 deletions(-)

diff --git a/examples/python/python-03-multimodel-chatbot/assistant/chat.py b/examples/python/python-03-multimodel-chatbot/assistant/chat.py
index 66c42586..96653925 100644
--- a/examples/python/python-03-multimodel-chatbot/assistant/chat.py
+++ b/examples/python/python-03-multimodel-chatbot/assistant/chat.py
@@ -245,7 +245,7 @@ async def respond_to_conversation(
 
     if result.error:
         logger.exception(
-            f"exception occurred calling {config.service_config.service_type} chat completion: {result.error}"
+            f"exception occurred calling {config.service_config.llm_service_type} chat completion: {result.error}"
         )
 
     # set the message type based on the content
@@ -281,9 +281,9 @@ async def respond_to_conversation(
         response_content = content
 
     if not response_content and "error" in metadata:
-        response_content = f"[error from {config.service_config.service_type}: {metadata['error']}]"
+        response_content = f"[error from {config.service_config.llm_service_type}: {metadata['error']}]"
     if not response_content:
-        response_content = f"[no response from {config.service_config.service_type}]"
+        response_content = f"[no response from {config.service_config.llm_service_type}]"
 
     # send the response to the conversation
     await context.send_messages(
diff --git a/examples/python/python-03-multimodel-chatbot/assistant/config.py b/examples/python/python-03-multimodel-chatbot/assistant/config.py
index bb0836b6..5f043278 100644
--- a/examples/python/python-03-multimodel-chatbot/assistant/config.py
+++ b/examples/python/python-03-multimodel-chatbot/assistant/config.py
@@ -1,7 +1,7 @@
 import pathlib
 from abc import ABC, abstractmethod
 from enum import StrEnum
-from typing import Annotated, Any
+from typing import Annotated, Any, Literal
 
 import google.generativeai as genai
 import openai
@@ -48,12 +48,10 @@ class ServiceType(StrEnum):
 
 
 class ServiceConfig(ABC, BaseModel):
-    llm_service_type: Annotated[ServiceType, UISchema(widget="hidden")]
-
     @property
     def service_type_display_name(self) -> str:
         # get from the class title
-        return self.model_config.get("title") or self.llm_service_type
+        return self.model_config.get("title") or self.__class__.__name__
 
     @abstractmethod
     def new_client(self, **kwargs) -> Any:
@@ -76,7 +74,7 @@ class AzureOpenAIServiceConfig(ServiceConfig, openai_client.AzureOpenAIServiceCo
         },
     )
 
-    llm_service_type: Annotated[ServiceType, UISchema(widget="hidden")] = ServiceType.AzureOpenAI
+    llm_service_type: Annotated[Literal[ServiceType.AzureOpenAI], UISchema(widget="hidden")] = ServiceType.AzureOpenAI
 
     openai_model: Annotated[
         str,
@@ -103,7 +101,7 @@ class OpenAIServiceConfig(ServiceConfig, openai_client.OpenAIServiceConfig):
         },
     )
 
-    llm_service_type: Annotated[ServiceType, UISchema(widget="hidden")] = ServiceType.OpenAI
+    llm_service_type: Annotated[Literal[ServiceType.OpenAI], UISchema(widget="hidden")] = ServiceType.OpenAI
 
     openai_model: Annotated[
         str,
@@ -129,7 +127,7 @@ class AnthropicServiceConfig(ServiceConfig):
         },
     )
 
-    service_type: Annotated[ServiceType, UISchema(widget="hidden")] = ServiceType.Anthropic
+    llm_service_type: Annotated[Literal[ServiceType.Anthropic], UISchema(widget="hidden")] = ServiceType.Anthropic
 
     anthropic_api_key: Annotated[
         # ConfigSecretStr is a custom type that should be used for any secrets.
@@ -165,7 +163,7 @@ class GeminiServiceConfig(ServiceConfig):
         },
     )
 
-    service_type: Annotated[ServiceType, UISchema(widget="hidden")] = ServiceType.Gemini
+    llm_service_type: Annotated[Literal[ServiceType.Gemini], UISchema(widget="hidden")] = ServiceType.Gemini
 
     gemini_api_key: Annotated[
         # ConfigSecretStr is a custom type that should be used for any secrets.
@@ -202,7 +200,7 @@ class OllamaServiceConfig(ServiceConfig):
         },
     )
 
-    service_type: Annotated[ServiceType, UISchema(widget="hidden")] = ServiceType.Ollama
+    llm_service_type: Annotated[Literal[ServiceType.Ollama], UISchema(widget="hidden")] = ServiceType.Ollama
 
     ollama_endpoint: Annotated[
         str,
diff --git a/libraries/python/openai-client/openai_client/config.py b/libraries/python/openai-client/openai_client/config.py
index c3d073b0..bb183ee4 100644
--- a/libraries/python/openai-client/openai_client/config.py
+++ b/libraries/python/openai-client/openai_client/config.py
@@ -52,7 +52,7 @@ class AzureOpenAIServiceConfig(BaseModel):
         },
     )
 
-    service_type: Annotated[ServiceType, UISchema(widget="hidden")] = ServiceType.AzureOpenAI
+    service_type: Annotated[Literal[ServiceType.AzureOpenAI], UISchema(widget="hidden")] = ServiceType.AzureOpenAI
 
     auth_config: Annotated[
         AzureOpenAIAzureIdentityAuthConfig | AzureOpenAIApiKeyAuthConfig,
@@ -89,7 +89,7 @@ class AzureOpenAIServiceConfig(BaseModel):
 class OpenAIServiceConfig(BaseModel):
     model_config = ConfigDict(title="OpenAI", json_schema_extra={"required": ["openai_api_key"]})
 
-    service_type: Annotated[ServiceType, UISchema(widget="hidden")] = ServiceType.OpenAI
+    service_type: Annotated[Literal[ServiceType.OpenAI], UISchema(widget="hidden")] = ServiceType.OpenAI
 
     openai_api_key: Annotated[
         # ConfigSecretStr is a custom type that should be used for any secrets.
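
The patch above narrows each config class's discriminator field to a Literal of a single ServiceType member. The sketch below is a minimal, hypothetical illustration of why that matters; it is not code from this repository, and the class and field names are simplified stand-ins. With Literal-typed tags, Pydantic can treat the union of config classes as a discriminated (tagged) union and validate input straight to the correct subclass.

# Hypothetical, simplified models (not the repository's actual classes) showing
# the pattern the patch moves to: Literal-typed discriminators for a tagged union.
from enum import StrEnum
from typing import Annotated, Literal

from pydantic import BaseModel, Field, TypeAdapter


class ServiceType(StrEnum):
    OpenAI = "openai"
    Anthropic = "anthropic"


class OpenAIServiceConfig(BaseModel):
    # Literal narrows the field to one member, so it can serve as the union tag.
    llm_service_type: Literal[ServiceType.OpenAI] = ServiceType.OpenAI
    openai_model: str = "gpt-4o"


class AnthropicServiceConfig(BaseModel):
    llm_service_type: Literal[ServiceType.Anthropic] = ServiceType.Anthropic
    anthropic_model: str = "claude-3-5-sonnet-latest"


# Pydantic selects the right class from the tag alone instead of trying each member.
ServiceConfig = Annotated[
    OpenAIServiceConfig | AnthropicServiceConfig,
    Field(discriminator="llm_service_type"),
]

config = TypeAdapter(ServiceConfig).validate_python(
    {"llm_service_type": "anthropic", "anthropic_model": "claude-3-5-sonnet-latest"}
)
assert isinstance(config, AnthropicServiceConfig)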