Better OpenAI invoke error handling (agno-agi#2244)
dirkbrnd authored Feb 27, 2025
1 parent 7b2ec4a commit d3c321f
Showing 1 changed file with 56 additions and 8 deletions.
64 changes: 56 additions & 8 deletions libs/agno/agno/models/openai/chat.py
@@ -320,8 +320,14 @@ def invoke(self, messages: List[Message]) -> Union[ChatCompletion, ParsedChatCom
             )
         except RateLimitError as e:
             logger.error(f"Rate limit error from OpenAI API: {e}")
+            error_message = e.response.json().get("error", {})
+            error_message = (
+                error_message.get("message", "Unknown model error")
+                if isinstance(error_message, dict)
+                else error_message
+            )
             raise ModelProviderError(
-                message=e.response.json().get("error", {}).get("message", "Unknown model error"),
+                message=error_message,
                 status_code=e.response.status_code,
                 model_name=self.name,
                 model_id=self.id,
@@ -331,8 +337,14 @@
             raise ModelProviderError(message=str(e), model_name=self.name, model_id=self.id) from e
         except APIStatusError as e:
             logger.error(f"API status error from OpenAI API: {e}")
+            error_message = e.response.json().get("error", {})
+            error_message = (
+                error_message.get("message", "Unknown model error")
+                if isinstance(error_message, dict)
+                else error_message
+            )
             raise ModelProviderError(
-                message=e.response.json().get("error", {}).get("message", "Unknown model error"),
+                message=error_message,
                 status_code=e.response.status_code,
                 model_name=self.name,
                 model_id=self.id,
@@ -368,8 +380,14 @@ async def ainvoke(self, messages: List[Message]) -> Union[ChatCompletion, Parsed
             )
         except RateLimitError as e:
             logger.error(f"Rate limit error from OpenAI API: {e}")
+            error_message = e.response.json().get("error", {})
+            error_message = (
+                error_message.get("message", "Unknown model error")
+                if isinstance(error_message, dict)
+                else error_message
+            )
             raise ModelProviderError(
-                message=e.response.json().get("error", {}).get("message", "Unknown model error"),
+                message=error_message,
                 status_code=e.response.status_code,
                 model_name=self.name,
                 model_id=self.id,
@@ -379,8 +397,14 @@
             raise ModelProviderError(message=str(e), model_name=self.name, model_id=self.id) from e
         except APIStatusError as e:
             logger.error(f"API status error from OpenAI API: {e}")
+            error_message = e.response.json().get("error", {})
+            error_message = (
+                error_message.get("message", "Unknown model error")
+                if isinstance(error_message, dict)
+                else error_message
+            )
             raise ModelProviderError(
-                message=e.response.json().get("error", {}).get("message", "Unknown model error"),
+                message=error_message,
                 status_code=e.response.status_code,
                 model_name=self.name,
                 model_id=self.id,
@@ -409,8 +433,14 @@ def invoke_stream(self, messages: List[Message]) -> Iterator[ChatCompletionChunk
             )  # type: ignore
         except RateLimitError as e:
             logger.error(f"Rate limit error from OpenAI API: {e}")
+            error_message = e.response.json().get("error", {})
+            error_message = (
+                error_message.get("message", "Unknown model error")
+                if isinstance(error_message, dict)
+                else error_message
+            )
             raise ModelProviderError(
-                message=e.response.json().get("error", {}).get("message", "Unknown model error"),
+                message=error_message,
                 status_code=e.response.status_code,
                 model_name=self.name,
                 model_id=self.id,
@@ -420,8 +450,14 @@
             raise ModelProviderError(message=str(e), model_name=self.name, model_id=self.id) from e
         except APIStatusError as e:
             logger.error(f"API status error from OpenAI API: {e}")
+            error_message = e.response.json().get("error", {})
+            error_message = (
+                error_message.get("message", "Unknown model error")
+                if isinstance(error_message, dict)
+                else error_message
+            )
             raise ModelProviderError(
-                message=e.response.json().get("error", {}).get("message", "Unknown model error"),
+                message=error_message,
                 status_code=e.response.status_code,
                 model_name=self.name,
                 model_id=self.id,
@@ -452,8 +488,14 @@ async def ainvoke_stream(self, messages: List[Message]) -> AsyncIterator[ChatCom
                 yield chunk
         except RateLimitError as e:
             logger.error(f"Rate limit error from OpenAI API: {e}")
+            error_message = e.response.json().get("error", {})
+            error_message = (
+                error_message.get("message", "Unknown model error")
+                if isinstance(error_message, dict)
+                else error_message
+            )
             raise ModelProviderError(
-                message=e.response.json().get("error", {}).get("message", "Unknown model error"),
+                message=error_message,
                 status_code=e.response.status_code,
                 model_name=self.name,
                 model_id=self.id,
@@ -463,8 +505,14 @@
             raise ModelProviderError(message=str(e), model_name=self.name, model_id=self.id) from e
         except APIStatusError as e:
             logger.error(f"API status error from OpenAI API: {e}")
+            error_message = e.response.json().get("error", {})
+            error_message = (
+                error_message.get("message", "Unknown model error")
+                if isinstance(error_message, dict)
+                else error_message
+            )
             raise ModelProviderError(
-                message=e.response.json().get("error", {}).get("message", "Unknown model error"),
+                message=error_message,
                 status_code=e.response.status_code,
                 model_name=self.name,
                 model_id=self.id,
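
The change is identical in all four code paths (invoke, ainvoke, invoke_stream, ainvoke_stream): instead of chaining .get() calls on e.response.json().get("error", {}), which would raise AttributeError whenever the API returns "error" as a plain string rather than an object, each handler first extracts the "error" value and only looks up "message" when it is actually a dict. The sketch below isolates that pattern in a standalone helper purely for illustration; the helper name, parameter, and sample payloads are not part of the commit, which inlines this logic directly inside each except block of chat.py.

# Minimal, standalone sketch of the error-message extraction pattern in this commit.
# "extract_error_message" and the sample payloads are illustrative, not part of the diff.
from typing import Any, Dict, Union


def extract_error_message(body: Dict[str, Any]) -> str:
    """Return a readable message from an OpenAI-style error body.

    The "error" field is usually an object such as {"message": ..., "type": ...},
    but it can also arrive as a plain string; both shapes are handled here.
    """
    error_message: Union[str, Dict[str, Any]] = body.get("error", {})
    return (
        error_message.get("message", "Unknown model error")
        if isinstance(error_message, dict)
        else error_message
    )


if __name__ == "__main__":
    print(extract_error_message({"error": {"message": "Rate limit reached"}}))  # dict-shaped error
    print(extract_error_message({"error": "Rate limit reached"}))               # string-shaped error
    print(extract_error_message({"error": {}}))                                 # falls back to the default

Keeping the "Unknown model error" fallback only on the dict branch means a string-valued error is surfaced verbatim in the raised ModelProviderError, rather than failing with AttributeError before the exception can be re-raised.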
