Skip to content

Commit

Permalink
remove redundant code in ai21
Browse files Browse the repository at this point in the history
  • Loading branch information
the-praxs committed Jan 9, 2025
1 parent f9b141f commit 1a0740f
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 154 deletions.
75 changes: 9 additions & 66 deletions agentops/llms/providers/ai21.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@ def handle_response(self, response, kwargs, init_timestamp, session: Optional[Se
from ai21.stream.async_stream import AsyncStream
from ai21.models.chat.chat_completion_chunk import ChatCompletionChunk
from ai21.models.chat.chat_completion_response import ChatCompletionResponse
from ai21.models.responses.answer_response import AnswerResponse

llm_event = LLMEvent(init_timestamp=init_timestamp, params=kwargs)
action_event = ActionEvent(init_timestamp=init_timestamp, params=kwargs)
Expand Down Expand Up @@ -108,27 +107,15 @@ async def async_generator():

# Handle object responses
try:
if isinstance(response, ChatCompletionResponse):
llm_event.returns = response
llm_event.agent_id = check_call_stack_for_agent_id()
llm_event.model = kwargs["model"]
llm_event.prompt = [message.model_dump() for message in kwargs["messages"]]
llm_event.prompt_tokens = response.usage.prompt_tokens
llm_event.completion = response.choices[0].message.model_dump()
llm_event.completion_tokens = response.usage.completion_tokens
llm_event.end_timestamp = get_ISO_time()
self._safe_record(session, llm_event)

elif isinstance(response, AnswerResponse):
action_event.returns = response
action_event.agent_id = check_call_stack_for_agent_id()
action_event.action_type = "Contextual Answers"
action_event.logs = [
{"context": kwargs["context"], "question": kwargs["question"]},
response.model_dump() if response.model_dump() else None,
]
action_event.end_timestamp = get_ISO_time()
self._safe_record(session, action_event)
llm_event.returns = response
llm_event.agent_id = check_call_stack_for_agent_id()
llm_event.model = kwargs["model"]
llm_event.prompt = [message.model_dump() for message in kwargs["messages"]]
llm_event.prompt_tokens = response.usage.prompt_tokens
llm_event.completion = response.choices[0].message.model_dump()
llm_event.completion_tokens = response.usage.completion_tokens
llm_event.end_timestamp = get_ISO_time()
self._safe_record(session, llm_event)

Check warning on line 118 in agentops/llms/providers/ai21.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/providers/ai21.py#L110-L118

Added lines #L110 - L118 were not covered by tests

except Exception as e:
self._safe_record(session, ErrorEvent(trigger_event=llm_event, exception=e))
Expand All @@ -145,8 +132,6 @@ async def async_generator():
def override(self):
    """Install every AI21 client patch, sync and async variants alike."""
    patchers = (
        self._override_completion,
        self._override_completion_async,
        self._override_answer,
        self._override_answer_async,
    )
    # Apply in declaration order; each patcher swaps one ai21 method
    # for an AgentOps-instrumented wrapper.
    for patch in patchers:
        patch()

def _override_completion(self):
from ai21.clients.studio.resources.chat import ChatCompletions
Expand Down Expand Up @@ -184,42 +169,6 @@ async def patched_function(*args, **kwargs):
# Override the original method with the patched one
AsyncChatCompletions.create = patched_function

def _override_answer(self):
    """Patch StudioAnswer.create so each contextual-answers call is recorded.

    The original method is stashed in the module-level ``original_answer``
    so ``undo_override`` can restore it later.
    """
    from ai21.clients.studio.resources.studio_answer import StudioAnswer

    global original_answer
    original_answer = StudioAnswer.create

    def patched_function(*args, **kwargs):
        # Timestamp before the call so the recorded event spans the request.
        init_timestamp = get_ISO_time()

        # "session" is AgentOps-only; pop() removes it before forwarding to
        # ai21 and returns it in a single lookup (replaces get + in + del).
        session = kwargs.pop("session", None)
        result = original_answer(*args, **kwargs)
        return self.handle_response(result, kwargs, init_timestamp, session=session)

    StudioAnswer.create = patched_function

def _override_answer_async(self):
    """Patch AsyncStudioAnswer.create so async contextual-answers calls are recorded.

    The original coroutine method is stashed in the module-level
    ``original_answer_async`` so ``undo_override`` can restore it later.
    """
    from ai21.clients.studio.resources.studio_answer import AsyncStudioAnswer

    global original_answer_async
    original_answer_async = AsyncStudioAnswer.create

    async def patched_function(*args, **kwargs):
        # Timestamp before the call so the recorded event spans the request.
        init_timestamp = get_ISO_time()

        # "session" is AgentOps-only; pop() removes it before forwarding to
        # ai21 and returns it in a single lookup (replaces get + in + del).
        session = kwargs.pop("session", None)
        result = await original_answer_async(*args, **kwargs)
        return self.handle_response(result, kwargs, init_timestamp, session=session)

    AsyncStudioAnswer.create = patched_function

def undo_override(self):
if (
self.original_create is not None
Expand All @@ -231,12 +180,6 @@ def undo_override(self):
ChatCompletions,
AsyncChatCompletions,
)
from ai21.clients.studio.resources.studio_answer import (
StudioAnswer,
AsyncStudioAnswer,
)

ChatCompletions.create = self.original_create
AsyncChatCompletions.create = self.original_create_async
StudioAnswer.create = self.original_answer
AsyncStudioAnswer.create = self.original_answer_async
94 changes: 6 additions & 88 deletions examples/ai21_examples/ai21_examples.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
Expand All @@ -54,7 +54,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
Expand Down Expand Up @@ -82,7 +82,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
Expand Down Expand Up @@ -114,7 +114,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
Expand Down Expand Up @@ -171,7 +171,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
Expand Down Expand Up @@ -226,88 +226,6 @@
"await main()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Task-Specific Models Examples"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Contextual Answers"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The following example demonstrates the answering capability of AI21 without streaming."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"CONTEXT = \"\"\"\n",
"In 2020 and 2021, enormous QE — approximately $4.4 trillion, or 18%, of 2021 gross\n",
"domestic product (GDP) — and enormous fiscal stimulus (which has been and\n",
"always will be inflationary) — approximately $5 trillion, or 21%, of 2021 GDP\n",
"— stabilized markets and allowed companies to raise enormous amounts of\n",
"capital. In addition, this infusion of capital saved many small businesses and\n",
"put more than $2.5 trillion in the hands of consumers and almost $1 trillion into\n",
"state and local coffers. These actions led to a rapid decline in unemployment, \n",
"dropping from 15% to under 4% in 20 months — the magnitude and speed of which were both\n",
"unprecedented. Additionally, the economy grew 7% in 2021 despite the arrival of\n",
"the Delta and Omicron variants and the global supply chain shortages, which were\n",
"largely fueled by the dramatic upswing in consumer spending and the shift in\n",
"that spend from services to goods.\n",
"\"\"\"\n",
"response = client.answer.create(\n",
" context=CONTEXT,\n",
" question=\"Did the economy shrink after the Omicron variant arrived?\",\n",
")\n",
"print(response.answer)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Similarly, we can use streaming to get the answer."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"CONTEXT = \"\"\"\n",
"In the rapidly evolving field of Artificial Intelligence (AI), mathematical \n",
"foundations such as calculus, linear algebra, and statistics play a crucial role. \n",
"For instance, linear algebra is essential for understanding and developing machine \n",
"learning algorithms. It involves the study of vectors, matrices, and tensor operations \n",
"which are critical for performing transformations and optimizations. Additionally, \n",
"concepts from calculus like derivatives and integrals are used to optimize the \n",
"performance of AI models through gradient descent and other optimization techniques. \n",
"Statistics and probability form the backbone for making inferences and predictions, \n",
"enabling AI systems to learn from data and make decisions under uncertainty. \n",
"Understanding these mathematical principles allows for the development of more robust \n",
"and effective AI systems.\n",
"\"\"\"\n",
"response = client.answer.create(\n",
" context=CONTEXT,\n",
" question=\"Why is linear algebra important for machine learning algorithms?\",\n",
" stream=True,\n",
")\n",
"print(response.answer)"
]
},
{
"cell_type": "code",
"execution_count": null,
Expand All @@ -334,7 +252,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.19"
"version": "3.10.16"
}
},
"nbformat": 4,
Expand Down

0 comments on commit 1a0740f

Please sign in to comment.