From 7f917fa67e483b0447a723bb54de7df5f0500b01 Mon Sep 17 00:00:00 2001
From: Ben Sherman
Date: Wed, 4 Dec 2024 19:20:08 -0800
Subject: [PATCH] chore(weave): capture request id for openai completions

---
 .../instructor/instructor_test.py        |  6 ++--
 tests/integrations/openai/openai_test.py | 36 +++++++++----------
 weave/integrations/openai/openai_sdk.py  | 13 ++++---
 3 files changed, 30 insertions(+), 25 deletions(-)

diff --git a/tests/integrations/instructor/instructor_test.py b/tests/integrations/instructor/instructor_test.py
index 761d961b9af9..b9667ebb0866 100644
--- a/tests/integrations/instructor/instructor_test.py
+++ b/tests/integrations/instructor/instructor_test.py
@@ -64,7 +64,7 @@ def test_instructor_openai(
     assert op_name_from_ref(call.op_name) == "openai.chat.completions.create"
     output = call.output
     output_arguments = json.loads(
-        output.choices[0].message.tool_calls[0].function.arguments
+        output["choices"][0]["message"]["tool_calls"][0]["function"]["arguments"]
     )
     assert "person_name" in output_arguments
     assert "age" in output_arguments
@@ -112,7 +112,7 @@ async def extract_person(text: str) -> Person:
     assert op_name_from_ref(call.op_name) == "openai.chat.completions.create"
     output = call.output
     output_arguments = json.loads(
-        output.choices[0].message.tool_calls[0].function.arguments
+        output["choices"][0]["message"]["tool_calls"][0]["function"]["arguments"]
     )
     assert "person_name" in output_arguments
     assert "age" in output_arguments
@@ -166,7 +166,7 @@ def test_instructor_iterable(
     assert call.started_at < call.ended_at
     assert op_name_from_ref(call.op_name) == "openai.chat.completions.create"
     output = call.output
-    output_arguments = json.loads(output.choices[0].message.content)
+    output_arguments = json.loads(output["choices"][0]["message"]["content"])
     assert "tasks" in output_arguments
     assert "person_name" in output_arguments["tasks"][0]
     assert "age" in output_arguments["tasks"][0]
diff --git a/tests/integrations/openai/openai_test.py b/tests/integrations/openai/openai_test.py
index 331a25800305..3022defc5a1f 100644
--- a/tests/integrations/openai/openai_test.py
+++ b/tests/integrations/openai/openai_test.py
@@ -38,10 +38,10 @@ def test_openai_quickstart(client: weave.trace.weave_client.WeaveClient) -> None
     assert call.started_at < call.ended_at  # type: ignore
 
     output = call.output
-    assert output.model == "gpt-4o-2024-05-13"
-    assert output.object == "chat.completion"
+    assert output["model"] == "gpt-4o-2024-05-13"
+    assert output["object"] == "chat.completion"
 
-    usage = call.summary["usage"][output.model]  # type: ignore
+    usage = call.summary["usage"][output["model"]]  # type: ignore
     assert usage["requests"] == 1
     assert usage["completion_tokens"] == 28
     assert usage["prompt_tokens"] == 11
@@ -86,10 +86,10 @@ async def test_openai_async_quickstart(
     assert call.started_at < call.ended_at  # type: ignore
 
     output = call.output
-    assert output.model == "gpt-4o-2024-05-13"
-    assert output.object == "chat.completion"
+    assert output["model"] == "gpt-4o-2024-05-13"
+    assert output["object"] == "chat.completion"
 
-    usage = call.summary["usage"][output.model]  # type: ignore
+    usage = call.summary["usage"][output["model"]]  # type: ignore
     assert usage["requests"] == 1
     assert usage["completion_tokens"] == 28
     assert usage["prompt_tokens"] == 11
@@ -315,10 +315,10 @@ def test_openai_function_call(client: weave.trace.weave_client.WeaveClient) -> N
     assert call.started_at < call.ended_at  # type: ignore
 
     output = call.output
-    assert output.model == "gpt-4o-2024-05-13"
-    assert output.object == "chat.completion"
+    assert output["model"] == "gpt-4o-2024-05-13"
+    assert output["object"] == "chat.completion"
 
-    usage = call.summary["usage"][output.model]  # type: ignore
+    usage = call.summary["usage"][output["model"]]  # type: ignore
     assert usage["total_tokens"] == 117
     assert usage["completion_tokens"] == 18
     assert usage["prompt_tokens"] == 99
@@ -401,10 +401,10 @@ async def test_openai_function_call_async(
     assert call.started_at < call.ended_at  # type: ignore
 
     output = call.output
-    assert output.model == "gpt-4o-2024-05-13"
-    assert output.object == "chat.completion"
+    assert output["model"] == "gpt-4o-2024-05-13"
+    assert output["object"] == "chat.completion"
 
-    usage = call.summary["usage"][output.model]  # type: ignore
+    usage = call.summary["usage"][output["model"]]  # type: ignore
     assert usage["total_tokens"] == 117
     assert usage["completion_tokens"] == 18
     assert usage["prompt_tokens"] == 99
@@ -577,10 +577,10 @@ def test_openai_tool_call(client: weave.trace.weave_client.WeaveClient) -> None:
     assert call.started_at < call.ended_at  # type: ignore
 
     output = call.output
-    assert output.model == "gpt-4o-2024-05-13"
-    assert output.object == "chat.completion"
+    assert output["model"] == "gpt-4o-2024-05-13"
+    assert output["object"] == "chat.completion"
 
-    usage = call.summary["usage"][output.model]  # type: ignore
+    usage = call.summary["usage"][output["model"]]  # type: ignore
     assert usage["total_tokens"] == 117
     assert usage["completion_tokens"] == 27
     assert usage["prompt_tokens"] == 90
@@ -664,10 +664,10 @@ async def test_openai_tool_call_async(
     assert call.started_at < call.ended_at  # type: ignore
 
     output = call.output
-    assert output.model == "gpt-4o-2024-05-13"
-    assert output.object == "chat.completion"
+    assert output["model"] == "gpt-4o-2024-05-13"
+    assert output["object"] == "chat.completion"
 
-    usage = call.summary["usage"][output.model]  # type: ignore
+    usage = call.summary["usage"][output["model"]]  # type: ignore
     assert usage["total_tokens"] == 117
     assert usage["completion_tokens"] == 27
     assert usage["prompt_tokens"] == 90
diff --git a/weave/integrations/openai/openai_sdk.py b/weave/integrations/openai/openai_sdk.py
index e68661875f39..7814700d4d3a 100644
--- a/weave/integrations/openai/openai_sdk.py
+++ b/weave/integrations/openai/openai_sdk.py
@@ -94,8 +94,9 @@ def _get_tool_calls(
             )
         return _tool_calls
 
+    dump = None
     if isinstance(value, ChatCompletionChunk):
-        final_value = ChatCompletion(
+        dump = ChatCompletion(
             id=value.id,
             choices=[
                 {
@@ -116,10 +117,14 @@ def _get_tool_calls(
             object="chat.completion",
             system_fingerprint=value.system_fingerprint,
             usage=value.usage if hasattr(value, "usage") else None,
-        )
-        return final_value.model_dump(exclude_unset=True, exclude_none=True)
-    else:
+        ).model_dump(exclude_unset=True, exclude_none=True)
+    elif not hasattr(value, "model_dump"):
         return value
+    else:
+        dump = value.model_dump(exclude_unset=True, exclude_none=True)
+    if hasattr(value, "_request_id"):
+        dump["request_id"] = value._request_id
+    return dump
 
 
 def openai_accumulator(