diff --git a/python/packages/core/agent_framework/_types.py b/python/packages/core/agent_framework/_types.py
index a4e3a57330..476c013176 100644
--- a/python/packages/core/agent_framework/_types.py
+++ b/python/packages/core/agent_framework/_types.py
@@ -70,6 +70,21 @@ def _parse_content_list(contents_data: Sequence[Any]) -> list[Content]:
     return contents
 
 
+def normalize_function_call_arguments(
+    arguments: str | Mapping[str, Any] | None,
+) -> str | dict[str, Any] | None:
+    """Normalize provider tool-call arguments to a mapping when possible."""
+    if not isinstance(arguments, str):
+        return dict(arguments) if isinstance(arguments, Mapping) else arguments
+
+    try:
+        loaded = json.loads(arguments)
+    except (json.JSONDecodeError, TypeError):
+        return arguments
+
+    return loaded if isinstance(loaded, dict) else arguments
+
+
 # region Internal Helper functions for unified Content
 
 
diff --git a/python/packages/core/agent_framework/openai/_chat_client.py b/python/packages/core/agent_framework/openai/_chat_client.py
index 6df57fe428..68f70b455d 100644
--- a/python/packages/core/agent_framework/openai/_chat_client.py
+++ b/python/packages/core/agent_framework/openai/_chat_client.py
@@ -49,6 +49,7 @@
     Message,
     ResponseStream,
     UsageDetails,
+    normalize_function_call_arguments,
 )
 from ..exceptions import (
     ChatClientException,
@@ -556,7 +557,7 @@ def _parse_tool_calls_from_openai(self, choice: Choice | ChunkChoice) -> list[Co
             fcc = Content.from_function_call(
                 call_id=tool.id if tool.id else "",
                 name=tool.function.name if tool.function.name else "",
-                arguments=tool.function.arguments if tool.function.arguments else "",
+                arguments=normalize_function_call_arguments(tool.function.arguments if tool.function.arguments else ""),
                 raw_representation=tool.function,
             )
             resp.append(fcc)
diff --git a/python/packages/core/agent_framework/openai/_responses_client.py b/python/packages/core/agent_framework/openai/_responses_client.py
index 0769c3f1f9..46b48c9832 100644
--- a/python/packages/core/agent_framework/openai/_responses_client.py
+++ b/python/packages/core/agent_framework/openai/_responses_client.py
@@ -73,6 +73,7 @@
     TextSpanRegion,
     UsageDetails,
     detect_media_type_from_base64,
+    normalize_function_call_arguments,
     prepend_instructions_to_messages,
     validate_tool_mode,
 )
@@ -1523,7 +1524,7 @@ def _parse_response_from_openai(
                 Content.from_function_call(
                     call_id=item.call_id,
                     name=item.name,
-                    arguments=item.arguments,
+                    arguments=normalize_function_call_arguments(item.arguments),
                     additional_properties={"fc_id": item.id, "status": item.status},
                     raw_representation=item,
                 )
@@ -1535,7 +1536,7 @@
                 function_call=Content.from_function_call(
                     call_id=item.id,
                     name=item.name,
-                    arguments=item.arguments,
+                    arguments=normalize_function_call_arguments(item.arguments),
                     additional_properties={"server_label": item.server_label},
                     raw_representation=item,
                 ),
@@ -1915,7 +1916,7 @@ def _parse_chunk_from_openai(
                 function_call=Content.from_function_call(
                     call_id=event_item.id,
                     name=event_item.name,
-                    arguments=event_item.arguments,
+                    arguments=normalize_function_call_arguments(event_item.arguments),
                     additional_properties={"server_label": event_item.server_label},
                     raw_representation=event_item,
                 ),
diff --git a/python/packages/core/tests/openai/test_openai_chat_client.py b/python/packages/core/tests/openai/test_openai_chat_client.py
index 3dc4c23c6d..4248e37fba 100644
--- a/python/packages/core/tests/openai/test_openai_chat_client.py
+++ b/python/packages/core/tests/openai/test_openai_chat_client.py
@@ -862,6 +862,32 @@ def test_function_approval_content_is_skipped_in_preparation(
     assert prepared_mixed[0]["content"] == "I need approval for this action."
 
 
+def test_parse_tool_calls_from_openai_normalizes_json_object_arguments(
+    openai_unit_test_env: dict[str, str],
+) -> None:
+    client = OpenAIChatClient()
+
+    mock_tool_function = MagicMock()
+    mock_tool_function.name = "get_weather"
+    mock_tool_function.arguments = '{"city": "Seattle"}'
+
+    mock_tool = MagicMock()
+    mock_tool.id = "call_123"
+    mock_tool.function = mock_tool_function
+
+    mock_message = MagicMock()
+    mock_message.tool_calls = [mock_tool]
+
+    mock_choice = MagicMock()
+    mock_choice.delta = mock_message
+
+    contents = client._parse_tool_calls_from_openai(mock_choice)
+
+    assert len(contents) == 1
+    assert contents[0].type == "function_call"
+    assert contents[0].arguments == {"city": "Seattle"}
+
+
 def test_usage_content_in_streaming_response(
     openai_unit_test_env: dict[str, str],
 ) -> None:
diff --git a/python/packages/core/tests/openai/test_openai_responses_client.py b/python/packages/core/tests/openai/test_openai_responses_client.py
index 6a2c9f5173..49f2667bd4 100644
--- a/python/packages/core/tests/openai/test_openai_responses_client.py
+++ b/python/packages/core/tests/openai/test_openai_responses_client.py
@@ -1037,7 +1037,7 @@ def test_response_content_creation_with_function_call() -> None:
     function_call = response.messages[0].contents[0]
     assert function_call.call_id == "call_123"
     assert function_call.name == "get_weather"
-    assert function_call.arguments == '{"location": "Seattle"}'
+    assert function_call.arguments == {"location": "Seattle"}
 
 
 def test_prepare_content_for_opentool_approval_response() -> None:
@@ -3581,7 +3581,7 @@ def test_parse_response_from_openai_function_call_includes_status() -> None:
     assert function_call.type == "function_call"
     assert function_call.call_id == "call_123"
     assert function_call.name == "get_weather"
-    assert function_call.arguments == '{"location": "Seattle"}'
+    assert function_call.arguments == {"location": "Seattle"}
     # Verify status is included in additional_properties
     assert function_call.additional_properties is not None
     assert function_call.additional_properties.get("status") == "completed"