Describe the bug
Wrong option set when enable_json_response=True
To Reproduce
Steps to reproduce the behavior:
# Imports (module paths as of the semantic-kernel version referenced in the traceback; adjust if needed)
from semantic_kernel.kernel import Kernel
from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.contents.utils.author_role import AuthorRole

# Run inside an async context (e.g. via asyncio.run)
a = await AzureAssistantAgent.create(
    kernel=Kernel(),
    service_id="agent",
    name="a1",
    instructions="",
    enable_json_response=True,
)
tid = await a.create_thread()
await a.add_chat_message(
    thread_id=tid,
    message=ChatMessageContent(role=AuthorRole.USER, content="Return numbers 1 to 10 in json format"),
)
async for r in a.invoke(thread_id=tid):
    ar = r.content
    print(ar)
The invoke method fails with:
File "...\main.py", line 357, in main
async for response in production_optimizer_agent.invoke_stream(thread_id=optimizer_thread_id):
File "....venv\Lib\site-packages\semantic_kernel\agents\open_ai\open_ai_assistant_base.py", line 827, in invoke_stream
async for content in self._invoke_internal_stream(
File "....venv\Lib\site-packages\semantic_kernel\agents\open_ai\open_ai_assistant_base.py", line 905, in _invoke_internal_stream
async with stream as response_stream:
^^^^^^
File "....venv\Lib\site-packages\openai\lib\streaming_assistants.py", line 879, in aenter
self.__stream = await self.__api_request
^^^^^^^^^^^^^^^^^^^^^^^^
File "....venv\Lib\site-packages\openai_base_client.py", line 1839, in post
return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "....venv\Lib\site-packages\openai_base_client.py", line 1533, in request
return await self._request(
^^^^^^^^^^^^^^^^^^^^
File "....venv\Lib\site-packages\openai_base_client.py", line 1634, in _request
raise self._make_status_error_from_response(err.response) from None
openai.BadRequestError: Error code: 400 - {'error': {'message': "Invalid value: 'json'. Supported values are: 'auto'.", 'type': 'invalid_request_error', 'param': 'response_format', 'code': 'invalid_value'}}
Expected behavior
A JSON response like the one below:
{
    "numbers": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
}
Platform
Language: Python
Changing line 1052 of ....venv\Lib\site-packages\semantic_kernel\agents\open_ai\open_ai_assistant_base.py (in the _generate_options method)
from:
"response_format": "json" if merged_options.get("enable_json_response") else None,
to:
"response_format": {"type": "json_object"} if merged_options.get("enable_json_response") else None,
solves the problem.
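For reference, here is a minimal sketch (outside semantic-kernel, calling the openai SDK's Assistants API directly) of the response_format shape the service accepts. The function name, client, and identifiers are illustrative placeholders, not semantic-kernel APIs:

from openai import AsyncAzureOpenAI

async def start_json_run(client: AsyncAzureOpenAI, assistant_id: str, thread_id: str):
    # Passing the bare string "json" here reproduces the 400 above; the API expects
    # either "auto" or an object such as {"type": "json_object"}.
    return await client.beta.threads.runs.create(
        thread_id=thread_id,
        assistant_id=assistant_id,
        response_format={"type": "json_object"},
    )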