Skip to content

Commit d50550b

Browse files
RobertCraigie and stainless-app[bot]
authored and committed
fix(assistants): add parallel_tool_calls param to runs.stream
1 parent 98d8b2a commit d50550b

File tree

2 files changed

+19
-0
lines changed

2 files changed

+19
-0
lines changed

src/openai/resources/beta/threads/runs/runs.py

+8
Original file line numberDiff line numberDiff line change
@@ -950,6 +950,7 @@ def stream(
950950
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
951951
metadata: Optional[object] | NotGiven = NOT_GIVEN,
952952
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
953+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
953954
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
954955
temperature: Optional[float] | NotGiven = NOT_GIVEN,
955956
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -979,6 +980,7 @@ def stream(
979980
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
980981
metadata: Optional[object] | NotGiven = NOT_GIVEN,
981982
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
983+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
982984
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
983985
temperature: Optional[float] | NotGiven = NOT_GIVEN,
984986
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -1008,6 +1010,7 @@ def stream(
10081010
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
10091011
metadata: Optional[object] | NotGiven = NOT_GIVEN,
10101012
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
1013+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
10111014
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
10121015
temperature: Optional[float] | NotGiven = NOT_GIVEN,
10131016
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -1051,6 +1054,7 @@ def stream(
10511054
"tool_choice": tool_choice,
10521055
"stream": True,
10531056
"tools": tools,
1057+
"parallel_tool_calls": parallel_tool_calls,
10541058
"truncation_strategy": truncation_strategy,
10551059
"top_p": top_p,
10561060
},
@@ -2246,6 +2250,7 @@ def stream(
22462250
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
22472251
metadata: Optional[object] | NotGiven = NOT_GIVEN,
22482252
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
2253+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
22492254
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
22502255
temperature: Optional[float] | NotGiven = NOT_GIVEN,
22512256
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -2275,6 +2280,7 @@ def stream(
22752280
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
22762281
metadata: Optional[object] | NotGiven = NOT_GIVEN,
22772282
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
2283+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
22782284
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
22792285
temperature: Optional[float] | NotGiven = NOT_GIVEN,
22802286
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -2304,6 +2310,7 @@ def stream(
23042310
max_prompt_tokens: Optional[int] | NotGiven = NOT_GIVEN,
23052311
metadata: Optional[object] | NotGiven = NOT_GIVEN,
23062312
model: Union[str, ChatModel, None] | NotGiven = NOT_GIVEN,
2313+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
23072314
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
23082315
temperature: Optional[float] | NotGiven = NOT_GIVEN,
23092316
tool_choice: Optional[AssistantToolChoiceOptionParam] | NotGiven = NOT_GIVEN,
@@ -2349,6 +2356,7 @@ def stream(
23492356
"tool_choice": tool_choice,
23502357
"stream": True,
23512358
"tools": tools,
2359+
"parallel_tool_calls": parallel_tool_calls,
23522360
"truncation_strategy": truncation_strategy,
23532361
"top_p": top_p,
23542362
},

tests/lib/test_assistants.py

+11
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,17 @@ def test_create_and_run_stream_method_definition_in_sync(sync: bool, client: Ope
2828
)
2929

3030

31+
@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"])
32+
def test_run_stream_method_definition_in_sync(sync: bool, client: OpenAI, async_client: AsyncOpenAI) -> None:
33+
checking_client: OpenAI | AsyncOpenAI = client if sync else async_client
34+
35+
assert_signatures_in_sync(
36+
checking_client.beta.threads.runs.create,
37+
checking_client.beta.threads.runs.stream,
38+
exclude_params={"stream"},
39+
)
40+
41+
3142
@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"])
3243
def test_create_and_poll_method_definition_in_sync(sync: bool, client: OpenAI, async_client: AsyncOpenAI) -> None:
3344
checking_client: OpenAI | AsyncOpenAI = client if sync else async_client

0 commit comments

Comments (0)