3 files changed, +10 -5 lines changed
@@ -5,6 +5,7 @@
 from typing import Any, Literal
 
 from openai._types import Body, Headers, Query
+from openai.types.responses import ResponseIncludable
 from openai.types.shared import Reasoning
 from pydantic import BaseModel
 
@@ -61,6 +62,10 @@ class ModelSettings:
     """Whether to include usage chunk.
     Defaults to True if not provided."""
 
+    response_include: list[ResponseIncludable] | None = None
+    """Additional output data to include in the model response.
+    [include parameter](https://platform.openai.com/docs/api-reference/responses/create#responses-create-include)"""
+
     extra_query: Query | None = None
     """Additional query fields to provide with the request.
     Defaults to None if not provided."""
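The hunk above adds an opt-in response_include field to ModelSettings. A minimal usage sketch follows, assuming the SDK exposes Agent and ModelSettings at the top level of the agents package; the import path and Agent wiring are illustrative assumptions, not part of the diff:

# Assumed import path; adjust to the actual module layout.
from agents import Agent, ModelSettings

# Ask the Responses API to return extra output data alongside the normal payload.
# "reasoning.encrypted_content" is the value the removed hard-coded behaviour used;
# any value accepted by the Responses API include parameter should work here.
settings = ModelSettings(
    store=False,
    response_include=["reasoning.encrypted_content"],
)

agent = Agent(
    name="assistant",
    instructions="Answer briefly.",
    model_settings=settings,
)

The docstring links to the Responses API include parameter, so the accepted values are whatever ResponseIncludable enumerates upstream.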
@@ -240,10 +240,9 @@ async def _fetch_response(
         converted_tools = Converter.convert_tools(tools, handoffs)
         response_format = Converter.get_response_format(output_schema)
 
-        # When store is set to False, we need to include the reasoning content for the preceeding
-        # response.
-        if not model_settings.store:
-            converted_tools.includes.append("reasoning.encrypted_content")
+        include: list[ResponseIncludable] = converted_tools.includes
+        if model_settings.response_include is not None:
+            include = list({*include, *model_settings.response_include})
 
         if _debug.DONT_LOG_MODEL_DATA:
             logger.debug("Calling LLM")
@@ -263,7 +262,7 @@ async def _fetch_response(
             instructions=self._non_null_or_not_given(system_instructions),
             model=self.model,
             input=list_input,
-            include=converted_tools.includes,
+            include=include,
             tools=converted_tools.tools,
             prompt=self._non_null_or_not_given(prompt),
             temperature=self._non_null_or_not_given(model_settings.temperature),
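The replacement logic above merges the converter-derived includes with the caller-supplied response_include via a set union, which de-duplicates entries but does not preserve order. A standalone sketch of that merge, with plain str lists standing in for ResponseIncludable and a hypothetical helper name:

from typing import Sequence


def merge_includes(
    converter_includes: list[str],
    response_include: Sequence[str] | None,
) -> list[str]:
    # Mirrors the diff: start from the converter's includes and, if the caller
    # supplied response_include, union the two sets and rebuild a list.
    include = converter_includes
    if response_include is not None:
        include = list({*include, *response_include})
    return include


print(merge_includes(["file_search_call.results"], ["reasoning.encrypted_content"]))
# Both values, de-duplicated, in arbitrary (set) order.

Note that the removed block in this hunk no longer appends "reasoning.encrypted_content" automatically when store is False; callers relying on that behaviour now pass it explicitly via response_include, which is what the test hunk below does.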
@@ -44,6 +44,7 @@ def test_all_fields_serialization() -> None:
         metadata={"foo": "bar"},
         store=False,
         include_usage=False,
+        response_include=["reasoning.encrypted_content"],
         extra_query={"foo": "bar"},
         extra_body={"foo": "bar"},
         extra_headers={"foo": "bar"},