     parse_response,
     type_to_text_format_param as _type_to_text_format_param,
 )
-from ...types.shared.chat_model import ChatModel
 from ...types.responses.response import Response
 from ...types.responses.tool_param import ToolParam, ParseableToolParam
 from ...types.shared_params.metadata import Metadata
@@ -881,22 +880,29 @@ def stream(
         self,
         *,
         input: Union[str, ResponseInputParam],
-        model: Union[str, ChatModel],
+        model: ResponsesModel,
         background: Optional[bool] | NotGiven = NOT_GIVEN,
         text_format: type[TextFormatT] | NotGiven = NOT_GIVEN,
         tools: Iterable[ParseableToolParam] | NotGiven = NOT_GIVEN,
+        conversation: Optional[response_create_params.Conversation] | NotGiven = NOT_GIVEN,
         include: Optional[List[ResponseIncludable]] | NotGiven = NOT_GIVEN,
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_output_tokens: Optional[int] | NotGiven = NOT_GIVEN,
+        max_tool_calls: Optional[int] | NotGiven = NOT_GIVEN,
         metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN,
         previous_response_id: Optional[str] | NotGiven = NOT_GIVEN,
+        prompt: Optional[ResponsePromptParam] | NotGiven = NOT_GIVEN,
+        prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning: Optional[Reasoning] | NotGiven = NOT_GIVEN,
+        safety_identifier: str | NotGiven = NOT_GIVEN,
+        service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[response_create_params.StreamOptions] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
+        text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
         tool_choice: response_create_params.ToolChoice | NotGiven = NOT_GIVEN,
+        top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         truncation: Optional[Literal["auto", "disabled"]] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
@@ -913,22 +919,29 @@ def stream(
         *,
         response_id: str | NotGiven = NOT_GIVEN,
         input: Union[str, ResponseInputParam] | NotGiven = NOT_GIVEN,
-        model: Union[str, ChatModel] | NotGiven = NOT_GIVEN,
+        model: ResponsesModel | NotGiven = NOT_GIVEN,
         background: Optional[bool] | NotGiven = NOT_GIVEN,
         text_format: type[TextFormatT] | NotGiven = NOT_GIVEN,
         tools: Iterable[ParseableToolParam] | NotGiven = NOT_GIVEN,
+        conversation: Optional[response_create_params.Conversation] | NotGiven = NOT_GIVEN,
         include: Optional[List[ResponseIncludable]] | NotGiven = NOT_GIVEN,
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_output_tokens: Optional[int] | NotGiven = NOT_GIVEN,
+        max_tool_calls: Optional[int] | NotGiven = NOT_GIVEN,
         metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN,
         previous_response_id: Optional[str] | NotGiven = NOT_GIVEN,
+        prompt: Optional[ResponsePromptParam] | NotGiven = NOT_GIVEN,
+        prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning: Optional[Reasoning] | NotGiven = NOT_GIVEN,
+        safety_identifier: str | NotGiven = NOT_GIVEN,
+        service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[response_create_params.StreamOptions] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
         tool_choice: response_create_params.ToolChoice | NotGiven = NOT_GIVEN,
+        top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         truncation: Optional[Literal["auto", "disabled"]] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
@@ -943,18 +956,25 @@ def stream(
         new_response_args = {
             "input": input,
             "model": model,
+            "conversation": conversation,
             "include": include,
             "instructions": instructions,
             "max_output_tokens": max_output_tokens,
+            "max_tool_calls": max_tool_calls,
             "metadata": metadata,
             "parallel_tool_calls": parallel_tool_calls,
             "previous_response_id": previous_response_id,
+            "prompt": prompt,
+            "prompt_cache_key": prompt_cache_key,
             "reasoning": reasoning,
+            "safety_identifier": safety_identifier,
+            "service_tier": service_tier,
             "store": store,
             "stream_options": stream_options,
             "temperature": temperature,
             "text": text,
             "tool_choice": tool_choice,
+            "top_logprobs": top_logprobs,
             "top_p": top_p,
             "truncation": truncation,
             "user": user,
@@ -989,19 +1009,26 @@ def stream(
                 input=input,
                 model=model,
                 tools=tools,
+                conversation=conversation,
                 include=include,
                 instructions=instructions,
                 max_output_tokens=max_output_tokens,
+                max_tool_calls=max_tool_calls,
                 metadata=metadata,
                 parallel_tool_calls=parallel_tool_calls,
                 previous_response_id=previous_response_id,
+                prompt=prompt,
+                prompt_cache_key=prompt_cache_key,
                 store=store,
                 stream_options=stream_options,
                 stream=True,
                 temperature=temperature,
                 text=text,
                 tool_choice=tool_choice,
                 reasoning=reasoning,
+                safety_identifier=safety_identifier,
+                service_tier=service_tier,
+                top_logprobs=top_logprobs,
                 top_p=top_p,
                 truncation=truncation,
                 user=user,
@@ -1057,7 +1084,7 @@ def parse(
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
         stream_options: Optional[response_create_params.StreamOptions] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
+        text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
         tool_choice: response_create_params.ToolChoice | NotGiven = NOT_GIVEN,
         tools: Iterable[ParseableToolParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
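As a usage illustration of the widened synchronous `stream()` signature above, here is a minimal sketch. It assumes a current `openai` client; the model name and all parameter values are illustrative and are not taken from this commit.

```python
from openai import OpenAI

client = OpenAI()

# Minimal sketch: model name and values are illustrative, not from this commit.
with client.responses.stream(
    model="gpt-4.1",
    input="Summarize the plot of Hamlet in two sentences.",
    service_tier="auto",            # new: "auto"/"default"/"flex"/"scale"/"priority"
    max_tool_calls=4,               # new: upper bound on built-in tool calls
    top_logprobs=2,                 # new: log probabilities per output token
    prompt_cache_key="hamlet-v1",   # new: prompt-caching hint
    safety_identifier="user-1234",  # new: stable end-user identifier
) as stream:
    for event in stream:
        # Print text deltas as they arrive.
        if event.type == "response.output_text.delta":
            print(event.delta, end="")
    print()
```

These keyword arguments flow through `new_response_args` into the underlying `create()` call, so they should behave the same as on `client.responses.create()`.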
@@ -2275,22 +2302,29 @@ def stream(
         self,
         *,
         input: Union[str, ResponseInputParam],
-        model: Union[str, ChatModel],
+        model: ResponsesModel,
         background: Optional[bool] | NotGiven = NOT_GIVEN,
         text_format: type[TextFormatT] | NotGiven = NOT_GIVEN,
         tools: Iterable[ParseableToolParam] | NotGiven = NOT_GIVEN,
+        conversation: Optional[response_create_params.Conversation] | NotGiven = NOT_GIVEN,
         include: Optional[List[ResponseIncludable]] | NotGiven = NOT_GIVEN,
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_output_tokens: Optional[int] | NotGiven = NOT_GIVEN,
+        max_tool_calls: Optional[int] | NotGiven = NOT_GIVEN,
         metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN,
         previous_response_id: Optional[str] | NotGiven = NOT_GIVEN,
+        prompt: Optional[ResponsePromptParam] | NotGiven = NOT_GIVEN,
+        prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning: Optional[Reasoning] | NotGiven = NOT_GIVEN,
+        safety_identifier: str | NotGiven = NOT_GIVEN,
+        service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[response_create_params.StreamOptions] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
+        text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
         tool_choice: response_create_params.ToolChoice | NotGiven = NOT_GIVEN,
+        top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         truncation: Optional[Literal["auto", "disabled"]] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
@@ -2307,22 +2341,29 @@ def stream(
         *,
         response_id: str | NotGiven = NOT_GIVEN,
         input: Union[str, ResponseInputParam] | NotGiven = NOT_GIVEN,
-        model: Union[str, ChatModel] | NotGiven = NOT_GIVEN,
+        model: ResponsesModel | NotGiven = NOT_GIVEN,
         background: Optional[bool] | NotGiven = NOT_GIVEN,
         text_format: type[TextFormatT] | NotGiven = NOT_GIVEN,
         tools: Iterable[ParseableToolParam] | NotGiven = NOT_GIVEN,
+        conversation: Optional[response_create_params.Conversation] | NotGiven = NOT_GIVEN,
         include: Optional[List[ResponseIncludable]] | NotGiven = NOT_GIVEN,
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         max_output_tokens: Optional[int] | NotGiven = NOT_GIVEN,
+        max_tool_calls: Optional[int] | NotGiven = NOT_GIVEN,
         metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
         parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN,
         previous_response_id: Optional[str] | NotGiven = NOT_GIVEN,
+        prompt: Optional[ResponsePromptParam] | NotGiven = NOT_GIVEN,
+        prompt_cache_key: str | NotGiven = NOT_GIVEN,
         reasoning: Optional[Reasoning] | NotGiven = NOT_GIVEN,
+        safety_identifier: str | NotGiven = NOT_GIVEN,
+        service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
         store: Optional[bool] | NotGiven = NOT_GIVEN,
         stream_options: Optional[response_create_params.StreamOptions] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
+        text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
         tool_choice: response_create_params.ToolChoice | NotGiven = NOT_GIVEN,
+        top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
         truncation: Optional[Literal["auto", "disabled"]] | NotGiven = NOT_GIVEN,
         user: str | NotGiven = NOT_GIVEN,
@@ -2337,18 +2378,25 @@ def stream(
         new_response_args = {
             "input": input,
             "model": model,
+            "conversation": conversation,
             "include": include,
             "instructions": instructions,
             "max_output_tokens": max_output_tokens,
+            "max_tool_calls": max_tool_calls,
             "metadata": metadata,
             "parallel_tool_calls": parallel_tool_calls,
             "previous_response_id": previous_response_id,
+            "prompt": prompt,
+            "prompt_cache_key": prompt_cache_key,
             "reasoning": reasoning,
+            "safety_identifier": safety_identifier,
+            "service_tier": service_tier,
             "store": store,
             "stream_options": stream_options,
             "temperature": temperature,
             "text": text,
             "tool_choice": tool_choice,
+            "top_logprobs": top_logprobs,
             "top_p": top_p,
             "truncation": truncation,
             "user": user,
@@ -2384,21 +2432,29 @@ def stream(
                 model=model,
                 stream=True,
                 tools=tools,
+                conversation=conversation,
                 include=include,
                 instructions=instructions,
                 max_output_tokens=max_output_tokens,
+                max_tool_calls=max_tool_calls,
                 metadata=metadata,
                 parallel_tool_calls=parallel_tool_calls,
                 previous_response_id=previous_response_id,
+                prompt=prompt,
+                prompt_cache_key=prompt_cache_key,
                 store=store,
                 stream_options=stream_options,
                 temperature=temperature,
                 text=text,
                 tool_choice=tool_choice,
                 reasoning=reasoning,
+                safety_identifier=safety_identifier,
+                service_tier=service_tier,
+                top_logprobs=top_logprobs,
                 top_p=top_p,
                 truncation=truncation,
                 user=user,
+                background=background,
                 extra_headers=extra_headers,
                 extra_query=extra_query,
                 extra_body=extra_body,
@@ -2455,7 +2511,7 @@ async def parse(
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
         stream_options: Optional[response_create_params.StreamOptions] | NotGiven = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
-        text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
+        text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
         tool_choice: response_create_params.ToolChoice | NotGiven = NOT_GIVEN,
         tools: Iterable[ParseableToolParam] | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
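The async resource mirrors the same widening, and the async `create()` call above now forwards `background=` as well. A minimal async sketch under the same assumptions (illustrative model name and values):

```python
import asyncio

from openai import AsyncOpenAI


async def main() -> None:
    client = AsyncOpenAI()
    # Mirrors the sync sketch; values are illustrative, not from this commit.
    async with client.responses.stream(
        model="gpt-4.1",
        input="Give three facts about honeybees.",
        service_tier="default",
        top_logprobs=1,
    ) as stream:
        async for event in stream:
            if event.type == "response.output_text.delta":
                print(event.delta, end="")
    print()


asyncio.run(main())
```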