@@ -129,21 +129,26 @@ class CallModelData(Generic[TContext]):
 @dataclass
 class _ServerConversationTracker:
     """Tracks server-side conversation state for either conversation_id or
-    previous_response_id modes."""
+    previous_response_id modes.
+
+    Note: When auto_previous_response_id=True is used, response chaining is enabled
+    automatically for the first turn, even when there's no actual previous response ID yet.
+    """
 
     conversation_id: str | None = None
     previous_response_id: str | None = None
+    auto_previous_response_id: bool = False
     sent_items: set[int] = field(default_factory=set)
     server_items: set[int] = field(default_factory=set)
 
     def track_server_items(self, model_response: ModelResponse) -> None:
         for output_item in model_response.output:
             self.server_items.add(id(output_item))
 
-        # Update previous_response_id only when using previous_response_id
+        # Update previous_response_id when using previous_response_id mode or auto mode
         if (
             self.conversation_id is None
-            and self.previous_response_id is not None
+            and (self.previous_response_id is not None or self.auto_previous_response_id)
             and model_response.response_id is not None
         ):
             self.previous_response_id = model_response.response_id
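
For illustration only (not part of the diff): a minimal sketch of how the tracker above behaves in auto mode, assuming the _ServerConversationTracker dataclass from this hunk. FakeResponse is a hypothetical stand-in for ModelResponse.

from dataclasses import dataclass, field

@dataclass
class FakeResponse:  # hypothetical stand-in for ModelResponse
    response_id: str | None
    output: list = field(default_factory=list)

tracker = _ServerConversationTracker(auto_previous_response_id=True)
print(tracker.previous_response_id)                   # None: first turn, nothing to chain yet

tracker.track_server_items(FakeResponse("resp_abc"))
print(tracker.previous_response_id)                   # "resp_abc": captured because auto mode is on

tracker.track_server_items(FakeResponse("resp_def"))
print(tracker.previous_response_id)                   # "resp_def": chaining continues on later turns
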
@@ -284,6 +289,9 @@ class RunOptions(TypedDict, Generic[TContext]):
     previous_response_id: NotRequired[str | None]
     """The ID of the previous response, if any."""
 
+    auto_previous_response_id: NotRequired[bool]
+    """Enable automatic response chaining for the first turn."""
+
     conversation_id: NotRequired[str | None]
     """The ID of the stored conversation, if any."""
 
@@ -303,6 +311,7 @@ async def run(
         hooks: RunHooks[TContext] | None = None,
         run_config: RunConfig | None = None,
         previous_response_id: str | None = None,
+        auto_previous_response_id: bool = False,
         conversation_id: str | None = None,
         session: Session | None = None,
     ) -> RunResult:
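
A hedged usage sketch of the new keyword on the public entry point (the agent configuration and input are placeholders; it assumes a model backed by the OpenAI Responses API, where server-side response chaining applies):

import asyncio
from agents import Agent, Runner

agent = Agent(name="Assistant", instructions="Reply concisely.")

async def main() -> None:
    # First turn: there is no previous response yet, but auto chaining is armed,
    # so each subsequent model call in this run chains via previous_response_id
    # instead of re-sending earlier output items as input.
    result = await Runner.run(agent, "Hello there", auto_previous_response_id=True)
    print(result.final_output)

asyncio.run(main())
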
@@ -363,6 +372,7 @@ async def run(
             hooks=hooks,
             run_config=run_config,
             previous_response_id=previous_response_id,
+            auto_previous_response_id=auto_previous_response_id,
             conversation_id=conversation_id,
             session=session,
         )
@@ -378,6 +388,7 @@ def run_sync(
         hooks: RunHooks[TContext] | None = None,
         run_config: RunConfig | None = None,
         previous_response_id: str | None = None,
+        auto_previous_response_id: bool = False,
         conversation_id: str | None = None,
         session: Session | None = None,
     ) -> RunResult:
@@ -438,6 +449,7 @@ def run_sync(
             previous_response_id=previous_response_id,
             conversation_id=conversation_id,
             session=session,
+            auto_previous_response_id=auto_previous_response_id,
         )
 
     @classmethod
@@ -450,6 +462,7 @@ def run_streamed(
         hooks: RunHooks[TContext] | None = None,
         run_config: RunConfig | None = None,
         previous_response_id: str | None = None,
+        auto_previous_response_id: bool = False,
         conversation_id: str | None = None,
         session: Session | None = None,
     ) -> RunResultStreaming:
@@ -505,6 +518,7 @@ def run_streamed(
             hooks=hooks,
             run_config=run_config,
             previous_response_id=previous_response_id,
+            auto_previous_response_id=auto_previous_response_id,
             conversation_id=conversation_id,
             session=session,
         )
@@ -527,14 +541,23 @@ async def run(
         hooks = cast(RunHooks[TContext], self._validate_run_hooks(kwargs.get("hooks")))
         run_config = kwargs.get("run_config")
         previous_response_id = kwargs.get("previous_response_id")
+        auto_previous_response_id = kwargs.get("auto_previous_response_id", False)
         conversation_id = kwargs.get("conversation_id")
         session = kwargs.get("session")
+
         if run_config is None:
             run_config = RunConfig()
 
-        if conversation_id is not None or previous_response_id is not None:
+        # Check whether to enable OpenAI server-managed conversation
+        if (
+            conversation_id is not None
+            or previous_response_id is not None
+            or auto_previous_response_id
+        ):
             server_conversation_tracker = _ServerConversationTracker(
-                conversation_id=conversation_id, previous_response_id=previous_response_id
+                conversation_id=conversation_id,
+                previous_response_id=previous_response_id,
+                auto_previous_response_id=auto_previous_response_id,
             )
         else:
             server_conversation_tracker = None
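
As a side note, the branch above amounts to a three-way opt-in. A condensed restatement follows (hypothetical helper, mirroring the diff rather than quoting the SDK):

def wants_server_conversation(
    conversation_id: str | None,
    previous_response_id: str | None,
    auto_previous_response_id: bool,
) -> bool:
    # A server-side tracker is created when any server-managed-conversation
    # signal is present; otherwise conversation items stay client-managed.
    return (
        conversation_id is not None
        or previous_response_id is not None
        or auto_previous_response_id
    )
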
@@ -773,6 +796,7 @@ def run_sync(
         hooks = kwargs.get("hooks")
         run_config = kwargs.get("run_config")
         previous_response_id = kwargs.get("previous_response_id")
+        auto_previous_response_id = kwargs.get("auto_previous_response_id", False)
         conversation_id = kwargs.get("conversation_id")
         session = kwargs.get("session")
 
@@ -819,6 +843,7 @@ def run_sync(
                 hooks=hooks,
                 run_config=run_config,
                 previous_response_id=previous_response_id,
+                auto_previous_response_id=auto_previous_response_id,
                 conversation_id=conversation_id,
             )
         )
@@ -852,6 +877,7 @@ def run_streamed(
         hooks = cast(RunHooks[TContext], self._validate_run_hooks(kwargs.get("hooks")))
         run_config = kwargs.get("run_config")
         previous_response_id = kwargs.get("previous_response_id")
+        auto_previous_response_id = kwargs.get("auto_previous_response_id", False)
         conversation_id = kwargs.get("conversation_id")
         session = kwargs.get("session")
 
@@ -907,6 +933,7 @@ def run_streamed(
                 context_wrapper=context_wrapper,
                 run_config=run_config,
                 previous_response_id=previous_response_id,
+                auto_previous_response_id=auto_previous_response_id,
                 conversation_id=conversation_id,
                 session=session,
             )
@@ -1035,6 +1062,7 @@ async def _start_streaming(
         context_wrapper: RunContextWrapper[TContext],
         run_config: RunConfig,
         previous_response_id: str | None,
+        auto_previous_response_id: bool,
         conversation_id: str | None,
         session: Session | None,
     ):
@@ -1047,9 +1075,16 @@ async def _start_streaming(
         should_run_agent_start_hooks = True
         tool_use_tracker = AgentToolUseTracker()
 
-        if conversation_id is not None or previous_response_id is not None:
+        # Check whether to enable OpenAI server-managed conversation
+        if (
+            conversation_id is not None
+            or previous_response_id is not None
+            or auto_previous_response_id
+        ):
             server_conversation_tracker = _ServerConversationTracker(
-                conversation_id=conversation_id, previous_response_id=previous_response_id
+                conversation_id=conversation_id,
+                previous_response_id=previous_response_id,
+                auto_previous_response_id=auto_previous_response_id,
             )
         else:
             server_conversation_tracker = None
@@ -1376,6 +1411,7 @@ async def _run_single_turn_streamed(
         previous_response_id = (
             server_conversation_tracker.previous_response_id
             if server_conversation_tracker
+            and server_conversation_tracker.previous_response_id is not None
             else None
         )
         conversation_id = (
@@ -1814,6 +1850,7 @@ async def _get_new_response(
         previous_response_id = (
             server_conversation_tracker.previous_response_id
             if server_conversation_tracker
+            and server_conversation_tracker.previous_response_id is not None
             else None
         )
         conversation_id = (
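
The two guards added above matter most on the first auto-mode turn: the tracker exists but has not captured a response ID yet, so the model call must still receive previous_response_id=None. A condensed sketch of that resolution (hypothetical helper name, mirroring the conditional expressions in the diff):

def resolve_previous_response_id(
    tracker: _ServerConversationTracker | None,
) -> str | None:
    # Pass a previous_response_id to the model only once the tracker has
    # actually captured one; in auto mode the first turn therefore sends None.
    if tracker is not None and tracker.previous_response_id is not None:
        return tracker.previous_response_id
    return None
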