From d7a0b9b200e4eab2b5b98c2e845b7a76cfa3911f Mon Sep 17 00:00:00 2001
From: Alexander Alderman Webb
Date: Thu, 12 Feb 2026 11:09:33 +0100
Subject: [PATCH 1/3] fix(openai-agents): Patch execute_final_output functions
 following library refactor

---
 .../integrations/openai_agents/__init__.py | 29 +++++++--
 .../openai_agents/patches/__init__.py      |  2 +-
 .../openai_agents/patches/agent_run.py     | 63 +++++++------------
 3 files changed, 47 insertions(+), 47 deletions(-)

diff --git a/sentry_sdk/integrations/openai_agents/__init__.py b/sentry_sdk/integrations/openai_agents/__init__.py
index 6bf62c06b6..fe6f822ad5 100644
--- a/sentry_sdk/integrations/openai_agents/__init__.py
+++ b/sentry_sdk/integrations/openai_agents/__init__.py
@@ -11,7 +11,7 @@
     _execute_handoffs,
     _create_run_wrapper,
     _create_run_streamed_wrapper,
-    _patch_agent_run,
+    _execute_final_output,
     _patch_error_tracing,
 )
@@ -58,9 +58,6 @@ def _patch_runner() -> None:
         agents.run.DEFAULT_AGENT_RUNNER.run_streamed
     )
 
-    # Creating the actual spans for each agent run (works for both streaming and non-streaming).
-    _patch_agent_run()
-
 
 class OpenAIAgentsIntegration(Integration):
     """
@@ -151,6 +148,16 @@
             new_wrapped_execute_handoffs
         )
 
+        @wraps(turn_resolution.execute_final_output)
+        async def new_wrapped_final_output(*args: "Any", **kwargs: "Any") -> "Any":
+            return await _execute_final_output(
+                turn_resolution.execute_final_output, *args, **kwargs
+            )
+
+        agents.run_internal.turn_resolution.execute_final_output = (
+            new_wrapped_final_output
+        )
+
         return
 
     original_get_all_tools = AgentRunner._get_all_tools
@@ -212,3 +219,17 @@ async def old_wrapped_execute_handoffs(
     agents._run_impl.RunImpl.execute_handoffs = classmethod(
         old_wrapped_execute_handoffs
     )
+
+    original_execute_final_output = agents._run_impl.RunImpl.execute_final_output
+
+    @wraps(agents._run_impl.RunImpl.execute_final_output.__func__)
+    async def old_wrapped_final_output(
+        cls: "agents.Runner", *args: "Any", **kwargs: "Any"
+    ) -> "Any":
+        return await _execute_final_output(
+            original_execute_final_output, *args, **kwargs
+        )
+
+    agents._run_impl.RunImpl.execute_final_output = classmethod(
+        old_wrapped_final_output
+    )
diff --git a/sentry_sdk/integrations/openai_agents/patches/__init__.py b/sentry_sdk/integrations/openai_agents/patches/__init__.py
index ce2ba58d85..e3be2776b3 100644
--- a/sentry_sdk/integrations/openai_agents/patches/__init__.py
+++ b/sentry_sdk/integrations/openai_agents/patches/__init__.py
@@ -5,6 +5,6 @@
     _run_single_turn,
     _run_single_turn_streamed,
     _execute_handoffs,
-    _patch_agent_run,
+    _execute_final_output,
 )  # noqa: F401
 from .error_tracing import _patch_error_tracing  # noqa: F401
diff --git a/sentry_sdk/integrations/openai_agents/patches/agent_run.py b/sentry_sdk/integrations/openai_agents/patches/agent_run.py
index 506e169337..c341b75c82 100644
--- a/sentry_sdk/integrations/openai_agents/patches/agent_run.py
+++ b/sentry_sdk/integrations/openai_agents/patches/agent_run.py
@@ -204,49 +204,28 @@ async def _execute_handoffs(
     return result
 
 
-def _patch_agent_run() -> None:
+async def _execute_final_output(
+    original_execute_final_output: "Callable[..., SingleStepResult]",
+    *args: "Any",
+    **kwargs: "Any",
+) -> "SingleStepResult":
     """
-    Patches AgentRunner methods to create agent invocation spans.
-    This directly patches the execution flow to track when agents start and stop.
+    Patched execute_final_output that
+    - ends the agent invocation span.
+    - ends the workflow span if the response is streamed.
""" - # Store original methods - original_execute_final_output = agents._run_impl.RunImpl.execute_final_output + agent = kwargs.get("agent") + context_wrapper = kwargs.get("context_wrapper") + final_output = kwargs.get("final_output") - @wraps( - original_execute_final_output.__func__ - if hasattr(original_execute_final_output, "__func__") - else original_execute_final_output - ) - async def patched_execute_final_output( - cls: "agents.Runner", *args: "Any", **kwargs: "Any" - ) -> "Any": - """ - Patched execute_final_output that - - ends the agent invocation span. - - ends the workflow span if the response is streamed. - """ - - agent = kwargs.get("agent") - context_wrapper = kwargs.get("context_wrapper") - final_output = kwargs.get("final_output") - - try: - result = await original_execute_final_output(*args, **kwargs) - finally: - with capture_internal_exceptions(): - if ( - agent - and context_wrapper - and _has_active_agent_span(context_wrapper) - ): - end_invoke_agent_span(context_wrapper, agent, final_output) - # For streaming, close the workflow span (non-streaming uses context manager in _create_run_wrapper) - _close_streaming_workflow_span(agent) - - return result - - # Apply patches - agents._run_impl.RunImpl.execute_final_output = classmethod( - patched_execute_final_output - ) + try: + result = await original_execute_final_output(*args, **kwargs) + finally: + with capture_internal_exceptions(): + if agent and context_wrapper and _has_active_agent_span(context_wrapper): + end_invoke_agent_span(context_wrapper, agent, final_output) + # For streaming, close the workflow span (non-streaming uses context manager in _create_run_wrapper) + _close_streaming_workflow_span(agent) + + return result From 26492738e0e10aef76fa6fc14e62215e42f12bde Mon Sep 17 00:00:00 2001 From: Alexander Alderman Webb Date: Thu, 12 Feb 2026 11:32:14 +0100 Subject: [PATCH 2/3] . 

---
 sentry_sdk/integrations/openai_agents/__init__.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/sentry_sdk/integrations/openai_agents/__init__.py b/sentry_sdk/integrations/openai_agents/__init__.py
index e26988755c..cf3977c4f8 100644
--- a/sentry_sdk/integrations/openai_agents/__init__.py
+++ b/sentry_sdk/integrations/openai_agents/__init__.py
@@ -152,10 +152,14 @@ async def new_wrapped_execute_handoffs(
             new_wrapped_execute_handoffs
         )
 
+        original_execute_final_output = turn_resolution.execute_final_output
+
         @wraps(turn_resolution.execute_final_output)
-        async def new_wrapped_final_output(*args: "Any", **kwargs: "Any") -> "Any":
+        async def new_wrapped_final_output(
+            *args: "Any", **kwargs: "Any"
+        ) -> "SingleStepResult":
             return await _execute_final_output(
-                turn_resolution.execute_final_output, *args, **kwargs
+                original_execute_final_output, *args, **kwargs
             )
 
         agents.run_internal.turn_resolution.execute_final_output = (
             new_wrapped_final_output
         )
@@ -229,7 +233,7 @@ async def old_wrapped_execute_handoffs(
     @wraps(agents._run_impl.RunImpl.execute_final_output.__func__)
     async def old_wrapped_final_output(
         cls: "agents.Runner", *args: "Any", **kwargs: "Any"
-    ) -> "Any":
+    ) -> "SingleStepResult":
         return await _execute_final_output(
             original_execute_final_output, *args, **kwargs
         )

From e94acfafad1a63b0e0f05969ceb31582143600aa Mon Sep 17 00:00:00 2001
From: Alexander Alderman Webb
Date: Thu, 12 Feb 2026 13:50:56 +0100
Subject: [PATCH 3/3] update docstring

---
 sentry_sdk/integrations/openai_agents/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sentry_sdk/integrations/openai_agents/__init__.py b/sentry_sdk/integrations/openai_agents/__init__.py
index 83f03378e0..4d778676ac 100644
--- a/sentry_sdk/integrations/openai_agents/__init__.py
+++ b/sentry_sdk/integrations/openai_agents/__init__.py
@@ -79,7 +79,7 @@ class OpenAIAgentsIntegration(Integration):
       - A Model instance is created at the start of the loop by calling the `Runner._get_model()`. We patch the Model instance using `patches._get_model()`.
       - Available tools are also determined at the start of the loop, with `Runner._get_all_tools()`. We patch Tool instances by iterating through the returned tools in `patches._get_all_tools()`.
       - In each loop iteration, `run_single_turn()` or `run_single_turn_streamed()` is responsible for calling the Responses API, patched with `patches._run_single_turn()` and `patches._run_single_turn_streamed()`.
-    4. On loop termination, `RunImpl.execute_final_output()` is called. The function is patched with `patched_execute_final_output()`.
+    4. On loop termination, `RunImpl.execute_final_output()` is called. The function is patched with `patches._execute_final_output()`.
     Local tools are run based on the return value from the Responses API as a post-API call step in the above loop.
     Hosted MCP Tools are run as part of the Responses API call, and involve OpenAI reaching out to an external MCP server.
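
Note on the wrapping pattern used throughout this series: the sketch below is a minimal, illustrative reduction of what the patches do for the module-level case. It assumes an openai-agents layout that exposes `agents.run_internal.turn_resolution.execute_final_output` as a coroutine function (older releases keep `execute_final_output` as a classmethod on `RunImpl`, which is why the series wraps both call sites). The helper name `record_final_output_spans` is a placeholder for the Sentry span bookkeeping (`end_invoke_agent_span`, `_close_streaming_workflow_span`) and is not part of either library.

from functools import wraps

import agents  # openai-agents; module layout assumed as described above

turn_resolution = agents.run_internal.turn_resolution
_original_execute_final_output = turn_resolution.execute_final_output


def record_final_output_spans(agent, context_wrapper, final_output):
    # Placeholder for Sentry's span bookkeeping; the real integration ends the
    # agent invocation span here and, for streamed runs, the workflow span too.
    pass


@wraps(_original_execute_final_output)
async def _wrapped_execute_final_output(*args, **kwargs):
    # Run the library code first and only touch spans in `finally`, so the
    # instrumentation never alters the agent's control flow or return value.
    try:
        return await _original_execute_final_output(*args, **kwargs)
    finally:
        record_final_output_spans(
            kwargs.get("agent"),
            kwargs.get("context_wrapper"),
            kwargs.get("final_output"),
        )


turn_resolution.execute_final_output = _wrapped_execute_final_output

Reading the arguments with `kwargs.get(...)` mirrors the patched `_execute_final_output` helper above; the real code additionally guards the span work with `capture_internal_exceptions()` so that instrumentation errors are swallowed rather than raised into the agent run.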