Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 2 additions & 22 deletions sentry_sdk/integrations/httpx.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.tracing import BAGGAGE_HEADER_NAME
from sentry_sdk.tracing_utils import (
Baggage,
should_propagate_trace,
add_http_request_source,
add_sentry_baggage_to_headers,
)
from sentry_sdk.utils import (
SENSITIVE_DATA_SUBSTITUTE,
Expand All @@ -19,7 +19,6 @@
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from collections.abc import MutableMapping
from typing import Any


Expand Down Expand Up @@ -81,7 +80,7 @@ def send(self: "Client", request: "Request", **kwargs: "Any") -> "Response":
)

if key == BAGGAGE_HEADER_NAME:
_add_sentry_baggage_to_headers(request.headers, value)
add_sentry_baggage_to_headers(request.headers, value)
else:
request.headers[key] = value

Expand Down Expand Up @@ -155,22 +154,3 @@ async def send(
return rv

AsyncClient.send = send


def _add_sentry_baggage_to_headers(
    headers: "MutableMapping[str, str]", sentry_baggage: str
) -> None:
    """Append the given Sentry baggage to the ``baggage`` header in place.

    The provided headers mapping is mutated directly. Any Sentry items
    already present in the existing baggage are stripped out first, so
    Sentry entries are never duplicated; non-Sentry baggage is preserved
    and kept in front of the appended Sentry items.
    """
    # Drop stale Sentry items from whatever baggage is already set.
    non_sentry_baggage = Baggage.strip_sentry_baggage(
        headers.get(BAGGAGE_HEADER_NAME, "")
    )

    if non_sentry_baggage:
        headers[BAGGAGE_HEADER_NAME] = non_sentry_baggage + "," + sentry_baggage
    else:
        headers[BAGGAGE_HEADER_NAME] = sentry_baggage
49 changes: 48 additions & 1 deletion sentry_sdk/integrations/openai_agents/patches/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,57 @@
from sentry_sdk.integrations import DidNotEnable

from ..spans import ai_client_span, update_ai_client_span

import sentry_sdk
from sentry_sdk.consts import SPANDATA
from sentry_sdk.utils import logger
from sentry_sdk.tracing import BAGGAGE_HEADER_NAME
from sentry_sdk.tracing_utils import (
should_propagate_trace,
add_sentry_baggage_to_headers,
)

from typing import TYPE_CHECKING

if TYPE_CHECKING:
from typing import Any, Callable

from sentry_sdk.tracing import Span

try:
import agents
from agents.tool import HostedMCPTool
except ImportError:
raise DidNotEnable("OpenAI Agents not installed")


def _inject_trace_propagation_headers(
    hosted_tool: "HostedMCPTool", span: "Span"
) -> None:
    """Attach Sentry trace propagation headers to a hosted MCP tool config.

    Mutates ``hosted_tool.tool_config`` in place so that outgoing requests
    to the MCP server carry the ``sentry-trace`` and ``baggage`` headers.
    Existing non-Sentry baggage entries in the tool config are preserved.

    :param hosted_tool: The hosted MCP tool whose config receives the headers.
    :param span: The span used as the parent for the propagated trace context.
    """
    mcp_url = hosted_tool.tool_config.get("server_url")
    if not mcp_url:
        # Without a target URL we cannot decide whether propagation is
        # allowed, so leave the tool config completely untouched.
        return

    if not should_propagate_trace(sentry_sdk.get_client(), mcp_url):
        return

    # Only create the headers dict once we know we will add something, so a
    # non-propagating call does not insert a spurious empty "headers" entry.
    headers = hosted_tool.tool_config.get("headers")
    if headers is None:
        headers = {}
        hosted_tool.tool_config["headers"] = headers

    for (
        key,
        value,
    ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers(span=span):
        logger.debug(
            "[Tracing] Adding `{key}` header {value} to outgoing request to {mcp_url}.".format(
                key=key, value=value, mcp_url=mcp_url
            )
        )
        if key == BAGGAGE_HEADER_NAME:
            # Merge with any user-supplied baggage instead of clobbering it.
            add_sentry_baggage_to_headers(headers, value)
        else:
            headers[key] = value


def _create_get_model_wrapper(
original_get_model: "Callable[..., Any]",
) -> "Callable[..., Any]":
Expand Down Expand Up @@ -54,7 +91,17 @@ async def wrapped_fetch_response(*args: "Any", **kwargs: "Any") -> "Any":

@wraps(original_get_response)
async def wrapped_get_response(*args: "Any", **kwargs: "Any") -> "Any":
mcp_tools = kwargs.get("tools")
hosted_tools = []
if mcp_tools is not None:
hosted_tools = [
tool for tool in mcp_tools if isinstance(tool, HostedMCPTool)
]

with ai_client_span(agent, kwargs) as span:
for hosted_tool in hosted_tools:
_inject_trace_propagation_headers(hosted_tool, span=span)

result = await original_get_response(*args, **kwargs)

response_model = getattr(agent, "_sentry_raw_response_model", None)
Expand Down
21 changes: 20 additions & 1 deletion sentry_sdk/tracing_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import os
import re
import sys
from collections.abc import Mapping
from collections.abc import Mapping, MutableMapping
from datetime import timedelta
from random import Random
from urllib.parse import quote, unquote
Expand Down Expand Up @@ -1285,6 +1285,25 @@ def _should_continue_trace(baggage: "Optional[Baggage]") -> bool:
return True


def add_sentry_baggage_to_headers(
    headers: "MutableMapping[str, str]", sentry_baggage: str
) -> None:
    """Merge *sentry_baggage* into the ``baggage`` header of *headers*.

    The headers mapping is mutated directly. Sentry items already present
    in the existing baggage are removed first so only the freshly provided
    Sentry entries remain; third-party baggage items are kept as-is and
    precede the appended Sentry baggage.
    """
    current = headers.get(BAGGAGE_HEADER_NAME, "")
    # Strip any sentry-* entries so we never emit duplicates.
    kept = Baggage.strip_sentry_baggage(current)

    if kept:
        headers[BAGGAGE_HEADER_NAME] = kept + "," + sentry_baggage
    else:
        headers[BAGGAGE_HEADER_NAME] = sentry_baggage


# Circular imports
from sentry_sdk.tracing import (
BAGGAGE_HEADER_NAME,
Expand Down
127 changes: 127 additions & 0 deletions tests/integrations/openai_agents/test_openai_agents.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,12 @@
from sentry_sdk.integrations.openai_agents.utils import _set_input_data, safe_serialize
from sentry_sdk.utils import parse_version

from openai import AsyncOpenAI
from agents.models.openai_responses import OpenAIResponsesModel

from unittest import mock
from unittest.mock import AsyncMock

import agents
from agents import (
Agent,
Expand All @@ -25,16 +31,54 @@
ResponseOutputText,
ResponseFunctionToolCall,
)
from agents.tool import HostedMCPTool
from agents.exceptions import MaxTurnsExceeded, ModelBehaviorError
from agents.version import __version__ as OPENAI_AGENTS_VERSION

from openai.types.responses import Response, ResponseUsage
from openai.types.responses.response_usage import (
InputTokensDetails,
OutputTokensDetails,
)

test_run_config = agents.RunConfig(tracing_disabled=True)

# Canned OpenAI Responses API payload used as a mock return value in tests
# below: a single completed assistant message ("the model response") plus
# token-usage details (20 in / 10 out / 30 total, with cached and reasoning
# token breakdowns).
EXAMPLE_RESPONSE = Response(
    id="chat-id",
    output=[
        ResponseOutputMessage(
            id="message-id",
            content=[
                ResponseOutputText(
                    annotations=[],
                    text="the model response",
                    type="output_text",
                ),
            ],
            role="assistant",
            status="completed",
            type="message",
        ),
    ],
    parallel_tool_calls=False,
    tool_choice="none",
    tools=[],
    created_at=10000000,
    model="response-model-id",
    object="response",
    usage=ResponseUsage(
        input_tokens=20,
        input_tokens_details=InputTokensDetails(
            cached_tokens=5,
        ),
        output_tokens=10,
        output_tokens_details=OutputTokensDetails(
            reasoning_tokens=8,
        ),
        total_tokens=30,
    ),
)


@pytest.fixture
def mock_usage():
Expand Down Expand Up @@ -695,6 +739,89 @@ def simple_test_tool(message: str) -> str:
assert ai_client_span2["data"]["gen_ai.usage.total_tokens"] == 25


@pytest.mark.asyncio
async def test_hosted_mcp_tool_propagation_headers(sentry_init, test_agent):
    """
    Test responses API is given trace propagation headers with HostedMCPTool.
    """

    # Tool config already carries a non-Sentry baggage entry ("custom=data")
    # so we can verify it is preserved rather than clobbered when the Sentry
    # baggage is appended.
    hosted_tool = HostedMCPTool(
        tool_config={
            "type": "mcp",
            "server_label": "...",
            "server_url": "...",
            "require_approval": "never",
            "headers": {
                "baggage": "custom=data",
            },
        },
        on_approval_request=None,
    )

    # Real async client, but the low-level HTTP POST is mocked out so no
    # network request is made; the canned EXAMPLE_RESPONSE is returned.
    client = AsyncOpenAI(api_key="z")
    client.responses._post = AsyncMock(return_value=EXAMPLE_RESPONSE)

    model = OpenAIResponsesModel(model="gpt-4", openai_client=client)

    agent_with_tool = test_agent.clone(
        tools=[hosted_tool],
        model=model,
    )

    sentry_init(
        integrations=[OpenAIAgentsIntegration()],
        traces_sample_rate=1.0,
        release="d08ebdb9309e1b004c6f52202de58a09c2268e42",
    )

    # Spy on responses.create to capture the kwargs the integration builds;
    # pin Random.randrange so sentry-sample_rand in the expected baggage
    # string is deterministic (500000 -> 0.500000).
    with patch.object(
        model._client.responses,
        "create",
        wraps=model._client.responses.create,
    ) as create, mock.patch(
        "sentry_sdk.tracing_utils.Random.randrange", return_value=500000
    ):
        with sentry_sdk.start_transaction(
            name="/interactions/other-dogs/new-dog",
            op="greeting.sniff",
            trace_id="01234567890123456789012345678901",
        ) as transaction:
            await agents.Runner.run(
                agent_with_tool,
                "Please use the simple test tool",
                run_config=test_run_config,
            )

        # The last recorded span is the ai_client span under which the
        # headers were injected; its span_id is the expected parent id.
        ai_client_span = transaction._span_recorder.spans[-1]

    args, kwargs = create.call_args

    assert "tools" in kwargs
    assert len(kwargs["tools"]) == 1
    hosted_mcp_tool = kwargs["tools"][0]

    # sentry-trace header format: <trace_id>-<parent_span_id>-<sampled>.
    assert hosted_mcp_tool["headers"][
        "sentry-trace"
    ] == "{trace_id}-{parent_span_id}-{sampled}".format(
        trace_id=transaction.trace_id,
        parent_span_id=ai_client_span.span_id,
        sampled=1,
    )

    # Pre-existing "custom=data" must come first, followed by the Sentry
    # baggage items derived from the transaction and the init options above.
    expected_outgoing_baggage = (
        "custom=data,"
        "sentry-trace_id=01234567890123456789012345678901,"
        "sentry-sample_rand=0.500000,"
        "sentry-environment=production,"
        "sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,"
        "sentry-transaction=/interactions/other-dogs/new-dog,"
        "sentry-sample_rate=1.0,"
        "sentry-sampled=true"
    )

    assert hosted_mcp_tool["headers"]["baggage"] == expected_outgoing_baggage


@pytest.mark.asyncio
async def test_model_behavior_error(sentry_init, capture_events, test_agent):
"""
Expand Down
Loading