Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 48 additions & 0 deletions sentry_sdk/ai/_opanai_completions_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
from collections.abc import Iterable

from typing import TYPE_CHECKING

if TYPE_CHECKING:
from sentry_sdk._types import TextPart

from openai.types.chat import (
ChatCompletionMessageParam,
ChatCompletionSystemMessageParam,
)


def _is_system_instruction(message: "ChatCompletionMessageParam") -> bool:
return isinstance(message, dict) and message.get("role") == "system"


def _get_system_instructions(
messages: "Iterable[ChatCompletionMessageParam]",
) -> "list[ChatCompletionMessageParam]":
if not isinstance(messages, Iterable):
return []

return [message for message in messages if _is_system_instruction(message)]


def _transform_system_instructions(
system_instructions: "list[ChatCompletionSystemMessageParam]",
) -> "list[TextPart]":
instruction_text_parts: "list[TextPart]" = []

for instruction in system_instructions:
if not isinstance(instruction, dict):
continue

content = instruction.get("content")

if isinstance(content, str):
instruction_text_parts.append({"type": "text", "content": content})

elif isinstance(content, list):
for part in content:
if isinstance(part, dict) and part.get("type") == "text":
text = part.get("text", "")
if text:
instruction_text_parts.append({"type": "text", "content": text})

return instruction_text_parts
23 changes: 23 additions & 0 deletions sentry_sdk/ai/_openai_responses_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from typing import Union

from openai.types.responses import ResponseInputParam, ResponseInputItemParam


def _is_system_instruction(message: "ResponseInputItemParam") -> bool:
return (
isinstance(message, dict)
and message.get("type") == "message"
and message.get("role") == "system"
)


def _get_system_instructions(
messages: "Union[str, ResponseInputParam]",
) -> "list[ResponseInputItemParam]":
if not isinstance(messages, list):
return []

return [message for message in messages if _is_system_instruction(message)]
71 changes: 11 additions & 60 deletions sentry_sdk/integrations/openai.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import sys
from functools import wraps
from collections.abc import Iterable

import sentry_sdk
from sentry_sdk import consts
Expand All @@ -10,6 +9,15 @@
normalize_message_roles,
truncate_and_annotate_messages,
)
from sentry_sdk.ai._opanai_completions_api import (
_is_system_instruction as _is_system_instruction_completions,
_get_system_instructions as _get_system_instructions_completions,
_transform_system_instructions,
)
from sentry_sdk.ai._openai_responses_api import (
_is_system_instruction as _is_system_instruction_responses,
_get_system_instructions as _get_system_instructions_responses,
)
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.scope import should_send_default_pii
Expand All @@ -32,11 +40,12 @@
AsyncIterator,
Iterator,
Union,
Iterable,
)
from sentry_sdk.tracing import Span
from sentry_sdk._types import TextPart

from openai.types.responses import ResponseInputParam, ResponseInputItemParam
from openai.types.responses import ResponseInputParam
from openai import Omit

try:
Expand Down Expand Up @@ -199,64 +208,6 @@ def _calculate_token_usage(
)


def _is_system_instruction_completions(message: "ChatCompletionMessageParam") -> bool:
    # True only for dict-shaped Chat Completions messages whose role is "system".
    return isinstance(message, dict) and message.get("role") == "system"


def _get_system_instructions_completions(
    messages: "Iterable[ChatCompletionMessageParam]",
) -> "list[ChatCompletionMessageParam]":
    # Defensive: a non-iterable input yields no system instructions
    # instead of raising.
    if not isinstance(messages, Iterable):
        return []

    return [
        message for message in messages if _is_system_instruction_completions(message)
    ]


def _is_system_instruction_responses(message: "ResponseInputItemParam") -> bool:
    # Responses-API system messages are dicts tagged both as a "message"
    # item and with the "system" role.
    return (
        isinstance(message, dict)
        and message.get("type") == "message"
        and message.get("role") == "system"
    )


def _get_system_instructions_responses(
    messages: "Union[str, ResponseInputParam]",
) -> "list[ResponseInputItemParam]":
    # A plain-string prompt (or any non-list input) has no discrete
    # system messages, so it maps to an empty list.
    if not isinstance(messages, list):
        return []

    return [
        message for message in messages if _is_system_instruction_responses(message)
    ]


def _transform_system_instructions(
    system_instructions: "list[ChatCompletionMessageParam]",
) -> "list[TextPart]":
    # Flatten system messages into TextPart dicts: string content becomes
    # one part; list content contributes one part per non-empty
    # {"type": "text"} element. Non-dict messages and other content
    # shapes are silently skipped.
    instruction_text_parts: "list[TextPart]" = []

    for instruction in system_instructions:
        if not isinstance(instruction, dict):
            continue

        content = instruction.get("content")

        if isinstance(content, str):
            instruction_text_parts.append({"type": "text", "content": content})

        elif isinstance(content, list):
            for part in content:
                if isinstance(part, dict) and part.get("type") == "text":
                    text = part.get("text", "")
                    # Empty text fragments are dropped.
                    if text:
                        instruction_text_parts.append({"type": "text", "content": text})

    return instruction_text_parts


def _get_input_messages(
kwargs: "dict[str, Any]",
) -> "Optional[Union[Iterable[Any], list[str]]]":
Expand Down
52 changes: 39 additions & 13 deletions sentry_sdk/integrations/openai_agents/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,20 @@
from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.tracing_utils import set_span_errored
from sentry_sdk.utils import event_from_exception, safe_serialize
from sentry_sdk.ai._opanai_completions_api import _transform_system_instructions
from sentry_sdk.ai._openai_responses_api import (
_is_system_instruction,
_get_system_instructions,
)

from typing import TYPE_CHECKING

if TYPE_CHECKING:
from typing import Any
from agents import Usage
from agents import Usage, TResponseInputItem

from sentry_sdk.tracing import Span
from sentry_sdk._types import TextPart

try:
import agents
Expand Down Expand Up @@ -115,19 +121,39 @@ def _set_input_data(
return
request_messages = []

system_instructions = get_response_kwargs.get("system_instructions")
if system_instructions:
request_messages.append(
{
"role": GEN_AI_ALLOWED_MESSAGE_ROLES.SYSTEM,
"content": [{"type": "text", "text": system_instructions}],
}
messages: "str | list[TResponseInputItem]" = get_response_kwargs.get("input", [])

explicit_instructions = get_response_kwargs.get("system_instructions")
system_instructions = _get_system_instructions(messages)

if system_instructions is not None or len(system_instructions) > 0:
instructions_text_parts: "list[TextPart]" = []
if explicit_instructions is not None:
instructions_text_parts.append(
{
"type": "text",
"content": explicit_instructions,
}
)

# Deliberate use of function accepting completions API type because
# of shared structure FOR THIS PURPOSE ONLY.
instructions_text_parts += _transform_system_instructions(system_instructions)

set_data_normalized(
span,
SPANDATA.GEN_AI_SYSTEM_INSTRUCTIONS,
instructions_text_parts,
unpack=False,
)

for message in get_response_kwargs.get("input", []):
non_system_messages = [
message for message in messages if not _is_system_instruction(message)
]
for message in non_system_messages:
if "role" in message:
normalized_role = normalize_message_role(message.get("role"))
content = message.get("content")
normalized_role = normalize_message_role(message.get("role")) # type: ignore
content = message.get("content") # type: ignore
request_messages.append(
{
"role": normalized_role,
Expand All @@ -139,14 +165,14 @@ def _set_input_data(
}
)
else:
if message.get("type") == "function_call":
if message.get("type") == "function_call": # type: ignore
request_messages.append(
{
"role": GEN_AI_ALLOWED_MESSAGE_ROLES.ASSISTANT,
"content": [message],
}
)
elif message.get("type") == "function_call_output":
elif message.get("type") == "function_call_output": # type: ignore
request_messages.append(
{
"role": GEN_AI_ALLOWED_MESSAGE_ROLES.TOOL,
Expand Down
Loading
Loading