Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## Unreleased
- Document official package metadata and README for the OpenAI Agents instrumentation.
([#3859](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3859))
- Populate system instructions and tool definitions from the OpenAI `Response` object.
([#4196](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/4196))

## Version 0.1.0 (2025-10-15)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1056,10 +1056,20 @@ def _build_content_payload(self, span: Span[Any]) -> ContentPayload:

elif _is_instance_of(span_data, ResponseSpanData):
span_input = getattr(span_data, "input", None)
response_obj = getattr(span_data, "response", None)
if capture_messages and span_input:
payload.input_messages = (
self._normalize_messages_to_role_parts(span_input)
)

if (
capture_system
and response_obj
and hasattr(response_obj, "instructions")
):
payload.system_instructions = self._normalize_to_text_parts(
response_obj.instructions
)
if capture_system and span_input:
sys_instr = self._collect_system_instructions(span_input)
if sys_instr:
Expand Down Expand Up @@ -2029,6 +2039,22 @@ def _get_attributes_from_response_span_data(
if output_tokens is not None:
yield GEN_AI_USAGE_OUTPUT_TOKENS, output_tokens

# Tool definitions from response
if self._capture_tool_definitions and hasattr(
span_data.response, "tools"
):
yield (
GEN_AI_TOOL_DEFINITIONS,
safe_json_dumps(
list(
map(
lambda tool: tool.to_dict(),
span_data.response.tools,
)
)
),
)

# Input/output messages
if (
self.include_sensitive_data
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
set_trace_processors,
trace,
)
from openai.types.responses import FunctionTool # noqa: E402

from opentelemetry.instrumentation.openai_agents import ( # noqa: E402
OpenAIAgentsInstrumentor,
Expand Down Expand Up @@ -62,6 +63,9 @@
GEN_AI_OUTPUT_MESSAGES = getattr(
GenAI, "GEN_AI_OUTPUT_MESSAGES", "gen_ai.output.messages"
)
GEN_AI_TOOL_DEFINITIONS = getattr(
GenAI, "GEN_AI_TOOL_DEFINITIONS", "gen_ai.tool.definitions"
)


def _instrument_with_provider(**instrument_kwargs):
Expand Down Expand Up @@ -487,8 +491,26 @@ def __init__(self, input_tokens: int, output_tokens: int) -> None:
class _Response:
    """Minimal stand-in for an OpenAI Responses-API response object.

    Carries only the attributes the instrumentation reads: ``id``,
    ``instructions``, ``model``, ``usage``, ``tools`` and ``output``.
    """

    def __init__(self) -> None:
        self.id = "resp-123"
        # Non-empty instructions so the system-instruction capture path runs.
        self.instructions = "You are a helpful assistant."
        self.model = "gpt-4o-mini"
        # NOTE(review): _Usage is declared elsewhere in this test file;
        # (42, 9) presumably maps to (input_tokens, output_tokens) — confirm.
        self.usage = _Usage(42, 9)
        # One FunctionTool so the tool-definitions attribute can be asserted;
        # the instrumentation serializes each tool via ``tool.to_dict()``.
        self.tools = [
            FunctionTool(
                name="get_current_weather",
                type="function",
                description="Get the current weather in a given location",
                parameters={
                    "type": "object",
                    "properties": {
                        "location": {
                            "title": "Location",
                            "type": "string",
                        },
                    },
                    "required": ["location"],
                },
            )
        ]
        # Drives the GEN_AI_RESPONSE_FINISH_REASONS assertion ("stop",).
        self.output = [{"finish_reason": "stop"}]

try:
Expand Down Expand Up @@ -516,6 +538,30 @@ def __init__(self) -> None:
assert response.attributes[GenAI.GEN_AI_RESPONSE_FINISH_REASONS] == (
"stop",
)

system_instructions = json.loads(
response.attributes[GenAI.GEN_AI_SYSTEM_INSTRUCTIONS]
)
assert system_instructions == [
{"type": "text", "content": "You are a helpful assistant."}
]
tool_definitions = json.loads(
response.attributes[GEN_AI_TOOL_DEFINITIONS]
)
assert tool_definitions == [
{
"type": "function",
"name": "get_current_weather",
"description": "Get the current weather in a given location",
"parameters": {
"type": "object",
"properties": {
"location": {"title": "Location", "type": "string"},
},
"required": ["location"],
},
}
]
finally:
instrumentor.uninstrument()
exporter.clear()