Commit a704fc6

cleanup docs and unclear code
1 parent e72a320 commit a704fc6

File tree: 3 files changed, +51 -79 lines

util/opentelemetry-util-genai/README.rst

Lines changed: 2 additions & 1 deletion
@@ -8,7 +8,8 @@ while providing standardization for generating both types of otel, "spans and me

 This package relies on environment variables to configure capturing of message content.
 By default, message content will not be captured.
-Set the environment variable `OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT` to `SPAN_ONLY` or `SPAN_AND_EVENT` to capture message content in spans.
+Set the environment variable `OTEL_SEMCONV_STABILITY_OPT_IN` to `gen_ai_latest_experimental` to enable experimental features.
+And set the environment variable `OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT` to `SPAN_ONLY` or `SPAN_AND_EVENT` to capture message content in spans.

 This package provides these span attributes.
 -> gen_ai.provider.name: Str(openai)
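
A minimal sketch of opting in as the updated README describes, with both variables set before any GenAI telemetry is emitted. The variable names and values come from the diff; everything around them is illustrative, not part of the package:

# Illustrative sketch only: the variable names/values are from the README diff,
# the surrounding script is not part of opentelemetry-util-genai.
import os

os.environ["OTEL_SEMCONV_STABILITY_OPT_IN"] = "gen_ai_latest_experimental"
os.environ["OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"] = "SPAN_ONLY"
# ... start the instrumented application after this point ...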

util/opentelemetry-util-genai/src/opentelemetry/util/genai/generators.py

Lines changed: 47 additions & 71 deletions
@@ -34,7 +34,7 @@
 import json
 from contextlib import contextmanager
 from dataclasses import asdict, dataclass, field
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any, Dict, List, Optional
 from uuid import UUID

 from opentelemetry import trace
@@ -68,23 +68,35 @@ class _SpanState:
     children: List[UUID] = field(default_factory=list)


-def _get_genai_attributes(
-    request_model: Optional[str],
-    response_model: Optional[str],
-    operation_name: Optional[str],
-    provider: Optional[str],
-) -> Dict[str, AttributeValue]:
-    attributes: Dict[str, AttributeValue] = {}
-    if provider:
-        attributes[GenAI.GEN_AI_PROVIDER_NAME] = provider
-    if operation_name:
-        attributes[GenAI.GEN_AI_OPERATION_NAME] = operation_name
-    if request_model:
-        attributes[GenAI.GEN_AI_REQUEST_MODEL] = request_model
-    if response_model:
-        attributes[GenAI.GEN_AI_RESPONSE_MODEL] = response_model
+def _apply_common_span_attributes(
+    span: Span, invocation: LLMInvocation
+) -> None:
+    """Apply attributes shared by finish() and error() and compute metrics.
+
+    Returns (genai_attributes) for use with metrics.
+    """
+    request_model = invocation.attributes.get("request_model")
+    provider = invocation.attributes.get("provider")
+
+    _set_initial_span_attributes(span, request_model, provider)
+
+    finish_reasons = _collect_finish_reasons(invocation.chat_generations)
+    if finish_reasons:
+        span.set_attribute(
+            GenAI.GEN_AI_RESPONSE_FINISH_REASONS, finish_reasons
+        )

-    return attributes
+    response_model = invocation.attributes.get("response_model_name")
+    response_id = invocation.attributes.get("response_id")
+    prompt_tokens = invocation.attributes.get("input_tokens")
+    completion_tokens = invocation.attributes.get("output_tokens")
+    _set_response_and_usage_attributes(
+        span,
+        response_model,
+        response_id,
+        prompt_tokens,
+        completion_tokens,
+    )


 def _set_initial_span_attributes(
@@ -186,9 +198,13 @@ def _start_span(
         kind: SpanKind,
         parent_run_id: Optional[UUID] = None,
     ) -> Span:
-        if parent_run_id is not None and parent_run_id in self.spans:
-            parent_span = self.spans[parent_run_id].span
-            ctx = set_span_in_context(parent_span)
+        parent_span = (
+            self.spans.get(parent_run_id)
+            if parent_run_id is not None
+            else None
+        )
+        if parent_span is not None:
+            ctx = set_span_in_context(parent_span.span)
             span = self._tracer.start_span(name=name, kind=kind, context=ctx)
         else:
             # top-level or missing parent
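
The hunk above folds a "not None and key in dict" membership check plus indexing into a single dict.get lookup. A standalone sketch of the same pattern, using invented names (SpanState, registry, lookup_parent) rather than the module's real state:

# Standalone sketch of the dict.get-based parent lookup used above.
# The names here are invented for illustration only.
from dataclasses import dataclass
from typing import Dict, Optional
from uuid import UUID, uuid4


@dataclass
class SpanState:
    name: str


registry: Dict[UUID, SpanState] = {}


def lookup_parent(parent_run_id: Optional[UUID]) -> Optional[SpanState]:
    # One .get() call covers both "key missing" and "no parent id given";
    # callers then branch on a single None check instead of a membership test.
    return registry.get(parent_run_id) if parent_run_id is not None else None


run_id = uuid4()
registry[run_id] = SpanState(name="chat openai")
assert lookup_parent(run_id) is registry[run_id]
assert lookup_parent(None) is None
assert lookup_parent(uuid4()) is None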
@@ -207,16 +223,16 @@ def _end_span(self, run_id: UUID):
         del self.spans[run_id]

     def start(self, invocation: LLMInvocation):
-        if (
-            invocation.parent_run_id is not None
-            and invocation.parent_run_id in self.spans
-        ):
-            self.spans[invocation.parent_run_id].children.append(
-                invocation.run_id
-            )
+        parent_state = (
+            self.spans.get(invocation.parent_run_id)
+            if invocation.parent_run_id is not None
+            else None
+        )
+        if parent_state is not None:
+            parent_state.children.append(invocation.run_id)

     @contextmanager
-    def _span_for_invocation(self, invocation: LLMInvocation):
+    def _start_span_for_invocation(self, invocation: LLMInvocation):
         """Create/register a span for the invocation and yield it.

         The span is not ended automatically on exiting the context; callers
@@ -234,60 +250,20 @@ def _span_for_invocation(self, invocation: LLMInvocation):
         self.spans[invocation.run_id] = span_state
         yield span

-    @staticmethod
-    def _apply_common_span_attributes(
-        span: Span, invocation: LLMInvocation
-    ) -> Tuple[Dict[str, AttributeValue]]:
-        """Apply attributes shared by finish() and error() and compute metrics.
-
-        Returns (genai_attributes) for use with metrics.
-        """
-        request_model = invocation.attributes.get("request_model")
-        provider = invocation.attributes.get("provider")
-
-        _set_initial_span_attributes(span, request_model, provider)
-
-        finish_reasons = _collect_finish_reasons(invocation.chat_generations)
-        if finish_reasons:
-            span.set_attribute(
-                GenAI.GEN_AI_RESPONSE_FINISH_REASONS, finish_reasons
-            )
-
-        response_model = invocation.attributes.get("response_model_name")
-        response_id = invocation.attributes.get("response_id")
-        prompt_tokens = invocation.attributes.get("input_tokens")
-        completion_tokens = invocation.attributes.get("output_tokens")
-        _set_response_and_usage_attributes(
-            span,
-            response_model,
-            response_id,
-            prompt_tokens,
-            completion_tokens,
-        )
-        genai_attributes = _get_genai_attributes(
-            request_model,
-            response_model,
-            GenAI.GenAiOperationNameValues.CHAT.value,
-            provider,
-        )
-        return (genai_attributes,)
-
     def _finalize_invocation(self, invocation: LLMInvocation) -> None:
         """End span(s) and record duration for the invocation."""
         self._end_span(invocation.run_id)

     def finish(self, invocation: LLMInvocation):
-        with self._span_for_invocation(invocation) as span:
-            _ = self._apply_common_span_attributes(
-                span, invocation
-            )  # return value to be used with metrics
+        with self._start_span_for_invocation(invocation) as span:
+            _apply_common_span_attributes(span, invocation)
             _maybe_set_span_messages(
                 span, invocation.messages, invocation.chat_generations
             )
             self._finalize_invocation(invocation)

     def error(self, error: Error, invocation: LLMInvocation):
-        with self._span_for_invocation(invocation) as span:
+        with self._start_span_for_invocation(invocation) as span:
             span.set_status(Status(StatusCode.ERROR, error.message))
             if span.is_recording():
                 span.set_attribute(
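
The last two hunks also hoist _apply_common_span_attributes from a @staticmethod (whose return value was assigned and ignored) to the module-level function shown in the first hunk, so finish() can call it directly. A toy illustration of that refactoring pattern, with invented names rather than the module's real types:

# Toy illustration of hoisting a @staticmethod that touches no instance state
# to module level, mirroring the change above; all names here are invented.
class BeforeEmitter:
    @staticmethod
    def _describe(payload: dict) -> None:
        payload["described"] = True

    def finish(self, payload: dict) -> None:
        _ = self._describe(payload)  # class indirection, ignored return value


def _describe(payload: dict) -> None:
    # Module-level version: same body, callable without the class.
    payload["described"] = True


class AfterEmitter:
    def finish(self, payload: dict) -> None:
        _describe(payload)  # direct call, no dummy assignment


payload: dict = {}
AfterEmitter().finish(payload)
assert payload == {"described": True}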

util/opentelemetry-util-genai/src/opentelemetry/util/genai/utils.py

Lines changed: 2 additions & 7 deletions
@@ -42,14 +42,9 @@ def get_content_capturing_mode() -> ContentCapturingMode:

     When the GEN_AI stability mode is DEFAULT this function will raise a ValueError -- see the code below."""
     envvar = os.environ.get(OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT)
-    if (
-        _OpenTelemetrySemanticConventionStability._get_opentelemetry_stability_opt_in_mode(
-            _OpenTelemetryStabilitySignalType.GEN_AI,
-        )
-        == _StabilityMode.DEFAULT
-    ):
+    if not is_experimental_mode():
         raise ValueError(
-            "This function should never be called when StabilityMode is default."
+            "This function should never be called when StabilityMode is not experimental."
         )
     if not envvar:
         return ContentCapturingMode.NO_CONTENT
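
The removed inline check shows what the new is_experimental_mode() guard stands for: the GEN_AI semconv stability opt-in must be something other than DEFAULT. A hedged sketch of such a helper, assuming the stability machinery lives in opentelemetry.instrumentation._semconv; the package's actual helper may be defined differently:

# Sketch of what an is_experimental_mode() helper presumably wraps, derived from
# the inline check removed above. The import path is an assumption; the real
# helper in opentelemetry-util-genai may differ.
from opentelemetry.instrumentation._semconv import (
    _OpenTelemetrySemanticConventionStability,
    _OpenTelemetryStabilitySignalType,
    _StabilityMode,
)


def is_experimental_mode() -> bool:
    # Experimental means the GEN_AI opt-in mode is anything other than DEFAULT.
    return (
        _OpenTelemetrySemanticConventionStability._get_opentelemetry_stability_opt_in_mode(
            _OpenTelemetryStabilitySignalType.GEN_AI,
        )
        is not _StabilityMode.DEFAULT
    )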
