
Commit a59ca1e

wip, refactor chat generation to output message
1 parent 3c94ecd commit a59ca1e

5 files changed: +49 additions, −99 deletions


util/opentelemetry-util-genai/src/opentelemetry/util/genai/data.py

Lines changed: 0 additions & 69 deletions
This file was deleted.

util/opentelemetry-util-genai/src/opentelemetry/util/genai/generators.py

Lines changed: 26 additions & 17 deletions
@@ -58,9 +58,8 @@
 from opentelemetry.trace.status import Status, StatusCode
 from opentelemetry.util.types import AttributeValue

-from .data import ChatGeneration, Error
 from .instruments import Instruments
-from .types import InputMessage, LLMInvocation
+from .types import Error, InputMessage, LLMInvocation, OutputMessage, Text


 @dataclass
@@ -111,7 +110,7 @@ def _message_to_log_record(


 def _chat_generation_to_log_record(
-    chat_generation: ChatGeneration,
+    chat_generation: OutputMessage,
     index: int,
     provider_name: Optional[str],
     framework: Optional[str],
@@ -133,11 +132,18 @@ def _chat_generation_to_log_record(
         "event.name": "gen_ai.choice",
     }

+    # Extract message content from parts (first Text part if available)
+    # TODO: use dataclass to dict
+    content: Optional[str] = None
+    for part in chat_generation.parts:
+        if isinstance(part, Text):
+            content = part.content
+            break
     message = {
-        "type": chat_generation.type,
+        "type": chat_generation.role,
     }
-    if capture_content and chat_generation.content:
-        message["content"] = chat_generation.content
+    if capture_content and content is not None:
+        message["content"] = content

     body = {
         "index": index,
@@ -218,7 +224,7 @@ def _set_response_and_usage_attributes(

 def _emit_chat_generation_logs(
     logger: Optional[Logger],
-    generations: List[ChatGeneration],
+    generations: List[OutputMessage],
     provider_name: Optional[str],
     framework: Optional[str],
     capture_content: bool,
@@ -234,16 +240,14 @@ def _emit_chat_generation_logs(
         )
         if log and logger:
             logger.emit(log)
-        if chat_generation.finish_reason is not None:
-            finish_reasons.append(chat_generation.finish_reason)
+        finish_reasons.append(chat_generation.finish_reason)
     return finish_reasons


-def _collect_finish_reasons(generations: List[ChatGeneration]) -> List[str]:
+def _collect_finish_reasons(generations: List[OutputMessage]) -> List[str]:
     finish_reasons: List[str] = []
     for gen in generations:
-        if gen.finish_reason is not None:
-            finish_reasons.append(gen.finish_reason)
+        finish_reasons.append(gen.finish_reason)
     return finish_reasons


@@ -260,15 +264,20 @@ def _maybe_set_input_messages(


 def _set_chat_generation_attrs(
-    span: Span, generations: List[ChatGeneration]
+    span: Span, generations: List[OutputMessage]
 ) -> None:
     for index, chat_generation in enumerate(generations):
+        # Extract content
+        # TODO: use dataclass to dict - Handle multiple responses
+        content: Optional[str] = None
+        for part in chat_generation.parts:
+            if isinstance(part, Text):
+                content = part.content
+                break
         # Upcoming semconv fields
+        span.set_attribute(f"gen_ai.completion.{index}.content", content or "")
         span.set_attribute(
-            f"gen_ai.completion.{index}.content", chat_generation.content
-        )
-        span.set_attribute(
-            f"gen_ai.completion.{index}.role", chat_generation.type
+            f"gen_ai.completion.{index}.role", chat_generation.role
         )
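Note: the first-Text-part extraction above now appears twice in this file, and both TODOs point at replacing it with a dataclass-to-dict conversion. A minimal sketch of how it could be factored into a shared helper; the _first_text_content name is hypothetical and not part of this commit:

from typing import Optional

from .types import OutputMessage, Text


def _first_text_content(generation: OutputMessage) -> Optional[str]:
    # Return the content of the first Text part, or None if there is none.
    for part in generation.parts:
        if isinstance(part, Text):
            return part.content
    return None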

util/opentelemetry-util-genai/src/opentelemetry/util/genai/handler.py

Lines changed: 3 additions & 4 deletions
@@ -45,9 +45,8 @@
 from opentelemetry.semconv.schemas import Schemas
 from opentelemetry.trace import get_tracer

-from .data import ChatGeneration, Error
 from .generators import SpanMetricEventGenerator, SpanMetricGenerator
-from .types import InputMessage, LLMInvocation
+from .types import Error, InputMessage, LLMInvocation, OutputMessage

 # TODO: Get the tool version for emitting spans, use GenAI Utils for now
 from .version import __version__
@@ -140,7 +139,7 @@ def start_llm(
     def stop_llm(
         self,
         run_id: UUID,
-        chat_generations: List[ChatGeneration],
+        chat_generations: List[OutputMessage],
         **attributes: Any,
     ) -> LLMInvocation:
         with self._lock:
@@ -192,7 +191,7 @@ def llm_start(


 def llm_stop(
-    run_id: UUID, chat_generations: List[ChatGeneration], **attributes: Any
+    run_id: UUID, chat_generations: List[OutputMessage], **attributes: Any
 ) -> LLMInvocation:
     return get_telemetry_handler().stop_llm(
         run_id=run_id, chat_generations=chat_generations, **attributes
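With this change, callers pass OutputMessage instances rather than ChatGeneration. A rough usage sketch based on the signatures above; the import path and the field values are assumptions, not taken from this commit:

from uuid import uuid4

from opentelemetry.util.genai.handler import llm_stop  # module path assumed from file location
from opentelemetry.util.genai.types import OutputMessage, Text

# run_id would normally be the UUID passed to the matching llm_start call.
invocation = llm_stop(
    run_id=uuid4(),
    chat_generations=[
        OutputMessage(
            role="assistant",
            parts=[Text(content="hello back")],
            finish_reason="stop",
        )
    ],
)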

util/opentelemetry-util-genai/src/opentelemetry/util/genai/types.py

Lines changed: 9 additions & 4 deletions
@@ -16,11 +16,9 @@
 import time
 from dataclasses import dataclass, field
 from enum import Enum
-from typing import Any, Dict, List, Literal, Optional, Union
+from typing import Any, Dict, List, Literal, Optional, Type, Union
 from uuid import UUID

-from .data import ChatGeneration
-

 class ContentCapturingMode(Enum):
     # Do not capture content (default).
@@ -86,7 +84,14 @@ class LLMInvocation:
     start_time: float = field(default_factory=time.time)
     end_time: Optional[float] = None
     messages: List[InputMessage] = field(default_factory=list)
-    chat_generations: List[ChatGeneration] = field(default_factory=list)
+    chat_generations: List[OutputMessage] = field(default_factory=list)
     attributes: Dict[str, Any] = field(default_factory=dict)
     span_id: int = 0
     trace_id: int = 0
+
+
+# TODO: Do we need this?
+@dataclass
+class Error:
+    message: str
+    type: Type[BaseException]
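For reference, a minimal sketch of the reworked types in use: OutputMessage as the tests below construct it, plus the new Error dataclass (the error values here are illustrative, not from this commit):

from opentelemetry.util.genai.types import Error, OutputMessage, Text

# An assistant reply with one text part and a finish reason.
generation = OutputMessage(
    role="assistant",
    parts=[Text(content="hello back")],
    finish_reason="stop",
)

# Error pairs a human-readable message with the exception type behind it.
err = Error(message="upstream timeout", type=TimeoutError)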

util/opentelemetry-util-genai/tests/test_utils_old.py

Lines changed: 11 additions & 5 deletions
@@ -17,7 +17,11 @@
     llm_start,
     llm_stop,
 )
-from opentelemetry.util.genai.types import ChatGeneration, InputMessage, Text
+from opentelemetry.util.genai.types import (
+    InputMessage,
+    OutputMessage,
+    Text,
+)


 class TestTelemetryHandler(unittest.TestCase):
@@ -42,7 +46,9 @@ def test_llm_start_and_stop_creates_span(self):
         message = InputMessage(
             role="Human", parts=[Text(content="hello world")]
         )
-        chat_generation = ChatGeneration(content="hello back", type="AI")
+        chat_generation = OutputMessage(
+            role="AI", parts=[Text(content="hello back")], finish_reason="stop"
+        )

         # Start and stop LLM invocation
         llm_start(
@@ -98,9 +104,9 @@ def test_structured_logs_emitted(self):
         message = InputMessage(
             role="user", parts=[Text(content="hello world")]
         )
-        generation = ChatGeneration(
-            content="hello back",
-            type="assistant",
+        generation = OutputMessage(
+            role="assistant",
+            parts=[Text(content="hello back")],
             finish_reason="stop",
         )