Commit 61d43e3

fix: handle None token counts in generation span
Guard against input_tokens or output_tokens being None when computing $ai_total_tokens, to avoid a TypeError.
Parent: fb4f1da

1 file changed, 3 insertions(+), 3 deletions(-)

posthog/ai/openai_agents/processor.py

Lines changed: 3 additions & 3 deletions
@@ -480,8 +480,8 @@ def _handle_generation_span(
         """Handle LLM generation spans - maps to $ai_generation event."""
         # Extract token usage
         usage = span_data.usage or {}
-        input_tokens = usage.get("input_tokens") or usage.get("prompt_tokens", 0)
-        output_tokens = usage.get("output_tokens") or usage.get("completion_tokens", 0)
+        input_tokens = usage.get("input_tokens") or usage.get("prompt_tokens") or 0
+        output_tokens = usage.get("output_tokens") or usage.get("completion_tokens") or 0

         # Extract model config parameters
         model_config = span_data.model_config or {}
@@ -506,7 +506,7 @@ def _handle_generation_span(
             "$ai_output_choices": self._with_privacy_mode(_safe_json(span_data.output)),
             "$ai_input_tokens": input_tokens,
             "$ai_output_tokens": output_tokens,
-            "$ai_total_tokens": input_tokens + output_tokens,
+            "$ai_total_tokens": (input_tokens or 0) + (output_tokens or 0),
         }

         # Add optional token fields if present
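
Why the change matters: dict.get's default only kicks in when the key is missing entirely. If the usage payload contains an explicit None (e.g. "prompt_tokens": None), the old expression evaluated to None and the later addition raised a TypeError. Below is a minimal standalone sketch of the two behaviors; the usage dict is illustrative, not taken from the repository.

# dict.get's default applies only to a missing key, not to an explicit None value.
usage = {"prompt_tokens": None, "completion_tokens": 12}

# Old expression: "prompt_tokens" is present but None, so the default 0 is
# never used and input_tokens ends up as None.
input_tokens = usage.get("input_tokens") or usage.get("prompt_tokens", 0)
print(input_tokens)  # None -> None + 12 would raise TypeError

# Patched expression: a trailing `or 0` coerces a missing key and an
# explicit None alike to 0.
input_tokens = usage.get("input_tokens") or usage.get("prompt_tokens") or 0
output_tokens = usage.get("output_tokens") or usage.get("completion_tokens") or 0
print(input_tokens + output_tokens)  # 0 + 12 = 12, no TypeError

The second hunk applies the same guard again at the point of summation, so $ai_total_tokens stays an integer even if either value is still None when the properties dict is built.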
