1 file changed: +3 -3 lines changed
@@ -480,8 +480,8 @@ def _handle_generation_span(
         """Handle LLM generation spans - maps to $ai_generation event."""
         # Extract token usage
         usage = span_data.usage or {}
-        input_tokens = usage.get("input_tokens") or usage.get("prompt_tokens", 0)
-        output_tokens = usage.get("output_tokens") or usage.get("completion_tokens", 0)
+        input_tokens = usage.get("input_tokens") or usage.get("prompt_tokens") or 0
+        output_tokens = usage.get("output_tokens") or usage.get("completion_tokens") or 0
 
         # Extract model config parameters
         model_config = span_data.model_config or {}
@@ -506,7 +506,7 @@ def _handle_generation_span(
             "$ai_output_choices": self._with_privacy_mode(_safe_json(span_data.output)),
             "$ai_input_tokens": input_tokens,
             "$ai_output_tokens": output_tokens,
-            "$ai_total_tokens": input_tokens + output_tokens,
+            "$ai_total_tokens": (input_tokens or 0) + (output_tokens or 0),
         }
 
         # Add optional token fields if present
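The first change works around a subtlety of dict.get: the default value only applies when the key is absent, not when the key is present with an explicit None. Below is a minimal sketch (not from the PR) that assumes a provider reporting a usage payload such as {"prompt_tokens": None, "completion_tokens": 42}; the values are illustrative only.

# Minimal sketch (not from the PR) of why the fallback was changed. Assumes a
# usage payload in which the key is present but holds an explicit None.
usage = {"prompt_tokens": None, "completion_tokens": 42}

# Old pattern: dict.get's default only kicks in when the key is MISSING.
# Here "prompt_tokens" exists with value None, so .get returns None and the
# default 0 is never used.
input_tokens = usage.get("input_tokens") or usage.get("prompt_tokens", 0)
print(input_tokens)  # None -> input_tokens + output_tokens would raise TypeError

# New pattern: the trailing `or 0` coerces any falsy result (missing key or
# explicit None) to 0 before it is used in arithmetic.
input_tokens = usage.get("input_tokens") or usage.get("prompt_tokens") or 0
output_tokens = usage.get("output_tokens") or usage.get("completion_tokens") or 0
print(input_tokens + output_tokens)  # 42

The second change, (input_tokens or 0) + (output_tokens or 0) for $ai_total_tokens, is belt-and-suspenders: even if one of the values ends up None for another reason, the sum no longer raises a TypeError. Note that `or` also treats a legitimate 0 as falsy, so a reported 0 falls through to the next fallback; since usage payloads typically carry only one of the two naming schemes and the chain ends in 0, this should not change the reported totals.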