@@ -102,15 +102,15 @@ def create_step(
                 ConfigLlmData(
                     output_column_name="output",
                     input_variable_names=input_variable_names,
-                    ground_truth_column_name="groundTruth",
                     latency_column_name="latency",
                     cost_column_name="cost",
                     timestamp_column_name="inferenceTimestamp",
                     inference_id_column_name="inferenceId",
                     num_of_token_column_name="tokens",
                 )
             )
-
+            if "groundTruth" in trace_data:
+                config.update({"ground_truth_column_name": "groundTruth"})
             if "context" in trace_data:
                 config.update({"context_column_name": "context"})
@@ -386,13 +386,14 @@ def post_process_trace(
         "inferenceTimestamp": root_step.start_time,
         "inferenceId": str(root_step.id),
         "output": root_step.output,
-        "groundTruth": root_step.ground_truth,
         "latency": root_step.latency,
         "cost": processed_steps[0].get("cost", 0),
         "tokens": processed_steps[0].get("tokens", 0),
         "steps": processed_steps,
         **root_step.metadata,
     }
+    if root_step.ground_truth:
+        trace_data["groundTruth"] = root_step.ground_truth
     if input_variables:
         trace_data.update(input_variables)
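Both hunks apply the same change: "groundTruth" stops being an always-present field and is only added when the root step actually carries a ground truth value, both in the trace payload built by post_process_trace and in the column mapping built in create_step. A minimal, self-contained sketch of that conditional-inclusion pattern follows; RootStep, build_trace_data, and build_config are hypothetical stand-ins for illustration, not the library's API.

# Sketch of the conditional-inclusion pattern used in the diff above.
# RootStep is a hypothetical stand-in; only its optional ground_truth
# attribute matters for this example.
from dataclasses import dataclass
from typing import Optional


@dataclass
class RootStep:
    output: str
    ground_truth: Optional[str] = None


def build_trace_data(root_step: RootStep) -> dict:
    trace_data = {"output": root_step.output}
    # "groundTruth" is only added to the payload when it is set,
    # mirroring the change in post_process_trace.
    if root_step.ground_truth:
        trace_data["groundTruth"] = root_step.ground_truth
    return trace_data


def build_config(trace_data: dict) -> dict:
    config = {"output_column_name": "output"}
    # The column mapping only references "groundTruth" when the trace
    # payload contains it, mirroring the change in create_step.
    if "groundTruth" in trace_data:
        config.update({"ground_truth_column_name": "groundTruth"})
    return config


print(build_config(build_trace_data(RootStep(output="hi"))))
# {'output_column_name': 'output'}
print(build_config(build_trace_data(RootStep(output="hi", ground_truth="hello"))))
# {'output_column_name': 'output', 'ground_truth_column_name': 'groundTruth'}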