@@ -73,7 +73,7 @@ describe('Vercel AI integration', () => {
73
73
'ai.pipeline.name' : 'generateText' ,
74
74
'ai.prompt' : '{"prompt":"Where is the second span?"}' ,
75
75
'ai.response.finishReason' : 'stop' ,
76
- 'ai.response.text': expect.any(String),
76
+ 'gen_ai.response.text': expect.any(String),
77
77
'ai.settings.maxRetries' : 2 ,
78
78
'ai.settings.maxSteps' : 1 ,
79
79
'ai.streaming' : false ,
@@ -108,10 +108,10 @@ describe('Vercel AI integration', () => {
108
108
'ai.response.finishReason' : 'stop' ,
109
109
'ai.response.model' : 'mock-model-id' ,
110
110
'ai.response.id' : expect . any ( String ) ,
111
- 'ai.response.text': expect.any(String),
111
+ 'gen_ai.response.text': expect.any(String),
112
112
'ai.response.timestamp' : expect . any ( String ) ,
113
113
'ai.prompt.format' : expect . any ( String ) ,
114
- 'ai.prompt.messages': expect.any(String),
114
+ 'gen_ai.request.messages': expect.any(String),
115
115
'gen_ai.response.finish_reasons' : [ 'stop' ] ,
116
116
'gen_ai.usage.input_tokens' : 10 ,
117
117
'gen_ai.usage.output_tokens' : 20 ,
@@ -210,7 +210,7 @@ describe('Vercel AI integration', () => {
210
210
'ai.pipeline.name' : 'generateText' ,
211
211
'ai.prompt' : '{"prompt":"Where is the first span?"}' ,
212
212
'ai.response.finishReason' : 'stop' ,
213
- 'ai.response.text': 'First span here!',
213
+ 'gen_ai.response.text': 'First span here!',
214
214
'ai.settings.maxRetries' : 2 ,
215
215
'ai.settings.maxSteps' : 1 ,
216
216
'ai.streaming' : false ,
@@ -236,11 +236,11 @@ describe('Vercel AI integration', () => {
236
236
'ai.operationId' : 'ai.generateText.doGenerate' ,
237
237
'ai.pipeline.name' : 'generateText.doGenerate' ,
238
238
'ai.prompt.format' : 'prompt' ,
239
- 'ai.prompt.messages': '[{"role":"user","content":[{"type":"text","text":"Where is the first span?"}]}]',
239
+ 'gen_ai.request.messages': '[{"role":"user","content":[{"type":"text","text":"Where is the first span?"}]}]',
240
240
'ai.response.finishReason' : 'stop' ,
241
241
'ai.response.id' : expect . any ( String ) ,
242
242
'ai.response.model' : 'mock-model-id' ,
243
- 'ai.response.text': 'First span here!',
243
+ 'gen_ai.response.text': 'First span here!',
244
244
'ai.response.timestamp' : expect . any ( String ) ,
245
245
'ai.settings.maxRetries' : 2 ,
246
246
'ai.streaming' : false ,
@@ -270,7 +270,7 @@ describe('Vercel AI integration', () => {
270
270
'ai.pipeline.name' : 'generateText' ,
271
271
'ai.prompt' : '{"prompt":"Where is the second span?"}' ,
272
272
'ai.response.finishReason' : 'stop' ,
273
- 'ai.response.text': expect.any(String),
273
+ 'gen_ai.response.text': expect.any(String),
274
274
'ai.settings.maxRetries' : 2 ,
275
275
'ai.settings.maxSteps' : 1 ,
276
276
'ai.streaming' : false ,
@@ -305,10 +305,10 @@ describe('Vercel AI integration', () => {
305
305
'ai.response.finishReason' : 'stop' ,
306
306
'ai.response.model' : 'mock-model-id' ,
307
307
'ai.response.id' : expect . any ( String ) ,
308
- 'ai.response.text': expect.any(String),
308
+ 'gen_ai.response.text': expect.any(String),
309
309
'ai.response.timestamp' : expect . any ( String ) ,
310
310
'ai.prompt.format' : expect . any ( String ) ,
311
- 'ai.prompt.messages': expect.any(String),
311
+ 'gen_ai.request.messages': expect.any(String),
312
312
'gen_ai.response.finish_reasons' : [ 'stop' ] ,
313
313
'gen_ai.usage.input_tokens' : 10 ,
314
314
'gen_ai.usage.output_tokens' : 20 ,
@@ -330,8 +330,8 @@ describe('Vercel AI integration', () => {
330
330
'ai.pipeline.name' : 'generateText' ,
331
331
'ai.prompt' : '{"prompt":"What is the weather in San Francisco?"}' ,
332
332
'ai.response.finishReason' : 'tool-calls' ,
333
- 'ai.response.text': 'Tool call completed!',
334
- 'ai.response.toolCalls': expect.any(String),
333
+ 'gen_ai.response.text': 'Tool call completed!',
334
+ 'gen_ai.response.tool_calls': expect.any(String),
335
335
'ai.settings.maxRetries' : 2 ,
336
336
'ai.settings.maxSteps' : 1 ,
337
337
'ai.streaming' : false ,
@@ -357,15 +357,15 @@ describe('Vercel AI integration', () => {
357
357
'ai.operationId' : 'ai.generateText.doGenerate' ,
358
358
'ai.pipeline.name' : 'generateText.doGenerate' ,
359
359
'ai.prompt.format' : expect . any ( String ) ,
360
- 'ai.prompt.messages': expect.any(String),
360
+ 'gen_ai.request.messages': expect.any(String),
361
361
'ai.prompt.toolChoice' : expect . any ( String ) ,
362
- 'ai.prompt.tools': expect.any(Array),
362
+ 'gen_ai.request.available_tools': expect.any(Array),
363
363
'ai.response.finishReason' : 'tool-calls' ,
364
364
'ai.response.id' : expect . any ( String ) ,
365
365
'ai.response.model' : 'mock-model-id' ,
366
- 'ai.response.text': 'Tool call completed!',
366
+ 'gen_ai.response.text': 'Tool call completed!',
367
367
'ai.response.timestamp' : expect . any ( String ) ,
368
- 'ai.response.toolCalls': expect.any(String),
368
+ 'gen_ai.response.tool_calls': expect.any(String),
369
369
'ai.settings.maxRetries' : 2 ,
370
370
'ai.streaming' : false ,
371
371
'gen_ai.request.model' : 'mock-model-id' ,
0 commit comments