Skip to content

Commit 90d9940

Browse files
committed
fix(node): No longer compute ai token attributes in Vercel AI integration
1 parent: dcdf074 · commit: 90d9940

File tree

2 files changed

+8
-48
lines changed
  • dev-packages/node-integration-tests/suites/tracing/ai
  • packages/node/src/integrations/tracing/vercelai

2 files changed

+8
-48
lines changed

dev-packages/node-integration-tests/suites/tracing/ai/test.ts

Lines changed: 8 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -12,20 +12,18 @@ describe('ai', () => {
1212
spans: expect.arrayContaining([
1313
expect.objectContaining({
1414
data: expect.objectContaining({
15-
'ai.completion_tokens.used': 20,
15+
'gen_ai.usage.output_tokens': 20,
1616
'ai.model.id': 'mock-model-id',
1717
'ai.model.provider': 'mock-provider',
1818
'ai.model_id': 'mock-model-id',
1919
'ai.operationId': 'ai.generateText',
2020
'ai.pipeline.name': 'generateText',
21-
'ai.prompt_tokens.used': 10,
21+
'gen_ai.usage.input_tokens': 10,
2222
'ai.response.finishReason': 'stop',
2323
'ai.settings.maxRetries': 2,
2424
'ai.settings.maxSteps': 1,
2525
'ai.streaming': false,
26-
'ai.total_tokens.used': 30,
27-
'ai.usage.completionTokens': 20,
28-
'ai.usage.promptTokens': 10,
26+
'gen_ai.usage.total_tokens': 30,
2927
'operation.name': 'ai.generateText',
3028
'sentry.op': 'ai.pipeline.generateText',
3129
'sentry.origin': 'auto.vercelai.otel',
@@ -51,14 +49,10 @@ describe('ai', () => {
5149
'ai.streaming': false,
5250
'ai.response.finishReason': 'stop',
5351
'ai.response.model': 'mock-model-id',
54-
'ai.usage.promptTokens': 10,
55-
'ai.usage.completionTokens': 20,
5652
'gen_ai.response.finish_reasons': ['stop'],
5753
'gen_ai.usage.input_tokens': 10,
5854
'gen_ai.usage.output_tokens': 20,
59-
'ai.completion_tokens.used': 20,
60-
'ai.prompt_tokens.used': 10,
61-
'ai.total_tokens.used': 30,
55+
'gen_ai.usage.total_tokens': 30,
6256
}),
6357
description: 'generateText.doGenerate',
6458
op: 'ai.run.doGenerate',
@@ -67,25 +61,23 @@ describe('ai', () => {
6761
}),
6862
expect.objectContaining({
6963
data: expect.objectContaining({
70-
'ai.completion_tokens.used': 20,
7164
'ai.model.id': 'mock-model-id',
7265
'ai.model.provider': 'mock-provider',
7366
'ai.model_id': 'mock-model-id',
7467
'ai.prompt': '{"prompt":"Where is the second span?"}',
7568
'ai.operationId': 'ai.generateText',
7669
'ai.pipeline.name': 'generateText',
77-
'ai.prompt_tokens.used': 10,
7870
'ai.response.finishReason': 'stop',
7971
'ai.input_messages': '{"prompt":"Where is the second span?"}',
8072
'ai.settings.maxRetries': 2,
8173
'ai.settings.maxSteps': 1,
8274
'ai.streaming': false,
83-
'ai.total_tokens.used': 30,
84-
'ai.usage.completionTokens': 20,
85-
'ai.usage.promptTokens': 10,
8675
'operation.name': 'ai.generateText',
8776
'sentry.op': 'ai.pipeline.generateText',
8877
'sentry.origin': 'auto.vercelai.otel',
78+
'gen_ai.usage.input_tokens': 10,
79+
'gen_ai.usage.output_tokens': 20,
80+
'gen_ai.usage.total_tokens': 30,
8981
}),
9082
description: 'generateText',
9183
op: 'ai.pipeline.generateText',
@@ -108,14 +100,10 @@ describe('ai', () => {
108100
'ai.streaming': false,
109101
'ai.response.finishReason': 'stop',
110102
'ai.response.model': 'mock-model-id',
111-
'ai.usage.promptTokens': 10,
112-
'ai.usage.completionTokens': 20,
113103
'gen_ai.response.finish_reasons': ['stop'],
114104
'gen_ai.usage.input_tokens': 10,
115105
'gen_ai.usage.output_tokens': 20,
116-
'ai.completion_tokens.used': 20,
117-
'ai.prompt_tokens.used': 10,
118-
'ai.total_tokens.used': 30,
106+
'gen_ai.usage.total_tokens': 30,
119107
}),
120108
description: 'generateText.doGenerate',
121109
op: 'ai.run.doGenerate',

packages/node/src/integrations/tracing/vercelai/index.ts

Lines changed: 0 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -122,34 +122,6 @@ const _vercelAIIntegration = (() => {
122122
}
123123
span.setAttribute('ai.streaming', name.includes('stream'));
124124
});
125-
126-
client.addEventProcessor(event => {
127-
if (event.type === 'transaction' && event.spans?.length) {
128-
for (const span of event.spans) {
129-
const { data: attributes, description: name } = span;
130-
131-
if (!name || span.origin !== 'auto.vercelai.otel') {
132-
continue;
133-
}
134-
135-
if (attributes['ai.usage.completionTokens'] != undefined) {
136-
attributes['ai.completion_tokens.used'] = attributes['ai.usage.completionTokens'];
137-
}
138-
if (attributes['ai.usage.promptTokens'] != undefined) {
139-
attributes['ai.prompt_tokens.used'] = attributes['ai.usage.promptTokens'];
140-
}
141-
if (
142-
typeof attributes['ai.usage.completionTokens'] == 'number' &&
143-
typeof attributes['ai.usage.promptTokens'] == 'number'
144-
) {
145-
attributes['ai.total_tokens.used'] =
146-
attributes['ai.usage.completionTokens'] + attributes['ai.usage.promptTokens'];
147-
}
148-
}
149-
}
150-
151-
return event;
152-
});
153125
});
154126
},
155127
};

0 commit comments

Comments (0)