
Commit d78faa7

Backport: feat(provider/anthropic): expose stop_sequence in provider metadata (#9121)

This is an automated backport of #9110 to the release-v5.0 branch.
Co-authored-by: josh <144584931+dancer@users.noreply.github.com>

1 parent 68be859 · commit d78faa7

File tree

6 files changed: +160 -1 lines changed
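
After this change, the anthropic entry in provider metadata carries the matched stop sequence alongside the existing raw usage and cache fields; it is null when generation did not end on a configured stop sequence. A minimal TypeScript sketch of the resulting shape, with illustrative values mirroring the test snapshots below rather than output from a real API call:

// Illustrative shape only; values taken from the test fixtures in this commit.
const exampleProviderMetadata = {
  anthropic: {
    usage: { input_tokens: 4, output_tokens: 30 }, // raw Anthropic usage object
    cacheCreationInputTokens: null,
    stopSequence: 'STOP', // null when the model did not stop on a stop sequence
  },
};

console.log(exampleProviderMetadata.anthropic.stopSequence); // 'STOP'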

.changeset/soft-colts-exist.md

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
---
'@ai-sdk/anthropic': patch
---

feat(provider/anthropic): expose stop_sequence in provider metadata
Lines changed: 22 additions & 0 deletions

@@ -0,0 +1,22 @@
import { anthropic } from '@ai-sdk/anthropic';
import { generateText } from 'ai';
import 'dotenv/config';

async function main() {
  const result = await generateText({
    model: anthropic('claude-sonnet-4-0'),
    prompt: 'Write a short story and end it with the word END.',
    stopSequences: ['END'],
  });

  console.log(result.text);
  console.log();
  console.log('Token usage:', result.usage);
  console.log('Finish reason:', result.finishReason);
  console.log(
    'Stop sequence:',
    result.providerMetadata?.anthropic?.stopSequence,
  );
}

main().catch(console.error);
Lines changed: 25 additions & 0 deletions

@@ -0,0 +1,25 @@
import { anthropic } from '@ai-sdk/anthropic';
import { streamText } from 'ai';
import 'dotenv/config';

async function main() {
  const result = streamText({
    model: anthropic('claude-3-5-sonnet-20240620'),
    prompt: 'Write a short story and end it with the word END.',
    stopSequences: ['END'],
  });

  for await (const textPart of result.textStream) {
    process.stdout.write(textPart);
  }

  console.log();
  console.log('Token usage:', await result.usage);
  console.log('Finish reason:', await result.finishReason);
  console.log(
    'Stop sequence:',
    (await result.providerMetadata)?.anthropic?.stopSequence,
  );
}

main().catch(console.error);
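
With streamText, result.usage, result.finishReason, and result.providerMetadata are promises that resolve once the stream has finished, which is why the example above awaits them after consuming result.textStream.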

packages/anthropic/src/__snapshots__/anthropic-messages-language-model.test.ts.snap

Lines changed: 2 additions & 0 deletions

@@ -836,6 +836,7 @@ The page also discusses",
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": 0,
+          "stopSequence": null,
           "usage": {
             "cache_creation": {
               "ephemeral_1h_input_tokens": 0,

@@ -1574,6 +1575,7 @@ The main tech story today appears to be Apple's significant",
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": 0,
+          "stopSequence": null,
           "usage": {
             "cache_creation": {
               "ephemeral_1h_input_tokens": 0,

packages/anthropic/src/anthropic-messages-language-model.test.ts

Lines changed: 96 additions & 0 deletions

@@ -408,6 +408,43 @@ describe('AnthropicMessagesLanguageModel', () => {
     `);
   });
 
+  it('should include stop_sequence in provider metadata', async () => {
+    server.urls['https://api.anthropic.com/v1/messages'].response = {
+      type: 'json-value',
+      body: {
+        id: 'msg_017TfcQ4AgGxKyBduUpqYPZn',
+        type: 'message',
+        role: 'assistant',
+        content: [{ type: 'text', text: 'Hello, World!' }],
+        model: 'claude-3-haiku-20240307',
+        stop_reason: 'stop_sequence',
+        stop_sequence: 'STOP',
+        usage: {
+          input_tokens: 4,
+          output_tokens: 30,
+        },
+      },
+    };
+
+    const result = await model.doGenerate({
+      prompt: TEST_PROMPT,
+      stopSequences: ['STOP'],
+    });
+
+    expect(result.providerMetadata).toMatchInlineSnapshot(`
+      {
+        "anthropic": {
+          "cacheCreationInputTokens": null,
+          "stopSequence": "STOP",
+          "usage": {
+            "input_tokens": 4,
+            "output_tokens": 30,
+          },
+        },
+      }
+    `);
+  });
+
   it('should expose the raw response headers', async () => {
     prepareJsonResponse({
       headers: {

@@ -623,6 +660,7 @@ describe('AnthropicMessagesLanguageModel', () => {
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": 10,
+          "stopSequence": null,
           "usage": {
             "cache_creation_input_tokens": 10,
             "cache_read_input_tokens": 5,

@@ -756,6 +794,7 @@ describe('AnthropicMessagesLanguageModel', () => {
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": 10,
+          "stopSequence": null,
           "usage": {
             "cache_creation": {
               "ephemeral_1h_input_tokens": 10,

@@ -2018,6 +2057,7 @@ describe('AnthropicMessagesLanguageModel', () => {
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": null,
+          "stopSequence": null,
           "usage": {
             "input_tokens": 441,
             "output_tokens": 65,

@@ -2095,6 +2135,7 @@ describe('AnthropicMessagesLanguageModel', () => {
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": null,
+          "stopSequence": null,
           "usage": {
             "input_tokens": 17,
             "output_tokens": 227,

@@ -2192,6 +2233,7 @@ describe('AnthropicMessagesLanguageModel', () => {
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": null,
+          "stopSequence": null,
           "usage": {
             "input_tokens": 17,
             "output_tokens": 227,

@@ -2271,6 +2313,7 @@ describe('AnthropicMessagesLanguageModel', () => {
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": null,
+          "stopSequence": null,
           "usage": {
             "input_tokens": 17,
             "output_tokens": 227,

@@ -2336,6 +2379,7 @@ describe('AnthropicMessagesLanguageModel', () => {
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": null,
+          "stopSequence": null,
           "usage": {
             "input_tokens": 17,
             "output_tokens": 227,

@@ -2473,6 +2517,7 @@ describe('AnthropicMessagesLanguageModel', () => {
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": null,
+          "stopSequence": null,
           "usage": {
             "input_tokens": 441,
             "output_tokens": 65,

@@ -2680,6 +2725,7 @@ describe('AnthropicMessagesLanguageModel', () => {
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": 10,
+          "stopSequence": null,
           "usage": {
             "cache_creation_input_tokens": 10,
             "cache_read_input_tokens": 5,

@@ -2752,6 +2798,7 @@ describe('AnthropicMessagesLanguageModel', () => {
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": 10,
+          "stopSequence": null,
           "usage": {
             "cache_creation": {
               "ephemeral_1h_input_tokens": 10,

@@ -2854,6 +2901,54 @@ describe('AnthropicMessagesLanguageModel', () => {
         "providerMetadata": {
           "anthropic": {
             "cacheCreationInputTokens": null,
+            "stopSequence": null,
+            "usage": {
+              "input_tokens": 17,
+              "output_tokens": 227,
+            },
+          },
+        },
+        "type": "finish",
+        "usage": {
+          "cachedInputTokens": undefined,
+          "inputTokens": 17,
+          "outputTokens": 227,
+          "totalTokens": 244,
+        },
+      },
+    ]
+  `);
+  });
+
+  it('should include stop_sequence in provider metadata', async () => {
+    server.urls['https://api.anthropic.com/v1/messages'].response = {
+      type: 'stream-chunks',
+      chunks: [
+        `data: {"type":"message_start","message":{"id":"msg_01KfpJoAEabmH2iHRRFjQMAG","type":"message","role":"assistant","content":[],"model":"claude-3-haiku-20240307","stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":17,"output_tokens":1}}}\n\n`,
+        `data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}\n\n`,
+        `data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hello"}}\n\n`,
+        `data: {"type":"content_block_stop","index":0}\n\n`,
+        `data: {"type":"message_delta","delta":{"stop_reason":"stop_sequence","stop_sequence":"STOP"},"usage":{"output_tokens":227}}\n\n`,
+        `data: {"type":"message_stop"}\n\n`,
+      ],
+    };
+
+    const result = await model.doStream({
+      prompt: TEST_PROMPT,
+      stopSequences: ['STOP'],
+    });
+
+    const chunks = await convertReadableStreamToArray(result.stream);
+
+    expect(chunks.filter(chunk => chunk.type === 'finish'))
+      .toMatchInlineSnapshot(`
+      [
+        {
+          "finishReason": "stop",
+          "providerMetadata": {
+            "anthropic": {
+              "cacheCreationInputTokens": null,
+              "stopSequence": "STOP",
             "usage": {
               "input_tokens": 17,
               "output_tokens": 227,

@@ -3087,6 +3182,7 @@ describe('AnthropicMessagesLanguageModel', () => {
       "providerMetadata": {
         "anthropic": {
           "cacheCreationInputTokens": null,
+          "stopSequence": null,
           "usage": {
             "input_tokens": 17,
             "output_tokens": 227,

packages/anthropic/src/anthropic-messages-language-model.ts

Lines changed: 10 additions & 1 deletion

@@ -665,6 +665,7 @@ export class AnthropicMessagesLanguageModel implements LanguageModelV2 {
           usage: response.usage as JSONObject,
           cacheCreationInputTokens:
             response.usage.cache_creation_input_tokens ?? null,
+          stopSequence: response.stop_sequence ?? null,
         },
       },
     };

@@ -714,6 +715,7 @@ export class AnthropicMessagesLanguageModel implements LanguageModelV2 {
 
     let rawUsage: JSONObject | undefined = undefined;
     let cacheCreationInputTokens: number | null = null;
+    let stopSequence: string | null = null;
 
     let blockType:
       | 'text'

@@ -1148,6 +1150,8 @@ export class AnthropicMessagesLanguageModel implements LanguageModelV2 {
               isJsonResponseFromTool: usesJsonResponseTool,
             });
 
+            stopSequence = value.delta.stop_sequence ?? null;
+
             rawUsage = {
               ...rawUsage,
               ...(value.usage as JSONObject),

@@ -1165,6 +1169,7 @@ export class AnthropicMessagesLanguageModel implements LanguageModelV2 {
                 anthropic: {
                   usage: rawUsage ?? null,
                   cacheCreationInputTokens,
+                  stopSequence,
                 },
               },
             });

@@ -1287,6 +1292,7 @@ const anthropicMessagesResponseSchema = z.object({
     ]),
   ),
   stop_reason: z.string().nullish(),
+  stop_sequence: z.string().nullish(),
   usage: z.looseObject({
     input_tokens: z.number(),
     output_tokens: z.number(),

@@ -1438,7 +1444,10 @@ const anthropicMessagesChunkSchema = z.discriminatedUnion('type', [
   }),
   z.object({
     type: z.literal('message_delta'),
-    delta: z.object({ stop_reason: z.string().nullish() }),
+    delta: z.object({
+      stop_reason: z.string().nullish(),
+      stop_sequence: z.string().nullish(),
+    }),
     usage: z.looseObject({
       output_tokens: z.number(),
       cache_creation_input_tokens: z.number().nullish(),
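
The last hunk widens the message_delta chunk schema so a streamed delta can carry the matched sequence. A minimal standalone sketch of how such a delta parses, assuming zod is installed; it mirrors only the delta portion of the real schema, which additionally validates usage and sits alongside the other chunk variants:

import { z } from 'zod';

// Stand-alone mirror of the extended message_delta delta schema (illustrative).
const messageDeltaChunk = z.object({
  type: z.literal('message_delta'),
  delta: z.object({
    stop_reason: z.string().nullish(),
    stop_sequence: z.string().nullish(),
  }),
});

// A streamed delta that ends generation on the 'STOP' stop sequence now
// validates and exposes the matched string, which the language model then
// forwards as providerMetadata.anthropic.stopSequence in the finish part.
const parsed = messageDeltaChunk.parse({
  type: 'message_delta',
  delta: { stop_reason: 'stop_sequence', stop_sequence: 'STOP' },
});

console.log(parsed.delta.stop_sequence); // 'STOP'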
