Skip to content

Commit 26978e9

Browse files
authored
Update How_to_count_tokens_with_tiktoken.ipynb (openai#511)
Add support for gpt-3.5-*-0613 models
1 parent b587f9b commit 26978e9

File tree

1 file changed

+4
-1
lines changed

1 file changed

+4
-1
lines changed

examples/How_to_count_tokens_with_tiktoken.ipynb

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -441,13 +441,16 @@
441441
" if model == \"gpt-3.5-turbo\":\n",
442442
" print(\"Warning: gpt-3.5-turbo may change over time. Returning num tokens assuming gpt-3.5-turbo-0301.\")\n",
443443
" return num_tokens_from_messages(messages, model=\"gpt-3.5-turbo-0301\")\n",
444+
" elif model == \"gpt-3.5-turbo-16k\":\n",
445+
" print(\"Warning: gpt-3.5-turbo-16k may change over time. Returning num tokens assuming gpt-3.5-turbo-16k-0613.\")\n",
446+
" return num_tokens_from_messages(messages, model=\"gpt-3.5-turbo-16k-0613\")\n",
444447
" elif model == \"gpt-4\":\n",
445448
" print(\"Warning: gpt-4 may change over time. Returning num tokens assuming gpt-4-0314.\")\n",
446449
" return num_tokens_from_messages(messages, model=\"gpt-4-0314\")\n",
447450
" elif model == \"gpt-3.5-turbo-0301\":\n",
448451
" tokens_per_message = 4 # every message follows <|start|>{role/name}\\n{content}<|end|>\\n\n",
449452
" tokens_per_name = -1 # if there's a name, the role is omitted\n",
450-
" elif model == \"gpt-4-0314\":\n",
453+
" elif model in {\"gpt-4-0314\", \"gpt-3.5-turbo-0613\", \"gpt-3.5-turbo-16k-0613\"}:\n",
451454
" tokens_per_message = 3\n",
452455
" tokens_per_name = 1\n",
453456
" else:\n",

0 commit comments

Comments (0)