Commit b3dd5be
✨ v0.7.1 (danny-avila#2502)
* chore: make openai package definition explicit

* ✨ v0.7.1

* chore: gpt-4-vision correct context length

* add `llava` to vision models list
danny-avila authored Apr 23, 2024
1 parent 9ce859b commit b3dd5be
Showing 5 changed files with 18 additions and 2 deletions.
models/tx.js: 2 additions, 0 deletions
@@ -52,6 +52,8 @@ const getValueKey = (model, endpoint) => {
    return 'gpt-3.5-turbo-1106';
  } else if (modelName.includes('gpt-3.5')) {
    return '4k';
  } else if (modelName.includes('gpt-4-vision')) {
    return 'gpt-4-1106';
  } else if (modelName.includes('gpt-4-1106')) {
    return 'gpt-4-1106';
  } else if (modelName.includes('gpt-4-0125')) {
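
The new branch routes vision model names onto the existing gpt-4-1106 pricing key, so vision requests pick up the same token multipliers as gpt-4-1106. A minimal illustrative sketch of the resulting behavior (not the project code; `toPricingKey` is a hypothetical stand-in for getValueKey's substring checks):

```js
// Hypothetical stand-in for getValueKey's substring matching; only the
// branches relevant to this change are reproduced here.
const toPricingKey = (modelName) => {
  if (modelName.includes('gpt-4-vision')) return 'gpt-4-1106';
  if (modelName.includes('gpt-4-1106')) return 'gpt-4-1106';
  return undefined;
};

console.log(toPricingKey('gpt-4-vision-preview')); // 'gpt-4-1106'
console.log(toPricingKey('openai/gpt-4-1106'));    // 'gpt-4-1106'
```
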
models/tx.spec.js: 7 additions, 0 deletions
@@ -34,6 +34,13 @@ describe('getValueKey', () => {
    expect(getValueKey('openai/gpt-4-1106')).toBe('gpt-4-1106');
    expect(getValueKey('gpt-4-1106/openai/')).toBe('gpt-4-1106');
  });

  it('should return "gpt-4-1106" for model type of "gpt-4-1106"', () => {
    expect(getValueKey('gpt-4-vision-preview')).toBe('gpt-4-1106');
    expect(getValueKey('openai/gpt-4-1106')).toBe('gpt-4-1106');
    expect(getValueKey('gpt-4-turbo')).toBe('gpt-4-1106');
    expect(getValueKey('gpt-4-0125')).toBe('gpt-4-1106');
  });
});

describe('getMultiplier', () => {
package.json: 2 additions, 2 deletions
@@ -1,6 +1,6 @@
{
  "name": "@librechat/backend",
  "version": "0.7.0",
  "version": "0.7.1",
  "description": "",
  "scripts": {
    "start": "echo 'please run this from the root directory'",
@@ -74,7 +74,7 @@
"multer": "^1.4.5-lts.1",
"nodejs-gpt": "^1.37.4",
"nodemailer": "^6.9.4",
"openai": "^4.36.0",
"openai": "4.36.0",
"openai-chat-tokens": "^0.2.8",
"openid-client": "^5.4.2",
"passport": "^0.6.0",
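
Dropping the caret pins openai to exactly 4.36.0, so npm can no longer resolve a newer compatible 4.x release. A small sketch with the semver package (an illustration only, not part of this commit) shows the difference between the two specifiers:

```js
// Illustration of range semantics: caret range vs. exact pin (requires the semver package).
const semver = require('semver');

console.log(semver.satisfies('4.36.0', '^4.36.0')); // true
console.log(semver.satisfies('4.37.2', '^4.36.0')); // true  (caret allows any 4.x >= 4.36.0)
console.log(semver.satisfies('4.37.2', '4.36.0'));  // false (exact pin matches only 4.36.0)
```
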
utils/tokens.js: 1 addition, 0 deletions
@@ -49,6 +49,7 @@ const openAIModels = {
  'gpt-4-1106': 127990, // -10 from max
  'gpt-4-0125': 127990, // -10 from max
  'gpt-4-turbo': 127990, // -10 from max
  'gpt-4-vision': 127990, // -10 from max
  'gpt-3.5-turbo': 16375, // -10 from max
  'gpt-3.5-turbo-0613': 4092, // -5 from max
  'gpt-3.5-turbo-0301': 4092, // -5 from max
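
The new entry gives gpt-4-vision the same 127,990-token ceiling as the other 128k GPT-4 variants. A rough sketch of how a lookup over this map could resolve a full model name by partial match; the actual getModelMaxTokens implementation is not shown in this diff, so the matching logic below is an assumption:

```js
// Assumed lookup logic (the real getModelMaxTokens may differ): pick the
// longest map key contained in the requested model name.
const openAIModels = {
  'gpt-4-1106': 127990,
  'gpt-4-vision': 127990,
  'gpt-3.5-turbo': 16375,
};

const lookupMaxTokens = (modelName) => {
  const matches = Object.keys(openAIModels).filter((key) => modelName.includes(key));
  if (matches.length === 0) return undefined;
  matches.sort((a, b) => b.length - a.length); // prefer the most specific key
  return openAIModels[matches[0]];
};

console.log(lookupMaxTokens('gpt-4-vision-preview')); // 127990
```
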
utils/tokens.spec.js: 6 additions, 0 deletions
@@ -59,6 +59,12 @@ describe('getModelMaxTokens', () => {
    expect(getModelMaxTokens('gpt-4-1106')).toBe(maxTokensMap[EModelEndpoint.openAI]['gpt-4-1106']);
  });

  test('should return correct tokens for gpt-4-vision exact match', () => {
    expect(getModelMaxTokens('gpt-4-vision')).toBe(
      maxTokensMap[EModelEndpoint.openAI]['gpt-4-vision'],
    );
  });

  test('should return correct tokens for gpt-3.5-turbo-1106 partial match', () => {
    expect(getModelMaxTokens('something-/gpt-3.5-turbo-1106')).toBe(
      maxTokensMap[EModelEndpoint.openAI]['gpt-3.5-turbo-1106'],
