From 8bb61b5d4577da261eb722150cd52cfab3a3891d Mon Sep 17 00:00:00 2001 From: Daniel Rodriguez Date: Fri, 14 Apr 2023 20:35:10 -0400 Subject: [PATCH] Closes #2636 - Adds experimental.OpenAIModel --- CHANGELOG.md | 4 ++++ package.json | 7 +++++++ src/ai/openaiProvider.ts | 13 ++++++++++--- src/config.ts | 1 + 4 files changed, 22 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c0110fb10b41d..5117492460515 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p ## [Unreleased] +### Added + +- Adds a `gitlens.experimental.openAIModel` setting to specify the OpenAI model to use to generate commit messages when using the `GitLens: Generate Commit Message` command (defaults to `gpt-3.5-turbo`) — closes [#2636](https://github.com/gitkraken/vscode-gitlens/issues/2636) thanks to [PR #2637](https://github.com/gitkraken/vscode-gitlens/pull/2637) by Daniel Rodríguez ([@sadasant](https://github.com/sadasant)) + ## [13.5.0] - 2023-04-07 ### Added diff --git a/package.json b/package.json index b0861275afa82..24346cef84840 100644 --- a/package.json +++ b/package.json @@ -3684,6 +3684,13 @@ "scope": "window", "order": 55 }, + "gitlens.experimental.openAIModel": { + "type": "string", + "default": "gpt-3.5-turbo", + "markdownDescription": "Specifies the OpenAI model to use to generate commit messages when using the `GitLens: Generate Commit Message` command", + "scope": "window", + "order": 56 + }, "gitlens.advanced.externalDiffTool": { "type": [ "string", diff --git a/src/ai/openaiProvider.ts b/src/ai/openaiProvider.ts index 92c382994f6a7..020738b574de5 100644 --- a/src/ai/openaiProvider.ts +++ b/src/ai/openaiProvider.ts @@ -12,12 +12,15 @@ const maxCodeCharacters = 12000; export class OpenAIProvider implements AIProvider { readonly id = 'openai'; readonly name = 'OpenAI'; + private model: OpenAIChatCompletionModels = 'gpt-3.5-turbo'; constructor(private readonly 
container: Container) {} dispose() {} async generateCommitMessage(diff: string, options?: { context?: string }): Promise<string | undefined> { + this.model = configuration.get('experimental.openAIModel') || 'gpt-3.5-turbo'; + const openaiApiKey = await getApiKey(this.container.storage); if (openaiApiKey == null) return undefined; @@ -34,7 +37,7 @@ export class OpenAIProvider implements AIProvider { } const data: OpenAIChatCompletionRequest = { - model: 'gpt-3.5-turbo', + model: this.model, messages: [ { role: 'system', @@ -79,6 +82,8 @@ export class OpenAIProvider implements AIProvider { } async explainChanges(message: string, diff: string): Promise<string | undefined> { + this.model = configuration.get('experimental.openAIModel') || 'gpt-3.5-turbo'; + const openaiApiKey = await getApiKey(this.container.storage); if (openaiApiKey == null) return undefined; @@ -90,7 +95,7 @@ export class OpenAIProvider implements AIProvider { } const data: OpenAIChatCompletionRequest = { - model: 'gpt-3.5-turbo', + model: this.model, messages: [ { role: 'system', @@ -195,8 +200,10 @@ async function getApiKey(storage: Storage): Promise<string | undefined> { return openaiApiKey; } +export type OpenAIChatCompletionModels = 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-32k' | 'gpt-4-32k-0314'; + interface OpenAIChatCompletionRequest { - model: 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301'; + model: OpenAIChatCompletionModels; messages: { role: 'system' | 'user' | 'assistant'; content: string }[]; temperature?: number; top_p?: number; diff --git a/src/config.ts b/src/config.ts index 5c050037d9cd5..d4d2f0a4a4f1a 100644 --- a/src/config.ts +++ b/src/config.ts @@ -49,6 +49,7 @@ export interface Config { detectNestedRepositories: boolean; experimental: { generateCommitMessagePrompt: string; + openAIModel?: 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-32k' | 'gpt-4-32k-0314'; }; fileAnnotations: { command: string | null;