Skip to content

Commit

Permalink
feat: HTTP + HTTPS proxy support (Nutlope#139)
Browse files Browse the repository at this point in the history
Co-authored-by: Hiroki Osame <hiroki.osame@gmail.com>
  • Loading branch information
BuethSam and privatenumber authored Mar 27, 2023
1 parent 6e02dc8 commit a0db0f3
Show file tree
Hide file tree
Showing 7 changed files with 77 additions and 17 deletions.
10 changes: 9 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,15 @@ The number of commit messages to generate to pick from.

Note, this will use more tokens as it generates more results.

This can also be configured with the CLI flag `--generate`.
#### proxy

Set an HTTP/HTTPS proxy to use for requests.

To clear the proxy option, you can use the command (note the empty value after the equals sign):

```sh
aicommits config set proxy=
```

## How it works

Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
"eslint": "^8.35.0",
"execa": "^7.0.0",
"fs-fixture": "^1.2.0",
"https-proxy-agent": "^5.0.1",
"ini": "^3.0.1",
"kolorist": "^1.7.0",
"lint-staged": "^13.1.2",
Expand Down
21 changes: 21 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 4 additions & 1 deletion src/commands/aicommits.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,8 +35,10 @@ export default async (
staged.files.map(file => ` ${file}`).join('\n')
}`);

const { env } = process;
const config = await getConfig({
OPENAI_KEY: process.env.OPENAI_KEY ?? process.env.OPENAI_API_KEY,
OPENAI_KEY: env.OPENAI_KEY || env.OPENAI_API_KEY,
proxy: env.https_proxy || env.HTTPS_PROXY || env.http_proxy || env.HTTP_PROXY,
generate: generate?.toString(),
});

Expand All @@ -49,6 +51,7 @@ export default async (
config.locale,
staged.diff,
config.generate,
config.proxy,
);
} finally {
s.stop('Changes analyzed');
Expand Down
6 changes: 5 additions & 1 deletion src/commands/prepare-commit-msg-hook.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,10 @@ export default () => (async () => {

intro(bgCyan(black(' aicommits ')));

const config = await getConfig();
const { env } = process;
const config = await getConfig({
proxy: env.https_proxy || env.HTTPS_PROXY || env.http_proxy || env.HTTP_PROXY,
});

const s = spinner();
s.start('The AI is analyzing your changes');
Expand All @@ -41,6 +44,7 @@ export default () => (async () => {
config.locale,
staged!.diff,
config.generate,
config.proxy,
);
} finally {
s.stop('Changes analyzed');
Expand Down
9 changes: 9 additions & 0 deletions src/utils/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,15 @@ const configParsers = {

return parsed;
},
/**
 * Parses the `proxy` config value.
 *
 * An unset or empty value returns `undefined`, which disables the proxy —
 * this is what allows clearing the setting via `aicommits config set proxy=`.
 * Otherwise the value must start with `http://` or `https://`.
 *
 * NOTE(review): only the scheme prefix is validated, not full URL syntax,
 * despite the "Must be a valid URL" message — confirm whether stricter
 * validation (e.g. `new URL(url)`) is intended.
 */
proxy(url?: string) {
	// `!url` already covers both undefined and the empty string,
	// so no separate length check is needed.
	if (!url) {
		return undefined;
	}

	parseAssert('proxy', /^https?:\/\//.test(url), 'Must be a valid URL');

	return url;
},
} as const;

type ConfigKeys = keyof typeof configParsers;
Expand Down
42 changes: 28 additions & 14 deletions src/utils/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,15 @@ import https from 'https';
import type { ClientRequest, IncomingMessage } from 'http';
import type { CreateChatCompletionRequest, CreateChatCompletionResponse } from 'openai';
import { encoding_for_model as encodingForModel } from '@dqbd/tiktoken';
import createHttpsProxyAgent from 'https-proxy-agent';
import { KnownError } from './error.js';

const httpsPost = async (
hostname: string,
path: string,
headers: Record<string, string>,
json: unknown,
proxy?: string,
) => new Promise<{
request: ClientRequest;
response: IncomingMessage;
Expand All @@ -27,6 +29,11 @@ const httpsPost = async (
'Content-Length': Buffer.byteLength(postContent),
},
timeout: 10_000, // 10s
agent: (
proxy
? createHttpsProxyAgent(proxy)
: undefined
),
},
(response) => {
const body: Buffer[] = [];
Expand All @@ -53,6 +60,7 @@ const httpsPost = async (
const createChatCompletion = async (
apiKey: string,
json: CreateChatCompletionRequest,
proxy?: string,
) => {
const { response, data } = await httpsPost(
'api.openai.com',
Expand All @@ -61,6 +69,7 @@ const createChatCompletion = async (
Authorization: `Bearer ${apiKey}`,
},
json,
proxy,
);

if (
Expand Down Expand Up @@ -97,6 +106,7 @@ export const generateCommitMessage = async (
locale: string,
diff: string,
completions: number,
proxy?: string,
) => {
const prompt = getPrompt(locale, diff);

Expand All @@ -109,20 +119,24 @@ export const generateCommitMessage = async (
}

try {
const completion = await createChatCompletion(apiKey, {
model,
messages: [{
role: 'user',
content: prompt,
}],
temperature: 0.7,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0,
max_tokens: 200,
stream: false,
n: completions,
});
const completion = await createChatCompletion(
apiKey,
{
model,
messages: [{
role: 'user',
content: prompt,
}],
temperature: 0.7,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0,
max_tokens: 200,
stream: false,
n: completions,
},
proxy,
);

return deduplicateMessages(
completion.choices
Expand Down

0 comments on commit a0db0f3

Please sign in to comment.