Skip to content

Commit 6932178

Browse files
committed
polish
1 parent ff18300 commit 6932178

File tree

7 files changed

+445
-158
lines changed

7 files changed

+445
-158
lines changed

Ask.mjs

Lines changed: 0 additions & 141 deletions
This file was deleted.

Chat.mjs

Lines changed: 159 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,159 @@
1+
import fs from 'fs/promises';
2+
import os from 'os';
3+
import path from 'path';
4+
import { OpenAI } from "openai";
5+
import { Anthropic } from '@anthropic-ai/sdk';
6+
import { Groq } from "groq-sdk";
7+
import { GoogleGenerativeAI } from "@google/generative-ai";
8+
import { encode } from "gpt-tokenizer/esm/model/davinci-codex"; // tokenizer
9+
10+
// Map of model shortcodes to full model names.
// Frozen so consumers cannot accidentally mutate the shared lookup table.
export const MODELS = Object.freeze({
  g: 'gpt-4o',
  G: 'gpt-4-32k-0314',
  h: 'claude-3-haiku-20240307',
  s: 'claude-3-sonnet-20240229',
  o: 'claude-3-opus-20240229',
  l: 'llama3-8b-8192',
  L: 'llama3-70b-8192',
  i: 'gemini-1.5-flash-latest',
  I: 'gemini-1.5-pro-latest',
});
22+
23+
// Factory function to create a stateful OpenAI-compatible chat (OpenAI, Groq).
// Returns an `ask(userMessage, opts)` closure that keeps the running message
// history between calls.
export function openAIChat(clientClass) {
  const messages = [];

  /**
   * Sends a user message and returns the assistant's full reply.
   * @param {string} userMessage - the user's message content
   * @param {object} opts
   * @param {string} [opts.system] - system prompt, seeded on the first call only
   * @param {string} opts.model - model shortcode (see MODELS) or full model name
   * @param {number} [opts.temperature=0.0]
   * @param {number} [opts.max_tokens=4096]
   * @param {boolean} [opts.stream=true] - echo tokens to stdout as they arrive
   * @returns {Promise<string>} the assistant's reply text
   */
  async function ask(userMessage, { system, model, temperature = 0.0, max_tokens = 4096, stream = true }) {
    model = MODELS[model] || model;
    // clientClass.name is "OpenAI" or "Groq"; the token file is named after it.
    const client = new clientClass({ apiKey: await getToken(clientClass.name.toLowerCase()) });

    // Seed the system prompt once, and only if one was provided — pushing
    // {content: undefined} would produce an invalid request.
    if (messages.length === 0 && system) {
      messages.push({ role: "system", content: system });
    }

    messages.push({ role: "user", content: userMessage });

    const params = { messages, model, temperature, max_tokens, stream };

    let result = "";
    const response = await client.chat.completions.create(params);

    if (stream) {
      for await (const chunk of response) {
        const text = chunk.choices[0]?.delta?.content || "";
        process.stdout.write(text);
        result += text;
      }
    } else {
      // Non-streaming responses carry the whole reply in a single choice.
      result = response.choices[0]?.message?.content || "";
    }

    messages.push({ role: 'assistant', content: result });

    return result;
  }

  return ask;
}
55+
56+
// Factory function to create a stateful Anthropic chat.
// Returns an `ask(userMessage, opts)` closure that keeps the running message
// history between calls.
export function anthropicChat(clientClass) {
  const messages = [];

  /**
   * Sends a user message and returns the assistant's full reply.
   * @param {string} userMessage - the user's message content
   * @param {object} opts
   * @param {string} [opts.system] - system prompt (Anthropic takes it as a
   *   top-level request field, not a message)
   * @param {string} opts.model - model shortcode (see MODELS) or full model name
   * @param {number} [opts.temperature=0.0]
   * @param {number} [opts.max_tokens=4096]
   * @param {boolean} [opts.stream=true] - echo tokens to stdout as they arrive
   * @returns {Promise<string>} the assistant's reply text
   */
  async function ask(userMessage, { system, model, temperature = 0.0, max_tokens = 4096, stream = true }) {
    model = MODELS[model] || model;
    const client = new clientClass({ apiKey: await getToken(clientClass.name.toLowerCase()) });

    messages.push({ role: "user", content: userMessage });

    const params = { system, model, temperature, max_tokens, stream };

    let result = "";
    if (stream) {
      const response = client.messages
        .stream({ ...params, messages })
        .on('text', (text) => {
          process.stdout.write(text);
          result += text;
        });
      await response.finalMessage();
    } else {
      // Honor stream:false (previously accepted but ignored): one-shot request.
      const response = await client.messages.create({ ...params, messages });
      result = response.content?.[0]?.text || "";
    }

    messages.push({ role: 'assistant', content: result });

    return result;
  }

  return ask;
}
84+
85+
// Factory function to create a stateful Google Gemini chat.
// Returns an `ask(userMessage, opts)` closure that keeps the conversation
// history between calls.
export function geminiChat(clientClass) {
  const history = [];

  /**
   * Sends a user message and returns the model's full reply.
   * @param {string} userMessage - the user's message content
   * @param {object} opts
   * @param {string} [opts.system] - system instruction for the model
   * @param {string} opts.model - model shortcode (see MODELS) or full model name
   * @param {number} [opts.temperature=0.0]
   * @param {number} [opts.max_tokens=4096]
   * @param {boolean} [opts.stream=true] - echo tokens to stdout as they arrive
   * @returns {Promise<string>} the model's reply text
   */
  async function ask(userMessage, { system, model, temperature = 0.0, max_tokens = 4096, stream = true }) {
    model = MODELS[model] || model;
    // GoogleGenerativeAI takes the raw API key, not an options object.
    const apiKey = await getToken(clientClass.name.toLowerCase());
    const client = new clientClass(apiKey);

    const generationConfig = { maxOutputTokens: max_tokens, temperature };

    const session = client
      .getGenerativeModel({ model, systemInstruction: system, generationConfig })
      .startChat({ history });

    // Record the outgoing turn (after startChat, matching the SDK usage here).
    history.push({ role: "user", parts: [{ text: userMessage }] });

    let reply = "";
    if (stream) {
      const result = await session.sendMessageStream(userMessage);
      for await (const piece of result.stream) {
        const text = piece.text();
        process.stdout.write(text);
        reply += text;
      }
    } else {
      const result = await session.sendMessage(userMessage);
      reply = (await result.response).text();
    }

    history.push({ role: 'model', parts: [{ text: reply }] });

    return reply;
  }

  return ask;
}
122+
123+
// Generic chat factory: resolves a shortcode to its full model name, then
// dispatches to the vendor implementation matching the name's prefix.
// Throws for model names no vendor handles.
export function chat(model) {
  model = MODELS[model] || model;
  if (model.startsWith('gpt'))    return openAIChat(OpenAI);
  if (model.startsWith('claude')) return anthropicChat(Anthropic);
  if (model.startsWith('llama'))  return openAIChat(Groq);   // Groq speaks the OpenAI API
  if (model.startsWith('gemini')) return geminiChat(GoogleGenerativeAI);
  throw new Error(`Unsupported model: ${model}`);
}
138+
139+
// Utility function to obtain the API token for a given vendor.
// Checks the `<VENDOR>_API_KEY` environment variable first, then falls back to
// reading `~/.config/<vendor>.token`. Exits the process if neither is
// available, since no request can be made without credentials.
async function getToken(vendor) {
  // Environment variable takes precedence (e.g. OPENAI_API_KEY); this keeps
  // the existing token-file workflow fully backward compatible.
  const envToken = process.env[`${vendor.toUpperCase()}_API_KEY`];
  if (envToken) {
    return envToken.trim();
  }
  const tokenPath = path.join(os.homedir(), '.config', `${vendor}.token`);
  try {
    return (await fs.readFile(tokenPath, 'utf8')).trim();
  } catch (err) {
    console.error(`Error reading ${vendor}.token file:`, err.message);
    console.error(`Hint: you can set the ${vendor.toUpperCase()}_API_KEY environment variable instead.`);
    process.exit(1);
  }
}
149+
150+
/**
 * Counts the tokens in a string using the davinci-codex tokenizer.
 * @param {string} inputText - text to tokenize
 * @returns {number} number of tokens in the encoded text
 */
export function tokenCount(inputText) {
  return encode(inputText).length;
}

aiemu.mjs

100644100755
Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
1+
#!/usr/bin/env node
2+
13
import process from "process";
24
import fs from 'fs/promises';
3-
import { asker, MODELS } from './Ask.mjs';
5+
import { chat, MODELS } from './Chat.mjs';
46

5-
//const MODEL = "claude-3-opus-20240229";
6-
const MODEL = "g";
7+
const MODEL = process.argv[2] || "g";
78

89
const SYSTEM = `
910
You're a game emulator. You can emulate ANY game, but text-based. Your goal is
@@ -226,15 +227,15 @@ If the player provides feedback after a '#', use it to improve the experience.
226227
console.log(ASCII_ART);
227228

228229
console.log("");
229-
console.log(`\x1b[32mUsing \x1b[1m${MODEL}\x1b[0m`);
230+
console.log(`\x1b[32mUsing \x1b[1m${MODELS[MODEL]||MODEL}\x1b[0m`);
230231
console.log("");
231232

232233
process.stdout.write("Game: ");
233234
const game = (await new Promise(resolve => process.stdin.once('data', data => resolve(data.toString())))).trim();
234235

235236
console.log(`Emulating ${game}...\n\n`);
236237

237-
const ask = asker();
238+
const ask = chat(MODEL);
238239
let messages = [
239240
{role: "user", content: `# GAME: ${game}`},
240241
];

chatsh.mjs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
import readline from 'readline';
44
import { exec } from 'child_process';
55
import { promisify } from 'util';
6-
import { asker, MODELS } from './Ask.mjs';
6+
import { chat, MODELS } from './Chat.mjs';
77

88
const execAsync = promisify(exec);
99

@@ -43,7 +43,7 @@ const rl = readline.createInterface({
4343
});
4444

4545
// Create a stateful asker
46-
const ask = asker();
46+
const ask = chat(MODEL);
4747

4848
// Utility function to prompt the user for input
4949
async function prompt(query) {

holefill.mjs

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,10 @@
11
#!/usr/bin/env node
2-
import { asker, MODELS, token_count } from './Ask.mjs';
2+
import { chat, MODELS, tokenCount } from './Chat.mjs';
33
import process from "process";
44
import fs from 'fs/promises';
55
import os from 'os';
66
import path from 'path';
77

8-
const ask = asker();
9-
108
const system = `
119
You are a HOLE FILLER. You are provided with a file containing holes, formatted
1210
as '{{HOLE_NAME}}'. Your TASK is to complete with a string to replace this hole
@@ -98,9 +96,10 @@ function hypothenuse(a, b) {
9896
- Answer ONLY with the <COMPLETION/> block. Do NOT include anything outside it.
9997
`;
10098

101-
var file = process.argv[2];
102-
var mini = process.argv[3];
99+
var file = process.argv[2];
100+
var mini = process.argv[3];
103101
var model = process.argv[4] || "g";
102+
var ask = chat(model);
104103

105104
if (!file) {
106105
console.log("Usage: holefill <file> [<shortened_file>] [<model_name>]");
@@ -130,7 +129,7 @@ while ((match = regex.exec(mini_code)) !== null) {
130129

131130
await fs.writeFile(mini, mini_code, 'utf-8');
132131

133-
var tokens = token_count(mini_code);
132+
var tokens = tokenCount(mini_code);
134133
var holes = mini_code.match(/{{\w+}}/g) || [];
135134

136135
if (holes.length === 0 && mini_code.indexOf("??") !== -1 && (mini_code.match(/\?\?/g) || []).length == 1) {

0 commit comments

Comments
 (0)