-
-
Notifications
You must be signed in to change notification settings - Fork 303
Feature: Responses API support; also introduces a new configuration system and structured logging #142
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: master
Are you sure you want to change the base?
Changes from all commits
a57c238
87899a1
4fc0fa0
2b9733b
505f648
9477b45
44551f9
708ae33
47fb3e4
2800ed3
619d482
5c6e4c6
32cb10a
9051a21
eeeb820
3f69f13
4c0d775
1ec12db
174e868
83cdfde
6f47926
01d4adb
3cdc32c
f7835a4
318855e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| @@ -0,0 +1,142 @@ | ||||||||||||||
| import consola from "consola" | ||||||||||||||
| import fs from "node:fs" | ||||||||||||||
|
|
||||||||||||||
| import { PATHS } from "./paths" | ||||||||||||||
|
|
||||||||||||||
| export interface AppConfig { | ||||||||||||||
| extraPrompts?: Record<string, string> | ||||||||||||||
| smallModel?: string | ||||||||||||||
| modelReasoningEfforts?: Record< | ||||||||||||||
| string, | ||||||||||||||
| "none" | "minimal" | "low" | "medium" | "high" | "xhigh" | ||||||||||||||
| > | ||||||||||||||
| } | ||||||||||||||
|
|
||||||||||||||
| const gpt5ExplorationPrompt = `## Exploration and reading files | ||||||||||||||
| - **Think first.** Before any tool call, decide ALL files/resources you will need. | ||||||||||||||
| - **Batch everything.** If you need multiple files (even from different places), read them together. | ||||||||||||||
| - **multi_tool_use.parallel** Use multi_tool_use.parallel to parallelize tool calls and only this. | ||||||||||||||
| - **Only make sequential calls if you truly cannot know the next file without seeing a result first.** | ||||||||||||||
| - **Workflow:** (a) plan all needed reads → (b) issue one parallel batch → (c) analyze results → (d) repeat if new, unpredictable reads arise.` | ||||||||||||||
|
|
||||||||||||||
| const defaultConfig: AppConfig = { | ||||||||||||||
| extraPrompts: { | ||||||||||||||
| "gpt-5-mini": gpt5ExplorationPrompt, | ||||||||||||||
| "gpt-5.1-codex-max": gpt5ExplorationPrompt, | ||||||||||||||
| }, | ||||||||||||||
| smallModel: "gpt-5-mini", | ||||||||||||||
| modelReasoningEfforts: { | ||||||||||||||
| "gpt-5-mini": "low", | ||||||||||||||
| }, | ||||||||||||||
| } | ||||||||||||||
|
|
||||||||||||||
| let cachedConfig: AppConfig | null = null | ||||||||||||||
|
|
||||||||||||||
| function ensureConfigFile(): void { | ||||||||||||||
| try { | ||||||||||||||
| fs.accessSync(PATHS.CONFIG_PATH, fs.constants.R_OK | fs.constants.W_OK) | ||||||||||||||
| } catch { | ||||||||||||||
| fs.mkdirSync(PATHS.APP_DIR, { recursive: true }) | ||||||||||||||
| fs.writeFileSync( | ||||||||||||||
| PATHS.CONFIG_PATH, | ||||||||||||||
| `${JSON.stringify(defaultConfig, null, 2)}\n`, | ||||||||||||||
| "utf8", | ||||||||||||||
| ) | ||||||||||||||
| try { | ||||||||||||||
| fs.chmodSync(PATHS.CONFIG_PATH, 0o600) | ||||||||||||||
| } catch { | ||||||||||||||
|
||||||||||||||
| } catch { | |
| } catch (error) { | |
| consola.warn( | |
| `Failed to set secure permissions (0o600) on config file at ${PATHS.CONFIG_PATH}. File may have overly permissive permissions.`, | |
| error | |
| ) |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,182 @@ | ||
| import consola, { type ConsolaInstance } from "consola" | ||
| import fs from "node:fs" | ||
| import path from "node:path" | ||
| import util from "node:util" | ||
|
|
||
| import { PATHS } from "./paths" | ||
| import { state } from "./state" | ||
|
|
||
| const LOG_RETENTION_DAYS = 7 | ||
| const LOG_RETENTION_MS = LOG_RETENTION_DAYS * 24 * 60 * 60 * 1000 | ||
| const CLEANUP_INTERVAL_MS = 24 * 60 * 60 * 1000 | ||
| const LOG_DIR = path.join(PATHS.APP_DIR, "logs") | ||
| const FLUSH_INTERVAL_MS = 1000 | ||
| const MAX_BUFFER_SIZE = 100 | ||
|
|
||
| const logStreams = new Map<string, fs.WriteStream>() | ||
| const logBuffers = new Map<string, Array<string>>() | ||
|
|
||
| const ensureLogDirectory = () => { | ||
| if (!fs.existsSync(LOG_DIR)) { | ||
| fs.mkdirSync(LOG_DIR, { recursive: true }) | ||
| } | ||
| } | ||
|
|
||
| const cleanupOldLogs = () => { | ||
| if (!fs.existsSync(LOG_DIR)) { | ||
| return | ||
| } | ||
|
|
||
| const now = Date.now() | ||
|
|
||
| for (const entry of fs.readdirSync(LOG_DIR)) { | ||
| const filePath = path.join(LOG_DIR, entry) | ||
|
|
||
| let stats: fs.Stats | ||
| try { | ||
| stats = fs.statSync(filePath) | ||
| } catch { | ||
| continue | ||
| } | ||
|
|
||
| if (!stats.isFile()) { | ||
| continue | ||
| } | ||
|
|
||
| if (now - stats.mtimeMs > LOG_RETENTION_MS) { | ||
| try { | ||
| fs.rmSync(filePath) | ||
| } catch { | ||
| continue | ||
| } | ||
| } | ||
| } | ||
| } | ||
|
|
||
| const formatArgs = (args: Array<unknown>) => | ||
| args | ||
| .map((arg) => | ||
| typeof arg === "string" ? arg : ( | ||
| util.inspect(arg, { depth: null, colors: false }) | ||
| ), | ||
| ) | ||
| .join(" ") | ||
|
|
||
| const sanitizeName = (name: string) => { | ||
| const normalized = name | ||
| .toLowerCase() | ||
| .replaceAll(/[^a-z0-9]+/g, "-") | ||
| .replaceAll(/^-+|-+$/g, "") | ||
|
|
||
| return normalized === "" ? "handler" : normalized | ||
| } | ||
|
|
||
| const getLogStream = (filePath: string): fs.WriteStream => { | ||
| let stream = logStreams.get(filePath) | ||
| if (!stream || stream.destroyed) { | ||
| stream = fs.createWriteStream(filePath, { flags: "a" }) | ||
| logStreams.set(filePath, stream) | ||
|
|
||
| stream.on("error", (error: unknown) => { | ||
| console.warn("Log stream error", error) | ||
| logStreams.delete(filePath) | ||
| }) | ||
| } | ||
| return stream | ||
| } | ||
|
|
||
| const flushBuffer = (filePath: string) => { | ||
| const buffer = logBuffers.get(filePath) | ||
| if (!buffer || buffer.length === 0) { | ||
| return | ||
| } | ||
|
|
||
| const stream = getLogStream(filePath) | ||
| const content = buffer.join("\n") + "\n" | ||
| stream.write(content, (error) => { | ||
| if (error) { | ||
| console.warn("Failed to write handler log", error) | ||
| } | ||
| }) | ||
|
|
||
| logBuffers.set(filePath, []) | ||
| } | ||
|
|
||
| const flushAllBuffers = () => { | ||
| for (const filePath of logBuffers.keys()) { | ||
| flushBuffer(filePath) | ||
| } | ||
| } | ||
|
|
||
| const appendLine = (filePath: string, line: string) => { | ||
| let buffer = logBuffers.get(filePath) | ||
| if (!buffer) { | ||
| buffer = [] | ||
| logBuffers.set(filePath, buffer) | ||
| } | ||
|
|
||
| buffer.push(line) | ||
|
|
||
| if (buffer.length >= MAX_BUFFER_SIZE) { | ||
| flushBuffer(filePath) | ||
| } | ||
| } | ||
|
|
||
| setInterval(flushAllBuffers, FLUSH_INTERVAL_MS) | ||
|
|
||
| const cleanup = () => { | ||
| flushAllBuffers() | ||
| for (const stream of logStreams.values()) { | ||
| stream.end() | ||
| } | ||
| logStreams.clear() | ||
| logBuffers.clear() | ||
| } | ||
|
|
||
| process.on("exit", cleanup) | ||
| process.on("SIGINT", () => { | ||
| cleanup() | ||
| process.exit(0) | ||
| }) | ||
| process.on("SIGTERM", () => { | ||
| cleanup() | ||
| process.exit(0) | ||
| }) | ||
|
|
||
| let lastCleanup = 0 | ||
|
|
||
| export const createHandlerLogger = (name: string): ConsolaInstance => { | ||
| ensureLogDirectory() | ||
|
|
||
| const sanitizedName = sanitizeName(name) | ||
| const instance = consola.withTag(name) | ||
|
|
||
| if (state.verbose) { | ||
| instance.level = 5 | ||
| } | ||
| instance.setReporters([]) | ||
|
|
||
| instance.addReporter({ | ||
| log(logObj) { | ||
| ensureLogDirectory() | ||
|
|
||
| if (Date.now() - lastCleanup > CLEANUP_INTERVAL_MS) { | ||
| cleanupOldLogs() | ||
| lastCleanup = Date.now() | ||
| } | ||
|
Comment on lines
+163
to
+166
|
||
|
|
||
| const date = logObj.date | ||
| const dateKey = date.toLocaleDateString("sv-SE") | ||
| const timestamp = date.toLocaleString("sv-SE", { hour12: false }) | ||
| const filePath = path.join(LOG_DIR, `${sanitizedName}-${dateKey}.log`) | ||
| const message = formatArgs(logObj.args as Array<unknown>) | ||
| const line = `[${timestamp}] [${logObj.type}] [${logObj.tag || name}]${ | ||
| message ? ` ${message}` : "" | ||
| }` | ||
|
|
||
| appendLine(filePath, line) | ||
| }, | ||
| }) | ||
|
|
||
| return instance | ||
| } | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The README table formatting is inconsistent. Line 212 has "OpenAI Most advanced interface" which appears to be missing proper capitalization and could be clearer. Consider: "OpenAI's most advanced interface for generating model responses" or simply "Generates model responses using the Responses API".