Merged
6 changes: 4 additions & 2 deletions src/main/presenter/configPresenter/modelConfig.ts
@@ -141,7 +141,8 @@ export class ModelConfigHelper {
vision: config.vision,
functionCall: config.functionCall || false,
reasoning: config.reasoning || false,
type: config.type || ModelType.Chat
type: config.type || ModelType.Chat,
thinkingBudget: config.thinkingBudget
}
break
}
@@ -158,7 +159,8 @@ export class ModelConfigHelper {
vision: false,
functionCall: false,
reasoning: false,
type: ModelType.Chat
type: ModelType.Chat,
thinkingBudget: undefined
}
}
}
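For orientation, a minimal sketch of the config shape this change implies. Only `thinkingBudget` is taken from the diff above; the other field names mirror values referenced elsewhere in this PR, and the real `ModelConfig` type lives in the repo, not here:

```ts
// Sketch only, not the repo's actual type definition.
// thinkingBudget stays optional: the fallback branch above deliberately sets it
// to undefined so callers can tell "never configured" apart from an explicit 0.
interface ModelConfigSketch {
  maxTokens: number
  contextLength: number
  temperature: number
  vision: boolean
  functionCall: boolean
  reasoning: boolean
  type: string // ModelType in the real code
  thinkingBudget?: number // -1 dynamic, 0 off, >0 token cap (see the defaults below)
}
```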
9 changes: 6 additions & 3 deletions src/main/presenter/configPresenter/modelDefaultSettings.ts
@@ -345,7 +345,8 @@ export const defaultModelsSettings: DefaultModelSetting[] = [
match: ['gemini-2.5-pro'],
vision: true,
functionCall: true,
reasoning: true
reasoning: true,
thinkingBudget: -1 // dynamic thinking
},
{
id: 'models/gemini-2.5-flash',
@@ -356,7 +357,8 @@ export const defaultModelsSettings: DefaultModelSetting[] = [
match: ['models/gemini-2.5-flash', 'gemini-2.5-flash'],
vision: true,
functionCall: true,
reasoning: true
reasoning: true,
thinkingBudget: -1 // dynamic thinking
},
{
id: 'models/gemini-2.5-flash-lite-preview-06-17',
@@ -367,7 +369,8 @@ export const defaultModelsSettings: DefaultModelSetting[] = [
match: ['models/gemini-2.5-flash-lite-preview-06-17', 'gemini-2.5-flash-lite-preview'],
vision: true,
functionCall: true,
reasoning: true
reasoning: true,
thinkingBudget: 0 // no thinking by default
},
{
id: 'models/gemini-2.0-flash',
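These defaults follow the Gemini 2.5 convention: -1 requests dynamic thinking (the model decides how much to think), 0 disables thinking, and a positive value caps the thinking-token budget. As a hedged sketch, a provider could translate the stored value into a request option like this — the exact SDK field names are an assumption, not taken from this PR:

```ts
// Sketch, assuming the provider forwards the value as Gemini's
// thinkingConfig.thinkingBudget; adapt to whatever the actual SDK expects.
function buildThinkingConfig(
  thinkingBudget?: number
): { thinkingBudget: number } | undefined {
  // Omit the option entirely when nothing was configured,
  // letting the API fall back to its own default behaviour.
  if (thinkingBudget === undefined) return undefined
  // -1 = dynamic thinking, 0 = thinking off, >0 = max thinking tokens.
  return { thinkingBudget }
}
```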
7 changes: 6 additions & 1 deletion src/main/presenter/llmProviderPresenter/index.ts
@@ -307,7 +307,8 @@ export class LLMProviderPresenter implements ILlmProviderPresenter {
eventId: string,
temperature: number = 0.6,
maxTokens: number = 4096,
enabledMcpTools?: string[]
enabledMcpTools?: string[],
thinkingBudget?: number
): AsyncGenerator<LLMAgentEvent, void, unknown> {
console.log(`[Agent Loop] Starting agent loop for event: ${eventId} with model: ${modelId}`)
if (!this.canStartNewStream()) {
@@ -321,6 +322,10 @@ export class LLMProviderPresenter implements ILlmProviderPresenter {
const abortController = new AbortController()
const modelConfig = this.configPresenter.getModelConfig(modelId, providerId)

if (thinkingBudget !== undefined) {
modelConfig.thinkingBudget = thinkingBudget
}

this.activeStreams.set(eventId, {
isGenerating: true,
providerId,
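The new optional parameter acts as a per-call override: when the caller supplies `thinkingBudget`, it replaces whatever `getModelConfig` resolved, so downstream provider code keeps reading a single source (`modelConfig.thinkingBudget`). In isolation the precedence looks like this — the helper name and generic type are illustrative, not from the repo:

```ts
// Illustrative helper: a per-call budget wins over the model's stored config.
interface WithThinkingBudget {
  thinkingBudget?: number
}

function applyThinkingBudgetOverride<T extends WithThinkingBudget>(
  modelConfig: T,
  thinkingBudget?: number
): T {
  if (thinkingBudget !== undefined) {
    modelConfig.thinkingBudget = thinkingBudget
  }
  return modelConfig
}
```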
7 changes: 4 additions & 3 deletions src/main/presenter/sqlitePresenter/importData.ts
@@ -120,8 +120,8 @@ export class DataImporter {
`INSERT INTO conversations (
conv_id, title, created_at, updated_at, system_prompt,
temperature, context_length, max_tokens, provider_id,
model_id, is_pinned, is_new, artifacts
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
model_id, is_pinned, is_new, artifacts, thinking_budget
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
)
.run(
conv.conv_id,
@@ -136,7 +136,8 @@ export class DataImporter {
conv.model_id,
conv.is_pinned || 0,
conv.is_new || 0,
conv.artifacts || 0
conv.artifacts || 0,
conv.thinking_budget !== undefined ? conv.thinking_budget : null
)

// Import all messages for this conversation
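The explicit `!== undefined ? … : null` keeps older exports importable: backups created before this change carry no `thinking_budget` field, and better-sqlite3 rejects `undefined` bind parameters, so the absent value is stored as NULL. A minimal sketch of that rule, with an illustrative helper name:

```ts
// Illustrative: map an absent budget to SQL NULL before binding.
function toSqliteThinkingBudget(budget: number | undefined): number | null {
  return budget !== undefined ? budget : null
}

// Rows from a pre-thinking-budget export simply get NULL:
// toSqliteThinkingBudget(undefined) === null
// toSqliteThinkingBudget(0) === 0   // an explicit "no thinking" survives import
```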
33 changes: 25 additions & 8 deletions src/main/presenter/sqlitePresenter/tables/conversations.ts
@@ -18,6 +18,7 @@ type ConversationRow = {
is_new: number
is_pinned: number
enabled_mcp_tools: string | null
thinking_budget: number | null
}

// Parse JSON fields
@@ -81,12 +82,18 @@ export class ConversationsTable extends BaseTable {
ALTER TABLE conversations ADD COLUMN enabled_mcp_tools TEXT DEFAULT '[]';
`
}
if (version === 4) {
return `
-- Add thinking_budget column
ALTER TABLE conversations ADD COLUMN thinking_budget INTEGER DEFAULT NULL;
`
}

return null
}

getLatestVersion(): number {
return 3
return 4
}

async create(title: string, settings: Partial<CONVERSATION_SETTINGS> = {}): Promise<string> {
@@ -105,9 +112,10 @@
is_new,
artifacts,
is_pinned,
enabled_mcp_tools
enabled_mcp_tools,
thinking_budget
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? ,?)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`)
const conv_id = nanoid()
const now = Date.now()
@@ -125,7 +133,8 @@
1,
settings.artifacts || 0,
0, // Default is_pinned to 0
settings.enabledMcpTools ? JSON.stringify(settings.enabledMcpTools) : '[]'
settings.enabledMcpTools ? JSON.stringify(settings.enabledMcpTools) : '[]',
settings.thinkingBudget !== undefined ? settings.thinkingBudget : null
)
return conv_id
}
@@ -148,7 +157,8 @@
is_new,
artifacts,
is_pinned,
enabled_mcp_tools
enabled_mcp_tools,
thinking_budget
FROM conversations
WHERE conv_id = ?
`
@@ -174,7 +184,8 @@
providerId: result.providerId,
modelId: result.modelId,
artifacts: result.artifacts as 0 | 1,
enabledMcpTools: getJsonField(result.enabled_mcp_tools, [])
enabledMcpTools: getJsonField(result.enabled_mcp_tools, []),
thinkingBudget: result.thinking_budget !== null ? result.thinking_budget : undefined
}
}
}
@@ -231,6 +242,10 @@
updates.push('enabled_mcp_tools = ?')
params.push(JSON.stringify(data.settings.enabledMcpTools))
}
if (data.settings.thinkingBudget !== undefined) {
updates.push('thinking_budget = ?')
params.push(data.settings.thinkingBudget)
}
}
if (updates.length > 0 || data.updatedAt) {
updates.push('updated_at = ?')
@@ -275,7 +290,8 @@
is_new,
artifacts,
is_pinned,
enabled_mcp_tools
enabled_mcp_tools,
thinking_budget
FROM conversations
ORDER BY updated_at DESC
LIMIT ? OFFSET ?
@@ -300,7 +316,8 @@
providerId: row.providerId,
modelId: row.modelId,
artifacts: row.artifacts as 0 | 1,
enabledMcpTools: getJsonField(row.enabled_mcp_tools, [])
enabledMcpTools: getJsonField(row.enabled_mcp_tools, []),
thinkingBudget: row.thinking_budget !== null ? row.thinking_budget : undefined
}
}))
}
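The column arrives through the existing version-gated migration mechanism (latest schema version bumped from 3 to 4), and the reads above map SQL NULL back to `undefined` so `CONVERSATION_SETTINGS.thinkingBudget` stays an optional field. The read side of that convention, as an illustrative one-liner:

```ts
// NULL (including every row created before schema version 4) surfaces as
// undefined rather than 0, so "unset" is never confused with
// "thinking explicitly disabled". Helper name is illustrative.
const thinkingBudgetFromRow = (value: number | null): number | undefined =>
  value !== null ? value : undefined
```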
25 changes: 18 additions & 7 deletions src/main/presenter/threadPresenter/index.ts
@@ -738,11 +738,16 @@ export class ThreadPresenter implements IThreadPresenter {
}
})
const mergedSettings = { ...defaultSettings, ...settings }
const defaultModelsSettings = this.configPresenter.getModelConfig(mergedSettings.modelId)
const defaultModelsSettings = this.configPresenter.getModelConfig(
mergedSettings.modelId,
mergedSettings.providerId
)
if (defaultModelsSettings) {
mergedSettings.maxTokens = defaultModelsSettings.maxTokens
mergedSettings.contextLength = defaultModelsSettings.contextLength
mergedSettings.temperature = defaultModelsSettings.temperature
// Reset thinkingBudget to the model's default config; leave it undefined if the model config has none
mergedSettings.thinkingBudget = defaultModelsSettings.thinkingBudget
}
if (settings.artifacts) {
mergedSettings.artifacts = settings.artifacts
@@ -1467,7 +1472,8 @@
modelId: currentModelId,
temperature: currentTemperature,
maxTokens: currentMaxTokens,
enabledMcpTools: crrentEnabledMcpTools
enabledMcpTools: currentEnabledMcpTools,
thinkingBudget: currentThinkingBudget
} = currentConversation.settings
const stream = this.llmProviderPresenter.startStreamCompletion(
currentProviderId, // use the latest settings
@@ -1476,7 +1482,8 @@
state.message.id,
currentTemperature, // use the latest settings
currentMaxTokens, // use the latest settings
crrentEnabledMcpTools
currentEnabledMcpTools,
currentThinkingBudget
)
for await (const event of stream) {
const msg = event.data
@@ -1574,7 +1581,8 @@
this.throwIfCancelled(state.message.id)

// 7. Prepare the prompt content
const { providerId, modelId, temperature, maxTokens, enabledMcpTools } = conversation.settings
const { providerId, modelId, temperature, maxTokens, enabledMcpTools, thinkingBudget } =
conversation.settings
const modelConfig = this.configPresenter.getModelConfig(modelId, providerId)

const { finalContent, promptTokens } = await this.preparePromptContent(
@@ -1642,7 +1650,8 @@
state.message.id,
temperature,
maxTokens,
enabledMcpTools
enabledMcpTools,
thinkingBudget
)
for await (const event of stream) {
const msg = event.data
@@ -3650,7 +3659,8 @@
throw new Error(errorMsg)
}

const { providerId, modelId, temperature, maxTokens, enabledMcpTools } = conversation.settings
const { providerId, modelId, temperature, maxTokens, enabledMcpTools, thinkingBudget } =
conversation.settings
const modelConfig = this.configPresenter.getModelConfig(modelId, providerId)

if (!modelConfig) {
@@ -3704,7 +3714,8 @@
messageId,
temperature,
maxTokens,
enabledMcpTools
enabledMcpTools,
thinkingBudget
)

for await (const event of stream) {
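Taken together, `thinkingBudget` now travels the same path as `temperature` and `maxTokens`: stored on the conversation, destructured from `conversation.settings` in each streaming path, and passed as the new trailing argument of `startStreamCompletion`. A sketch of the shared call signature — parameter names and order are copied from this diff, while the concrete types are stand-ins:

```ts
// Stand-in types; the real code uses ChatMessage[] and LLMAgentEvent.
type StartStreamCompletion = (
  providerId: string,
  messages: unknown[],
  modelId: string,
  eventId: string,
  temperature?: number, // defaults to 0.6 in the presenter
  maxTokens?: number, // defaults to 4096 in the presenter
  enabledMcpTools?: string[],
  thinkingBudget?: number // new optional trailing argument
) => AsyncGenerator<unknown, void, unknown>
```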