5 changes: 5 additions & 0 deletions .changeset/tasty-mails-take.md
@@ -0,0 +1,5 @@
---
"kilo-code": minor
---

only fetch router models for providers with configured profiles
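
For context, a minimal sketch of the gating this changeset describes, assuming a helper named `hasConfiguredProfile` that consults the saved profile list (`listApiConfigMeta`); the names and shapes below are illustrative, not the actual handler code:

```typescript
// Sketch only: shapes are assumptions based on the tests in this PR.
interface ApiConfigMeta {
	apiProvider?: string
}

// True when at least one saved profile uses the given provider.
const hasConfiguredProfile = (profiles: ApiConfigMeta[] | undefined, provider: string): boolean =>
	(profiles ?? []).some((meta) => meta.apiProvider === provider)

// Only routers with at least one configured profile get their models fetched.
const routersToFetch = (routers: string[], profiles: ApiConfigMeta[] | undefined): string[] =>
	routers.filter((router) => hasConfiguredProfile(profiles, router))

// Example: with only an openrouter profile saved, litellm is skipped.
routersToFetch(["openrouter", "litellm"], [{ apiProvider: "openrouter" }]) // -> ["openrouter"]
```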
38 changes: 36 additions & 2 deletions .vscode/launch.json
@@ -43,7 +43,37 @@
"VSCODE_DEBUG_MODE": "true"
},
"resolveSourceMapLocations": ["${workspaceFolder}/**", "!**/node_modules/**"],
"presentation": { "hidden": false, "group": "tasks", "order": 1 }
"presentation": {
"hidden": false,
"group": "tasks",
"order": 1
}
},
{
"name": "Run Extension [Isolated,Proxy]",
"type": "extensionHost",
"request": "launch",
"runtimeExecutable": "${execPath}",
"args": [
"--extensionDevelopmentPath=${workspaceFolder}/src",
"--disable-extensions",
"${input:extensionLaunchDir}"
],
"sourceMaps": true,
"outFiles": ["${workspaceFolder}/dist/**/*.js"],
"preLaunchTask": "${defaultBuildTask}",
"env": {
"NODE_ENV": "development",
"VSCODE_DEBUG_MODE": "true",
"http_proxy": "http://localhost:8080",
"https_proxy": "http://localhost:8080"
},
"resolveSourceMapLocations": ["${workspaceFolder}/**", "!**/node_modules/**"],
"presentation": {
"hidden": false,
"group": "tasks",
"order": 1
}
},
{
"name": "Run Extension [Local Backend]",
@@ -60,7 +90,11 @@
"KILOCODE_BACKEND_BASE_URL": "${input:kilocodeBackendBaseUrl}"
},
"resolveSourceMapLocations": ["${workspaceFolder}/**", "!**/node_modules/**"],
"presentation": { "hidden": false, "group": "tasks", "order": 2 }
"presentation": {
"hidden": false,
"group": "tasks",
"order": 2
}
}
],
"inputs": [
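The new "Run Extension [Isolated,Proxy]" configuration assumes an HTTP proxy is already listening on localhost:8080; the launch config only points `http_proxy`/`https_proxy` at it. In practice that would typically be an off-the-shelf tool such as mitmproxy, but a minimal Node pass-through proxy could look like the sketch below (the port is the only value taken from the diff, the rest is illustrative):

```typescript
import http from "node:http"
import net from "node:net"

// Forward plain-HTTP requests (covers the http_proxy setting).
const proxy = http.createServer((req, res) => {
	const target = new URL(req.url ?? "http://localhost/")
	const upstream = http.request(
		{
			hostname: target.hostname,
			port: target.port || 80,
			path: target.pathname + target.search,
			method: req.method,
			headers: req.headers,
		},
		(upstreamRes) => {
			res.writeHead(upstreamRes.statusCode ?? 502, upstreamRes.headers)
			upstreamRes.pipe(res)
		},
	)
	req.pipe(upstream)
})

// Tunnel HTTPS via CONNECT (covers the https_proxy setting).
proxy.on("connect", (req, clientSocket, head) => {
	const [host, port] = (req.url ?? "").split(":")
	const serverSocket = net.connect(Number(port) || 443, host, () => {
		clientSocket.write("HTTP/1.1 200 Connection Established\r\n\r\n")
		serverSocket.write(head)
		serverSocket.pipe(clientSocket)
		clientSocket.pipe(serverSocket)
	})
})

proxy.listen(8080, () => console.log("debug proxy listening on http://localhost:8080"))
```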
3 changes: 1 addition & 2 deletions pnpm-lock.yaml

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion src/api/providers/fetchers/modelCache.ts
@@ -96,7 +96,7 @@ export const getModels = async (options: GetModelsOptions): Promise<ModelRecord>
break
case "litellm":
// Type safety ensures apiKey and baseUrl are always provided for LiteLLM.
models = await getLiteLLMModels(options.apiKey, options.baseUrl)
models = await getLiteLLMModels(options.apiKey || "", options.baseUrl || "")
break
// kilocode_change start
case "kilocode": {
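A note on the litellm line above: with fetching now gated on configured profiles, the options for the litellm case can presumably arrive without credentials, so the call defaults both values to empty strings. A hedged sketch of the relaxed option shape this implies (the real `GetModelsOptions` union is not shown in this diff):

```typescript
// Assumption: apiKey and baseUrl are optional for litellm in GetModelsOptions.
type LiteLLMGetModelsOptions = {
	provider: "litellm"
	apiKey?: string
	baseUrl?: string
}

// Mirrors the defaulting in modelCache.ts above.
const toLiteLLMArgs = (options: LiteLLMGetModelsOptions): [apiKey: string, baseUrl: string] => [
	options.apiKey || "",
	options.baseUrl || "",
]
```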
1 change: 0 additions & 1 deletion src/api/providers/synthetic.ts
@@ -1,7 +1,6 @@
// kilocode_change - provider added

import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"

import { type SyntheticModelId, syntheticDefaultModelId, syntheticModels } from "@roo-code/types"

106 changes: 61 additions & 45 deletions src/core/webview/__tests__/ClineProvider.spec.ts
@@ -2739,6 +2739,25 @@ describe("ClineProvider - Router Models", () => {
inceptionLabsBaseUrl: "https://api.inceptionlabs.ai/v1/",
// kilocode_change end
},
// Need to include listApiConfigMeta for hasConfiguredProfile check
listApiConfigMeta: [
{ apiProvider: "openrouter" },
{ apiProvider: "gemini" },
{ apiProvider: "requesty" },
{ apiProvider: "glama" },
{ apiProvider: "unbound" },
{ apiProvider: "kilocode" },
{ apiProvider: "ollama" },
{ apiProvider: "vercel-ai-gateway" },
{ apiProvider: "deepinfra" },
{ apiProvider: "nano-gpt" },
{ apiProvider: "ovhcloud" },
{ apiProvider: "inception" },
{ apiProvider: "synthetic" },
{ apiProvider: "roo" },
{ apiProvider: "chutes" },
{ apiProvider: "litellm" },
],
} as any)

const mockModels = {
@@ -2776,11 +2795,11 @@ describe("ClineProvider - Router Models", () => {
baseUrl: "https://api.inceptionlabs.ai/v1/",
})
// kilocode_change end
expect(getModels).toHaveBeenCalledWith({ provider: "requesty", apiKey: "requesty-key" })
expect(getModels).toHaveBeenCalledWith({ provider: "glama" })
expect(getModels).toHaveBeenCalledWith({ provider: "requesty", apiKey: "requesty-key", baseUrl: undefined })
expect(getModels).toHaveBeenCalledWith({ provider: "glama", apiKey: "glama-key" })
expect(getModels).toHaveBeenCalledWith({ provider: "unbound", apiKey: "unbound-key" })
expect(getModels).toHaveBeenCalledWith({ provider: "vercel-ai-gateway" })
expect(getModels).toHaveBeenCalledWith({ provider: "deepinfra" })
expect(getModels).toHaveBeenCalledWith({ provider: "vercel-ai-gateway", apiKey: undefined })
expect(getModels).toHaveBeenCalledWith({ provider: "deepinfra", apiKey: undefined, baseUrl: undefined })
expect(getModels).toHaveBeenCalledWith(
expect.objectContaining({
provider: "roo",
@@ -2846,6 +2865,25 @@ describe("ClineProvider - Router Models", () => {
syntheticApiKey: "synthetic-key",
// kilocode_change end
},
// Need to include listApiConfigMeta for hasConfiguredProfile check
listApiConfigMeta: [
{ apiProvider: "openrouter" },
{ apiProvider: "gemini" },
{ apiProvider: "requesty" },
{ apiProvider: "glama" },
{ apiProvider: "unbound" },
{ apiProvider: "kilocode" },
{ apiProvider: "ollama" },
{ apiProvider: "vercel-ai-gateway" },
{ apiProvider: "deepinfra" },
{ apiProvider: "nano-gpt" },
{ apiProvider: "ovhcloud" },
{ apiProvider: "inception" },
{ apiProvider: "synthetic" },
{ apiProvider: "roo" },
{ apiProvider: "chutes" },
{ apiProvider: "litellm" },
],
} as any)

const mockModels = {
@@ -2955,47 +2993,6 @@ describe("ClineProvider - Router Models", () => {
})
})

test("handles requestRouterModels with LiteLLM values from message", async () => {
await provider.resolveWebviewView(mockWebviewView)
const messageHandler = (mockWebviewView.webview.onDidReceiveMessage as any).mock.calls[0][0]

// Mock state without LiteLLM config
vi.spyOn(provider, "getState").mockResolvedValue({
apiConfiguration: {
openRouterApiKey: "openrouter-key",
requestyApiKey: "requesty-key",
glamaApiKey: "glama-key",
unboundApiKey: "unbound-key",
// kilocode_change start
ovhCloudAiEndpointsApiKey: "ovhcloud-key",
chutesApiKey: "chutes-key",
// kilocode_change end
// No litellm config
},
} as any)

const mockModels = {
"model-1": { maxTokens: 4096, contextWindow: 8192, description: "Test model", supportsPromptCache: false },
}
const { getModels } = await import("../../../api/providers/fetchers/modelCache")
vi.mocked(getModels).mockResolvedValue(mockModels)

await messageHandler({
type: "requestRouterModels",
values: {
litellmApiKey: "message-litellm-key",
litellmBaseUrl: "http://message-url:4000",
},
})

// Verify LiteLLM was called with values from message
expect(getModels).toHaveBeenCalledWith({
provider: "litellm",
apiKey: "message-litellm-key",
baseUrl: "http://message-url:4000",
})
})

test("skips LiteLLM when neither config nor message values are provided", async () => {
await provider.resolveWebviewView(mockWebviewView)
const messageHandler = (mockWebviewView.webview.onDidReceiveMessage as any).mock.calls[0][0]
@@ -3013,6 +3010,25 @@
// kilocode_change end
// No litellm config
},
// Need to include listApiConfigMeta for hasConfiguredProfile check - all providers except litellm
listApiConfigMeta: [
{ apiProvider: "openrouter" },
{ apiProvider: "gemini" },
{ apiProvider: "requesty" },
{ apiProvider: "glama" },
{ apiProvider: "unbound" },
{ apiProvider: "kilocode" },
{ apiProvider: "ollama" },
{ apiProvider: "vercel-ai-gateway" },
{ apiProvider: "deepinfra" },
{ apiProvider: "nano-gpt" },
{ apiProvider: "ovhcloud" },
{ apiProvider: "inception" },
{ apiProvider: "synthetic" },
{ apiProvider: "roo" },
{ apiProvider: "chutes" },
// Note: litellm NOT in list - testing that it's skipped
],
} as any)

const mockModels = {
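The three getState mocks above repeat the same `listApiConfigMeta` literal. Purely as an illustration (not part of the diff), a small helper could build it from a provider list:

```typescript
// Hypothetical test helper: build profile metadata entries from provider names.
const configMetaFor = (providers: string[]) => providers.map((apiProvider) => ({ apiProvider }))

// e.g. listApiConfigMeta: configMetaFor(["openrouter", "gemini", "requesty", /* ... */ "litellm"])
```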
@@ -97,6 +97,12 @@ describe("webviewMessageHandler - requestRouterModels provider filter", () => {
})

it("fetches only requested provider when values.provider is present ('roo')", async () => {
// Need to include the provider in listApiConfigMeta so hasConfiguredProfile returns true
mockProvider.getState.mockResolvedValue({
apiConfiguration: {},
listApiConfigMeta: [{ apiProvider: "roo" }],
})

await webviewMessageHandler(
mockProvider as any,
{
@@ -148,6 +154,12 @@ describe("webviewMessageHandler - requestRouterModels provider filter", () => {
})

it("supports filtering another single provider ('openrouter')", async () => {
// Need to include the provider in listApiConfigMeta so hasConfiguredProfile returns true
mockProvider.getState.mockResolvedValue({
apiConfiguration: {},
listApiConfigMeta: [{ apiProvider: "openrouter" }],
})

await webviewMessageHandler(
mockProvider as any,
{