Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 18 additions & 7 deletions frontend/src/components/settings/EmbeddingFormModal.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,9 @@ const PROVIDER_DEFAULTS: Record<
},
};

// Runtime type guard: narrows a raw <select> string to the LLMProvider union
// so no `as LLMProvider` cast is needed at the call site.
const KNOWN_PROVIDERS = new Set<string>(["openai", "anthropic", "gemini", "ollama"]);
const isLLMProvider = (v: string): v is LLMProvider => KNOWN_PROVIDERS.has(v);

export default function EmbeddingFormModal({ isOpen, onClose, onSave, initialData }: Props) {
const [name, setName] = useState("");
const [provider, setProvider] = useState<LLMProvider>("openai");
Expand All @@ -58,14 +61,18 @@ export default function EmbeddingFormModal({ isOpen, onClose, onSave, initialDat
setApiKey(initialData.api_key || "");
setModelName(initialData.model_name);
setDimensions(initialData.dimensions?.toString() || "");
} else {
// set defaults for new model
const defaults = PROVIDER_DEFAULTS[provider];
} else if (isOpen) {
// set defaults for new model only when opening
const defaultProvider: LLMProvider = "openai";
const defaults = PROVIDER_DEFAULTS[defaultProvider];
setName("");
setProvider(defaultProvider);
setEndpoint(defaults.endpoint);
setModelName(defaults.model);
setDimensions(defaults.dimensions?.toString() || "");
setApiKey("");
}
}, [initialData, provider]);
}, [isOpen, initialData]);

const handleProviderChange = (newProvider: LLMProvider) => {
setProvider(newProvider);
Expand All @@ -92,8 +99,8 @@ export default function EmbeddingFormModal({ isOpen, onClose, onSave, initialDat
if (provider !== "ollama" && !apiKey.trim()) {
newErrors.apiKey = "api key is required for this provider";
}
if (dimensions && isNaN(parseInt(dimensions))) {
newErrors.dimensions = "dimensions must be a number";
if (dimensions && (isNaN(Number(dimensions)) || Number(dimensions) < 1)) {
newErrors.dimensions = "dimensions must be a number greater than 0";
}
Comment on lines +102 to 104
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

Fractional dimensions silently truncated by parseInt.

A value like "1.5" passes validation (1.5 >= 1) but parseInt("1.5") on line 119 yields 1, silently dropping the fractional part. Dimensions should be a positive integer — tighten the check:

🐛 Proposed fix
-    if (dimensions && (isNaN(Number(dimensions)) || Number(dimensions) < 1)) {
-      newErrors.dimensions = "dimensions must be a number greater than 0";
+    if (dimensions && (isNaN(Number(dimensions)) || !Number.isInteger(Number(dimensions)) || Number(dimensions) < 1)) {
+      newErrors.dimensions = "dimensions must be a positive integer";
     }

Also applies to: 119-119

🤖 Prompt for AI Agents
In `@frontend/src/components/settings/EmbeddingFormModal.tsx` around lines 102 -
104, The current validation allows fractional strings like "1.5" because it only
checks Number(dimensions) >= 1 and later parseInt truncates; update the
validation in EmbeddingFormModal to require an integer by checking
Number.isInteger(Number(dimensions)) && Number(dimensions) > 0 and set
newErrors.dimensions accordingly (e.g., "dimensions must be a positive
integer"); ensure any subsequent conversion uses parseInt(dimensions, 10) only
after this integer check so fractional inputs are rejected rather than silently
truncated.


setErrors(newErrors);
Expand All @@ -110,6 +117,7 @@ export default function EmbeddingFormModal({ isOpen, onClose, onSave, initialDat
api_key: apiKey.trim() || null,
model_name: modelName.trim(),
dimensions: dimensions ? parseInt(dimensions) : null,
is_default: initialData?.is_default ?? false,
};

setSaving(true);
Expand Down Expand Up @@ -163,7 +171,10 @@ export default function EmbeddingFormModal({ isOpen, onClose, onSave, initialDat
<FormControl.Label sx={{ color: "fg.default" }}>Provider</FormControl.Label>
<Select
value={provider}
onChange={(e) => handleProviderChange(e.target.value as LLMProvider)}
onChange={(e) => {
const val = e.target.value;
if (isLLMProvider(val)) handleProviderChange(val);
}}
block
>
{PROVIDERS.map((p) => (
Expand Down
47 changes: 29 additions & 18 deletions frontend/src/components/settings/LLMFormModal.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,19 +2,8 @@ import { useState, useEffect } from "react";
import { Box, Button, TextInput, FormControl, Select, Dialog } from "@primer/react";
import type { LLMModelConfig, LLMProvider } from "../../types";

interface Props {
isOpen: boolean;
onClose: () => void;
onSave: (config: LLMModelConfig) => Promise<void>;
initialData?: LLMModelConfig;
}

const PROVIDERS: { value: LLMProvider; label: string }[] = [
{ value: "openai", label: "OpenAI" },
{ value: "anthropic", label: "Anthropic" },
{ value: "gemini", label: "Google Gemini" },
{ value: "ollama", label: "Ollama" },
];
// Type guard for provider strings coming from the <Select> element; keeps the
// change handler free of unchecked `as LLMProvider` assertions.
const isLLMProvider = (v: string): v is LLMProvider =>
  v === "openai" || v === "anthropic" || v === "gemini" || v === "ollama";

const PROVIDER_DEFAULTS: Record<LLMProvider, { endpoint: string; model: string }> = {
openai: {
Expand All @@ -35,6 +24,20 @@ const PROVIDER_DEFAULTS: Record<LLMProvider, { endpoint: string; model: string }
},
};

/** Props for the LLM model create/edit dialog. */
interface Props {
  /** Whether the dialog is shown; form defaults are (re)applied when this becomes true. */
  isOpen: boolean;
  /** Called when the dialog is dismissed without saving. */
  onClose: () => void;
  /** Persists the configured model; the promise rejection is handled by the caller. */
  onSave: (config: LLMModelConfig) => Promise<void>;
  /** Existing configuration when editing; omit to create a new model with provider defaults. */
  initialData?: LLMModelConfig;
}

// Options rendered in the provider <Select>.
// `satisfies` validates every entry against the LLMProvider contract (a typo
// or missing provider is a compile error) while preserving the literal
// value/label types, which the explicit array annotation used to widen away.
const PROVIDERS = [
  { value: "openai", label: "OpenAI" },
  { value: "anthropic", label: "Anthropic" },
  { value: "gemini", label: "Google Gemini" },
  { value: "ollama", label: "Ollama" },
] satisfies readonly { value: LLMProvider; label: string }[];

export default function LLMFormModal({ isOpen, onClose, onSave, initialData }: Props) {
const [name, setName] = useState("");
const [provider, setProvider] = useState<LLMProvider>("openai");
Expand All @@ -51,13 +54,17 @@ export default function LLMFormModal({ isOpen, onClose, onSave, initialData }: P
setEndpoint(initialData.endpoint);
setApiKey(initialData.api_key || "");
setModelName(initialData.model_name);
} else {
// set defaults for new model
const defaults = PROVIDER_DEFAULTS[provider];
} else if (isOpen) {
// set defaults for new model only when opening
const defaultProvider: LLMProvider = "openai";
const defaults = PROVIDER_DEFAULTS[defaultProvider];
setName("");
setProvider(defaultProvider);
setEndpoint(defaults.endpoint);
setModelName(defaults.model);
setApiKey("");
}
}, [initialData, provider]);
}, [isOpen, initialData]);

const handleProviderChange = (newProvider: LLMProvider) => {
setProvider(newProvider);
Expand Down Expand Up @@ -97,6 +104,7 @@ export default function LLMFormModal({ isOpen, onClose, onSave, initialData }: P
endpoint: endpoint.trim(),
api_key: apiKey.trim() || null,
model_name: modelName.trim(),
is_default: initialData?.is_default ?? false,
};

setSaving(true);
Expand Down Expand Up @@ -149,7 +157,10 @@ export default function LLMFormModal({ isOpen, onClose, onSave, initialData }: P
<FormControl.Label sx={{ color: "fg.default" }}>Provider</FormControl.Label>
<Select
value={provider}
onChange={(e) => handleProviderChange(e.target.value as LLMProvider)}
onChange={(e) => {
const val = e.target.value;
if (isLLMProvider(val)) handleProviderChange(val);
}}
block
>
{PROVIDERS.map((p) => (
Expand Down
13 changes: 6 additions & 7 deletions frontend/src/pages/Settings.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@ export default function Settings() {
const isMountedRef = useRef(true);

useEffect(() => {
// Reset to true on each mount (important for React StrictMode double-mount)
isMountedRef.current = true;

loadLlmModels();
loadEmbeddingModels();
loadLangfuseStatus();
Expand Down Expand Up @@ -64,14 +67,10 @@ export default function Settings() {

const loadLangfuseStatus = async () => {
try {
const res = await fetch("/api/langfuse/status");
if (!res.ok) {
throw new Error(`http ${res.status}`);
}
const data = await res.json();
const data = await llmConfigApi.getLangfuseStatus();
if (isMountedRef.current) {
setLangfuseEnabled(data.enabled);
setLangfuseHost(data.host);
setLangfuseHost(data.host ?? null);
}
} catch (error) {
const message = error instanceof Error ? error.message : "Unknown error";
Expand Down Expand Up @@ -214,7 +213,7 @@ export default function Settings() {
loadEmbeddingModels();
} catch (error) {
const message = error instanceof Error ? error.message : "Unknown error";
toast.error(`Failed to save LLM model: ${message}`);
toast.error(`Failed to save embedding model: ${message}`);
throw error;
}
};
Expand Down
11 changes: 11 additions & 0 deletions frontend/src/services/llmConfigApi.ts
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,17 @@ class LLMConfigApi {
if (!response.ok) throw new Error(`http ${response.status}`);
return response.json();
}

async getLangfuseStatus(): Promise<{
enabled: boolean;
host?: string;
public_key?: string;
error?: string;
}> {
const response = await fetch(`${API_BASE}/langfuse/status`);
if (!response.ok) throw new Error(`http ${response.status}`);
return response.json();
}
}

export const llmConfigApi = new LLMConfigApi();
6 changes: 6 additions & 0 deletions lib/entities/llm_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,12 @@ def validate_str_fields(cls, v: str | None) -> str:
"""convert None to empty string for database compatibility"""
return v if v is not None else ""

@field_validator("dimensions", mode="before")
@classmethod
def validate_dimensions(cls, v: int | None) -> int:
    """coerce None to 0

    Runs in "before" mode, so ``v`` is the raw input value prior to
    field validation; mirrors ``validate_str_fields`` above, which maps
    None to "" for database compatibility.
    NOTE(review): this makes 0 the stored sentinel for "no dimensions";
    confirm downstream consumers treat 0 and NULL equivalently.
    """
    return v if v is not None else 0


class ConnectionTestResult(BaseModel):
success: bool
Expand Down
Loading
Loading