
Commit 78ff19c

Bump AI client timeout to 60s
1 parent: bfda893

1 file changed (+32, -32)

internal/ai/openai.go

Lines changed: 32 additions & 32 deletions
@@ -13,13 +13,13 @@ import (
 
 // OpenAIClient handles OpenAI API compatible requests
 type OpenAIClient struct {
-	APIKey          string
-	BaseURL         string
-	Model           string
-	HTTPClient      *http.Client
-	Validated       bool
-	ValidationErr   string
-	ServiceName     string
+	APIKey          string
+	BaseURL         string
+	Model           string
+	HTTPClient      *http.Client
+	Validated       bool
+	ValidationErr   string
+	ServiceName     string
 	AvailableModels []string
 	AutoSelectModel bool // True if no model was specified, should auto-select
 }
@@ -38,8 +38,8 @@ type Message struct {
 
 // OpenAIResponse represents the API response with flexible error handling
 type OpenAIResponse struct {
-	Choices []Choice `json:"choices"`
-	Error   any      `json:"error,omitempty"` // Can be string or object
+	Choices []Choice `json:"choices"`
+	Error   any      `json:"error,omitempty"` // Can be string or object
 }
 
 // Choice represents a response choice
@@ -94,12 +94,12 @@ func (r *OpenAIResponse) getErrorMessage() string {
 	if r.Error == nil {
 		return ""
 	}
-
+
 	// Handle string error format (like LM Studio)
 	if errStr, ok := r.Error.(string); ok {
 		return errStr
 	}
-
+
 	// Handle object error format (like OpenAI)
 	if errMap, ok := r.Error.(map[string]any); ok {
 		if message, exists := errMap["message"]; exists {
@@ -111,7 +111,7 @@ func (r *OpenAIResponse) getErrorMessage() string {
 		jsonBytes, _ := json.Marshal(r.Error)
 		return string(jsonBytes)
 	}
-
+
 	// Fallback to string representation
 	return fmt.Sprintf("%v", r.Error)
 }
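
The getErrorMessage hunks above are whitespace-only, but they show the flexible error handling this client relies on: the error field is decoded as any so it can hold either a bare string (as local servers such as LM Studio return) or an OpenAI-style error object. A minimal standalone sketch of the same pattern, using a hypothetical errorToMessage helper rather than this file's types:

package main

import (
	"encoding/json"
	"fmt"
)

// errorToMessage mirrors the pattern above: accept either error shape
// and reduce it to a plain string.
func errorToMessage(errField any) string {
	if errField == nil {
		return ""
	}
	// Bare string error (some local OpenAI-compatible servers do this).
	if s, ok := errField.(string); ok {
		return s
	}
	// OpenAI-style object error with a "message" field.
	if m, ok := errField.(map[string]any); ok {
		if msg, ok := m["message"].(string); ok {
			return msg
		}
		// No usable message field: fall back to the raw JSON.
		b, _ := json.Marshal(m)
		return string(b)
	}
	return fmt.Sprintf("%v", errField)
}

func main() {
	fmt.Println(errorToMessage("model not loaded"))
	fmt.Println(errorToMessage(map[string]any{"message": "invalid api key", "type": "auth_error"}))
}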
@@ -157,7 +157,7 @@ func NewOpenAIClient(model string) *OpenAIClient {
 		ServiceName:     serviceName,
 		AutoSelectModel: autoSelectModel,
 		HTTPClient: &http.Client{
-			Timeout: 30 * time.Second,
+			Timeout: 60 * time.Second,
 		},
 	}
 
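This is the commit's single functional change: the shared http.Client timeout is raised from 30 to 60 seconds. In net/http that timeout bounds the entire exchange, including reading the response body, so slow generations now get up to a minute before the client gives up. A short sketch, not taken from this repository, of the same client-level timeout with a stricter per-request deadline layered on through a context (the localhost URL is only an illustrative default):

package main

import (
	"context"
	"fmt"
	"net/http"
	"time"
)

func main() {
	// Client-level timeout, matching the new value in NewOpenAIClient.
	client := &http.Client{Timeout: 60 * time.Second}

	// An optional, tighter per-request deadline; whichever limit is hit
	// first cancels the request.
	ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)
	defer cancel()

	req, err := http.NewRequestWithContext(ctx, http.MethodGet, "http://localhost:11434/api/tags", nil)
	if err != nil {
		panic(err)
	}
	resp, err := client.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}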
@@ -227,7 +227,7 @@ func (c *OpenAIClient) AnalyzeLog(logMessage, severity, timestamp string, attrib
 		if flexErr := json.Unmarshal(bodyBytes, &flexResponse); flexErr != nil {
 			return "", fmt.Errorf("failed to decode response: %v (body: %s)", err, string(bodyBytes))
 		}
-
+
 		// Try to extract response manually from flexible structure
 		if choices, ok := flexResponse["choices"].([]any); ok && len(choices) > 0 {
 			if choice, ok := choices[0].(map[string]any); ok {
@@ -238,7 +238,7 @@ func (c *OpenAIClient) AnalyzeLog(logMessage, severity, timestamp string, attrib
 				}
 			}
 		}
-
+
 		return "", fmt.Errorf("failed to parse response structure: %v (body: %s)", err, string(bodyBytes))
 	}
 
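The code around this hunk (again only whitespace changes) falls back to a generically decoded map when the typed decode fails, then walks choices[0] by hand. A self-contained sketch of that fallback, assuming the usual OpenAI chat shape choices[0].message.content (the exact path this file uses is cut off by the hunk):

package main

import (
	"encoding/json"
	"fmt"
)

// extractContent is a hypothetical helper that pulls the assistant text out of
// a generically decoded OpenAI-style chat completion body.
func extractContent(body []byte) (string, bool) {
	var flex map[string]any
	if err := json.Unmarshal(body, &flex); err != nil {
		return "", false
	}
	choices, ok := flex["choices"].([]any)
	if !ok || len(choices) == 0 {
		return "", false
	}
	choice, ok := choices[0].(map[string]any)
	if !ok {
		return "", false
	}
	message, ok := choice["message"].(map[string]any)
	if !ok {
		return "", false
	}
	content, ok := message["content"].(string)
	return content, ok
}

func main() {
	body := []byte(`{"choices":[{"message":{"role":"assistant","content":"disk is full"}}]}`)
	if content, ok := extractContent(body); ok {
		fmt.Println(content)
	}
}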
@@ -332,7 +332,7 @@ Log Details (for reference):
 		if flexErr := json.Unmarshal(bodyBytes, &flexResponse); flexErr != nil {
 			return "", fmt.Errorf("failed to decode response: %v (body: %s)", err, string(bodyBytes))
 		}
-
+
 		// Try to extract response manually from flexible structure
 		if choices, ok := flexResponse["choices"].([]any); ok && len(choices) > 0 {
 			if choice, ok := choices[0].(map[string]any); ok {
@@ -343,7 +343,7 @@ Log Details (for reference):
 				}
 			}
 		}
-
+
 		return "", fmt.Errorf("failed to parse response structure: %v (body: %s)", err, string(bodyBytes))
 	}
 
@@ -415,7 +415,7 @@ func (c *OpenAIClient) ValidateConfiguration() {
 		c.ValidationErr = "No models available from AI service"
 		return
 	}
-
+
 	// Smart model selection: prefer common models or pick first available
 	selectedModel := c.selectBestDefaultModel(models)
 	c.Model = selectedModel
@@ -496,7 +496,7 @@ func (c *OpenAIClient) GetAvailableModels() ([]string, error) {
 func (c *OpenAIClient) getOllamaModels() ([]string, error) {
 	// Remove /v1 suffix if present for Ollama native API
 	baseURL := strings.TrimSuffix(c.BaseURL, "/v1")
-
+
 	req, err := http.NewRequest("GET", baseURL+"/api/tags", nil)
 	if err != nil {
 		return nil, fmt.Errorf("failed to create request: %v", err)
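
getOllamaModels strips the /v1 suffix used by the OpenAI-compatible endpoint and queries Ollama's native /api/tags endpoint for the locally installed models. A minimal sketch of that flow, assuming Ollama's documented tags response shape ({"models":[{"name":"..."}]}) and an illustrative default base URL:

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
	"time"
)

type tagsResponse struct {
	Models []struct {
		Name string `json:"name"`
	} `json:"models"`
}

func listOllamaModels(baseURL string) ([]string, error) {
	// The OpenAI-compatible endpoint lives under /v1; the native API does not.
	baseURL = strings.TrimSuffix(baseURL, "/v1")

	client := &http.Client{Timeout: 60 * time.Second}
	resp, err := client.Get(baseURL + "/api/tags")
	if err != nil {
		return nil, fmt.Errorf("failed to reach Ollama: %w", err)
	}
	defer resp.Body.Close()

	var tags tagsResponse
	if err := json.NewDecoder(resp.Body).Decode(&tags); err != nil {
		return nil, fmt.Errorf("failed to decode /api/tags response: %w", err)
	}
	names := make([]string, 0, len(tags.Models))
	for _, m := range tags.Models {
		names = append(names, m.Name)
	}
	return names, nil
}

func main() {
	models, err := listOllamaModels("http://localhost:11434/v1")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(models)
}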
@@ -534,7 +534,7 @@ func (c *OpenAIClient) getOllamaModels() ([]string, error) {
 func (c *OpenAIClient) analyzeWithOllama(prompt string) (string, error) {
 	// Remove /v1 suffix if present for Ollama native API
 	baseURL := strings.TrimSuffix(c.BaseURL, "/v1")
-
+
 	request := OllamaGenerateRequest{
 		Model:  c.Model,
 		Prompt: prompt,
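
analyzeWithOllama likewise targets the native API, building an OllamaGenerateRequest from the model and prompt. A sketch of a non-streaming call to Ollama's /api/generate endpoint; the request and response field names here follow Ollama's documented API rather than this repository's types:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

type generateRequest struct {
	Model  string `json:"model"`
	Prompt string `json:"prompt"`
	Stream bool   `json:"stream"`
}

type generateResponse struct {
	Response string `json:"response"`
}

func generate(baseURL, model, prompt string) (string, error) {
	payload, err := json.Marshal(generateRequest{Model: model, Prompt: prompt, Stream: false})
	if err != nil {
		return "", err
	}
	client := &http.Client{Timeout: 60 * time.Second}
	resp, err := client.Post(baseURL+"/api/generate", "application/json", bytes.NewReader(payload))
	if err != nil {
		return "", fmt.Errorf("generate request failed: %w", err)
	}
	defer resp.Body.Close()

	var out generateResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		return "", fmt.Errorf("failed to decode generate response: %w", err)
	}
	return out.Response, nil
}

func main() {
	answer, err := generate("http://localhost:11434", "llama3", "Summarize this log line: disk full on /var")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(answer)
}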
@@ -581,17 +581,17 @@ func (c *OpenAIClient) selectBestDefaultModel(availableModels []string) string {
 	if len(availableModels) == 0 {
 		return ""
 	}
-
+
 	// Preferred models in order of preference
 	preferredModels := []string{
-		"gpt-4", "gpt-4-turbo", "gpt-4o", "gpt-4o-mini", // OpenAI GPT-4 variants
-		"gpt-3.5-turbo", "gpt-3.5-turbo-16k", // OpenAI GPT-3.5 variants
-		"gpt-oss:20b", "gpt-oss:7b", "gpt-oss", // OSS GPT models (common in Ollama)
-		"llama3", "llama3.1", "llama3:8b", "llama3:70b", // Ollama Llama variants
-		"mistral", "mistral:7b", "mistral:latest", // Ollama Mistral variants
-		"codellama", "codellama:7b", "codellama:13b", // Ollama CodeLlama variants
-	}
-
+		"gpt-4", "gpt-4-turbo", "gpt-4o", "gpt-4o-mini", // OpenAI GPT-4 variants
+		"gpt-3.5-turbo", "gpt-3.5-turbo-16k", // OpenAI GPT-3.5 variants
+		"gpt-oss:20b", "gpt-oss:7b", "gpt-oss", // OSS GPT models (common in Ollama)
+		"llama3", "llama3.1", "llama3:8b", "llama3:70b", // Ollama Llama variants
+		"mistral", "mistral:7b", "mistral:latest", // Ollama Mistral variants
+		"codellama", "codellama:7b", "codellama:13b", // Ollama CodeLlama variants
+	}
+
 	// First, try to find any preferred model (exact match)
 	for _, preferred := range preferredModels {
 		for _, available := range availableModels {
@@ -600,7 +600,7 @@ func (c *OpenAIClient) selectBestDefaultModel(availableModels []string) string {
 			}
 		}
 	}
-
+
 	// Second, try case-insensitive partial matches with preferred models
 	for _, preferred := range preferredModels {
 		lowerPreferred := strings.ToLower(preferred)
@@ -611,7 +611,7 @@ func (c *OpenAIClient) selectBestDefaultModel(availableModels []string) string {
 			}
 		}
 	}
-
+
 	// Fallback: return the first available model
 	return availableModels[0]
 }
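
selectBestDefaultModel tries an exact match against the preference list, then a case-insensitive partial match (so llama3 also matches llama3:8b), and finally falls back to the first model the service reports. A condensed sketch of that three-step strategy with a shortened preference list:

package main

import (
	"fmt"
	"strings"
)

func selectModel(available []string) string {
	if len(available) == 0 {
		return ""
	}
	// Shortened preference list, ordered from most to least preferred.
	preferred := []string{"gpt-4o", "gpt-4", "gpt-3.5-turbo", "llama3", "mistral"}

	// 1. Exact match against the preference list, in order of preference.
	for _, want := range preferred {
		for _, have := range available {
			if have == want {
				return have
			}
		}
	}
	// 2. Case-insensitive partial match (e.g. "llama3" matches "llama3:8b").
	for _, want := range preferred {
		w := strings.ToLower(want)
		for _, have := range available {
			if strings.Contains(strings.ToLower(have), w) {
				return have
			}
		}
	}
	// 3. Fallback: whatever the service lists first.
	return available[0]
}

func main() {
	fmt.Println(selectModel([]string{"qwen2:7b", "llama3:8b", "codellama:13b"})) // prints llama3:8b
}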
@@ -657,4 +657,4 @@ func (c *OpenAIClient) GetValidationStatus() (bool, string, string, string) {
 		return false, "No API key configured", "None", ""
 	}
 	return c.Validated, c.ValidationErr, c.ServiceName, c.Model
-}
+}
