@@ -13,13 +13,13 @@ import (
 
 // OpenAIClient handles OpenAI API compatible requests
 type OpenAIClient struct {
-	APIKey        string
-	BaseURL       string
-	Model         string
-	HTTPClient    *http.Client
-	Validated     bool
-	ValidationErr string
-	ServiceName   string
+	APIKey          string
+	BaseURL         string
+	Model           string
+	HTTPClient      *http.Client
+	Validated       bool
+	ValidationErr   string
+	ServiceName     string
 	AvailableModels []string
 	AutoSelectModel bool // True if no model was specified, should auto-select
 }
@@ -38,8 +38,8 @@ type Message struct {
 
 // OpenAIResponse represents the API response with flexible error handling
 type OpenAIResponse struct {
-	Choices []Choice `json:"choices"`
-	Error any `json:"error,omitempty"` // Can be string or object
+	Choices []Choice `json:"choices"`
+	Error   any      `json:"error,omitempty"` // Can be string or object
 }
 
 // Choice represents a response choice
@@ -94,12 +94,12 @@ func (r *OpenAIResponse) getErrorMessage() string {
 	if r.Error == nil {
 		return ""
 	}
-
+
 	// Handle string error format (like LM Studio)
 	if errStr, ok := r.Error.(string); ok {
 		return errStr
 	}
-
+
 	// Handle object error format (like OpenAI)
 	if errMap, ok := r.Error.(map[string]any); ok {
 		if message, exists := errMap["message"]; exists {
@@ -111,7 +111,7 @@ func (r *OpenAIResponse) getErrorMessage() string {
 		jsonBytes, _ := json.Marshal(r.Error)
 		return string(jsonBytes)
 	}
-
+
 	// Fallback to string representation
 	return fmt.Sprintf("%v", r.Error)
 }
@@ -157,7 +157,7 @@ func NewOpenAIClient(model string) *OpenAIClient {
 		ServiceName:     serviceName,
 		AutoSelectModel: autoSelectModel,
 		HTTPClient:      &http.Client{
-			Timeout: 30 * time.Second,
+			Timeout: 60 * time.Second,
 		},
 	}
 
@@ -227,7 +227,7 @@ func (c *OpenAIClient) AnalyzeLog(logMessage, severity, timestamp string, attrib
 		if flexErr := json.Unmarshal(bodyBytes, &flexResponse); flexErr != nil {
 			return "", fmt.Errorf("failed to decode response: %v (body: %s)", err, string(bodyBytes))
 		}
-
+
 		// Try to extract response manually from flexible structure
 		if choices, ok := flexResponse["choices"].([]any); ok && len(choices) > 0 {
 			if choice, ok := choices[0].(map[string]any); ok {
@@ -238,7 +238,7 @@ func (c *OpenAIClient) AnalyzeLog(logMessage, severity, timestamp string, attrib
 				}
 			}
 		}
-
+
 		return "", fmt.Errorf("failed to parse response structure: %v (body: %s)", err, string(bodyBytes))
 	}
 
@@ -332,7 +332,7 @@ Log Details (for reference):
 		if flexErr := json.Unmarshal(bodyBytes, &flexResponse); flexErr != nil {
 			return "", fmt.Errorf("failed to decode response: %v (body: %s)", err, string(bodyBytes))
 		}
-
+
 		// Try to extract response manually from flexible structure
 		if choices, ok := flexResponse["choices"].([]any); ok && len(choices) > 0 {
 			if choice, ok := choices[0].(map[string]any); ok {
@@ -343,7 +343,7 @@ Log Details (for reference):
 				}
 			}
 		}
-
+
 		return "", fmt.Errorf("failed to parse response structure: %v (body: %s)", err, string(bodyBytes))
 	}
 
@@ -415,7 +415,7 @@ func (c *OpenAIClient) ValidateConfiguration() {
 		c.ValidationErr = "No models available from AI service"
 		return
 	}
-
+
 	// Smart model selection: prefer common models or pick first available
 	selectedModel := c.selectBestDefaultModel(models)
 	c.Model = selectedModel
@@ -496,7 +496,7 @@ func (c *OpenAIClient) GetAvailableModels() ([]string, error) {
 func (c *OpenAIClient) getOllamaModels() ([]string, error) {
 	// Remove /v1 suffix if present for Ollama native API
 	baseURL := strings.TrimSuffix(c.BaseURL, "/v1")
-
+
 	req, err := http.NewRequest("GET", baseURL+"/api/tags", nil)
 	if err != nil {
 		return nil, fmt.Errorf("failed to create request: %v", err)
@@ -534,7 +534,7 @@ func (c *OpenAIClient) getOllamaModels() ([]string, error) {
 func (c *OpenAIClient) analyzeWithOllama(prompt string) (string, error) {
 	// Remove /v1 suffix if present for Ollama native API
 	baseURL := strings.TrimSuffix(c.BaseURL, "/v1")
-
+
 	request := OllamaGenerateRequest{
 		Model:  c.Model,
 		Prompt: prompt,
@@ -581,17 +581,17 @@ func (c *OpenAIClient) selectBestDefaultModel(availableModels []string) string {
 	if len(availableModels) == 0 {
 		return ""
 	}
-
+
 	// Preferred models in order of preference
 	preferredModels := []string{
-		"gpt-4", "gpt-4-turbo", "gpt-4o", "gpt-4o-mini", // OpenAI GPT-4 variants
-		"gpt-3.5-turbo", "gpt-3.5-turbo-16k", // OpenAI GPT-3.5 variants
-		"gpt-oss:20b", "gpt-oss:7b", "gpt-oss", // OSS GPT models (common in Ollama)
-		"llama3", "llama3.1", "llama3:8b", "llama3:70b", // Ollama Llama variants
-		"mistral", "mistral:7b", "mistral:latest", // Ollama Mistral variants
-		"codellama", "codellama:7b", "codellama:13b", // Ollama CodeLlama variants
-	}
-
+		"gpt-4", "gpt-4-turbo", "gpt-4o", "gpt-4o-mini", // OpenAI GPT-4 variants
+		"gpt-3.5-turbo", "gpt-3.5-turbo-16k", // OpenAI GPT-3.5 variants
+		"gpt-oss:20b", "gpt-oss:7b", "gpt-oss", // OSS GPT models (common in Ollama)
+		"llama3", "llama3.1", "llama3:8b", "llama3:70b", // Ollama Llama variants
+		"mistral", "mistral:7b", "mistral:latest", // Ollama Mistral variants
+		"codellama", "codellama:7b", "codellama:13b", // Ollama CodeLlama variants
+	}
+
 	// First, try to find any preferred model (exact match)
 	for _, preferred := range preferredModels {
 		for _, available := range availableModels {
@@ -600,7 +600,7 @@ func (c *OpenAIClient) selectBestDefaultModel(availableModels []string) string {
 			}
 		}
 	}
-
+
 	// Second, try case-insensitive partial matches with preferred models
 	for _, preferred := range preferredModels {
 		lowerPreferred := strings.ToLower(preferred)
@@ -611,7 +611,7 @@ func (c *OpenAIClient) selectBestDefaultModel(availableModels []string) string {
 			}
 		}
 	}
-
+
 	// Fallback: return the first available model
 	return availableModels[0]
 }
@@ -657,4 +657,4 @@ func (c *OpenAIClient) GetValidationStatus() (bool, string, string, string) {
 		return false, "No API key configured", "None", ""
 	}
 	return c.Validated, c.ValidationErr, c.ServiceName, c.Model
-}
+}