Commit

add experimental OpenAI support
BennyThink committed Feb 25, 2024
1 parent a2522d7 commit d7a0208
Showing 4 changed files with 96 additions and 4 deletions.
2 changes: 1 addition & 1 deletion ai.go → ai_gemini.go
@@ -37,7 +37,7 @@ type Data struct {
	Contents []Content `json:"contents"`
}

-func askAI(userID int64) string {
+func askGemini(userID int64) string {
	var data Data
	chats := getChats(userID)
	if len(chats) > 0 {
92 changes: 92 additions & 0 deletions ai_openai.go
@@ -0,0 +1,92 @@
package main

import (
	"bytes"
	"encoding/json"
	log "github.com/sirupsen/logrus"
	"io"
	"net/http"
)

type ChatMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type ChatRequest struct {
	Model    string        `json:"model"`
	Messages []ChatMessage `json:"messages"`
}

type ChatResponse struct {
	Choices []struct {
		Message struct {
			Content string `json:"content"`
		} `json:"message"`
	} `json:"choices"`
}

func mapRole(originalRole string) string {
	switch originalRole {
	case "USER":
		return "user"
	case "MODEL":
		return "assistant"
	default:
		return "user"
	}
}

const openAI = "https://gptmos.com/v1/chat/completions"

func askOpenAI(userID int64) string {
	var chatReq ChatRequest
	chats := getChats(userID)
	if len(chats) > 0 {
		for _, chat := range chats {
			chatReq.Messages = append(chatReq.Messages, ChatMessage{
				Role:    mapRole(chat.Role),
				Content: chat.Text,
			})
		}
	}

	chatReq.Model = "gpt-4-0125-preview"

	jsonData, err := json.Marshal(chatReq)
	if err != nil {
		log.Errorf("Failed to marshal request: %v", err)
		return err.Error()
	}
	log.Infoln(string(jsonData))

	// Build the POST request against the OpenAI-compatible endpoint and attach the API key
	req, _ := http.NewRequest("POST", openAI, bytes.NewBuffer(jsonData))
	req.Header.Set("Authorization", "Bearer "+OpenAIKey)
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		log.Errorf("Failed to make request: %v", err)
		return err.Error()
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		text := string(body)
		log.Errorf("Request failed, %s", text)
		return text
	}

	var chatResp ChatResponse
	if err := json.NewDecoder(resp.Body).Decode(&chatResp); err != nil {
		log.Errorf("Failed to decode response: %v", err)
		return err.Error()
	}

	if len(chatResp.Choices) > 0 && len(chatResp.Choices[0].Message.Content) > 0 {
		return chatResp.Choices[0].Message.Content
	}

	return "no response found"
}
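
One thing worth noting while experimenting: askOpenAI sets only the Authorization header, and many OpenAI-compatible servers also expect Content-Type: application/json. The following is a minimal standalone sketch of the same call pattern, not part of this commit, with that header added; the endpoint, model name, and OPENAI_API_KEY variable are taken from the diff and config.go, everything else is illustrative.

package main

// Standalone sketch (assumptions noted inline) of the request/response cycle
// that askOpenAI performs, runnable outside the bot.

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"os"
)

type message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type request struct {
	Model    string    `json:"model"`
	Messages []message `json:"messages"`
}

type response struct {
	Choices []struct {
		Message struct {
			Content string `json:"content"`
		} `json:"message"`
	} `json:"choices"`
}

func main() {
	body, err := json.Marshal(request{
		Model:    "gpt-4-0125-preview",
		Messages: []message{{Role: "user", Content: "Say hello in one sentence."}},
	})
	if err != nil {
		panic(err)
	}

	req, err := http.NewRequest(http.MethodPost, "https://gptmos.com/v1/chat/completions", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer "+os.Getenv("OPENAI_API_KEY"))
	// Assumption: strict OpenAI-compatible servers require an explicit JSON content type.
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out response
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	if len(out.Choices) > 0 {
		fmt.Println(out.Choices[0].Message.Content)
	}
}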
2 changes: 1 addition & 1 deletion config.go
@@ -53,7 +53,7 @@ const (

var apiKey = os.Getenv("GEMINI_API_KEY")
var geminiURL = "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key=" + apiKey
-
+var OpenAIKey = os.Getenv("OPENAI_API_KEY")
var (
	selector = &tb.ReplyMarkup{}
	btnPrev = selector.Data("Ask AI", "ai-init", "1")
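
Since OpenAIKey is read once at startup, a guard along these lines (hypothetical, not part of this commit, placed in the same package) would surface a missing OPENAI_API_KEY as a warning at boot instead of an authorization failure on the first request:

func init() {
	// Hypothetical startup check: warn early when the OpenAI key is absent
	// so that askOpenAI failures are easier to diagnose.
	if OpenAIKey == "" {
		log.Warnln("OPENAI_API_KEY is not set; askOpenAI will fail with an authorization error")
	}
}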
4 changes: 2 additions & 2 deletions handler.go
@@ -56,7 +56,7 @@ func mainEntrance(c tb.Context) error {
		} else {
			addChat(c.Sender().ID, userRole, c.Message().Text)
		}
-		aiResponse := askAI(c.Sender().ID)
+		aiResponse := askGemini(c.Sender().ID)
		addChat(c.Sender().ID, modelRole, aiResponse)
		return c.Send(aiResponse, tb.NoPreview)
	} else {
@@ -84,7 +84,7 @@ func testEntrance(c tb.Context) error {
		} else {
			addChat(c.Sender().ID, userRole, c.Message().Text)
		}
-		aiResponse := askAI(c.Sender().ID)
+		aiResponse := askGemini(c.Sender().ID)
		addChat(c.Sender().ID, modelRole, aiResponse)
		return c.Send(aiResponse, tb.NoPreview)
	} else {
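
Both entrances still call askGemini, so the new askOpenAI path is not reachable from the handlers yet. One way it could be wired in later, sketched here with a hypothetical AI_PROVIDER environment toggle that is not part of this commit, is a small dispatcher in the same package:

func askProvider(userID int64) string {
	// Hypothetical dispatcher: route to the experimental OpenAI backend only
	// when explicitly enabled, keeping Gemini as the default.
	if os.Getenv("AI_PROVIDER") == "openai" {
		return askOpenAI(userID)
	}
	return askGemini(userID)
}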
