
Commit

Merge branch 'StanGirard:main' into main
GabriielEger authored Aug 11, 2023
2 parents 281da03 + 59ddfb4 commit 3898d78
Showing 1 changed file with 7 additions and 6 deletions.
13 changes: 7 additions & 6 deletions backend/core/routes/chat_routes.py
@@ -9,7 +9,8 @@
 from fastapi.responses import StreamingResponse
 from llm.openai import OpenAIBrainPicking
 from models.brains import Brain
-from models.chat import Chat
+from models.brain_entity import BrainEntity
+from models.chat import Chat, ChatHistory
 from models.chats import ChatQuestion
 from models.databases.supabase.supabase import SupabaseDB
 from models.settings import LLMSettings, get_supabase_db
@@ -232,7 +233,7 @@ async def create_stream_question_handler(
     # Retrieve user's OpenAI API key
     current_user.user_openai_api_key = request.headers.get("Openai-Api-Key")
     brain = Brain(id=brain_id)
-
+    brain_details: BrainEntity = None
     if not current_user.user_openai_api_key and brain_id:
         brain_details = get_brain_details(brain_id)
         if brain_details:
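
The one-line addition above binds brain_details before the conditional lookup. Since the new keyword arguments later in this commit read (brain_details or chat_question) on the code path where the caller supplied an OpenAI key, and that is exactly the path on which the branch above never fetches brain details, the name has to exist there too. A minimal, self-contained sketch of the failure the initialization avoids, using illustrative stand-ins rather than quivr's real models:

from dataclasses import dataclass
from typing import Optional


@dataclass
class FakeSettings:  # illustrative stand-in for BrainEntity / ChatQuestion
    model: str


def resolve_model(user_api_key: Optional[str], brain_id: Optional[str]) -> str:
    chat_question = FakeSettings(model="gpt-4")
    brain_details: Optional[FakeSettings] = None  # the commit's new binding
    if not user_api_key and brain_id:
        # Only this branch ever assigns brain_details from storage.
        brain_details = FakeSettings(model="brain-default-model")
    # Without the binding above, this line would raise UnboundLocalError
    # whenever user_api_key is set, because the branch never ran.
    return (brain_details or chat_question).model


print(resolve_model("sk-user-key", "brain-1"))  # -> gpt-4
print(resolve_model(None, "brain-1"))           # -> brain-default-model
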
@@ -247,7 +248,7 @@ async def create_stream_question_handler(
     # Retrieve chat model (temperature, max_tokens, model)
     if (
         not chat_question.model
-        or not chat_question.temperature
+        or chat_question.temperature is None
         or not chat_question.max_tokens
     ):
         # TODO: create ChatConfig class (pick config from brain or user or chat) and use it here
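
The temperature test above is the behavioural fix in this hunk: not chat_question.temperature treats a deliberate 0.0 as missing (0.0 is falsy), while chat_question.temperature is None only triggers when the field was actually omitted. A small sketch of the difference, with an illustrative helper rather than the route's real code:

def needs_default_config(model, temperature, max_tokens) -> bool:
    # New check: only a missing temperature (None) forces the defaults;
    # the old `not temperature` also fired for a valid temperature of 0.0.
    return not model or temperature is None or not max_tokens


print(needs_default_config("gpt-3.5-turbo", 0.0, 256))   # False: 0.0 is an explicit value
print(needs_default_config("gpt-3.5-turbo", None, 256))  # True: temperature was omitted
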
@@ -263,9 +264,9 @@ async def create_stream_question_handler(
 
     gpt_answer_generator = OpenAIBrainPicking(
         chat_id=str(chat_id),
-        model=chat_question.model,
-        max_tokens=chat_question.max_tokens,
-        temperature=chat_question.temperature,
+        model=(brain_details or chat_question).model if current_user.user_openai_api_key else "gpt-3.5-turbo",
+        max_tokens=(brain_details or chat_question).max_tokens if current_user.user_openai_api_key else 0,
+        temperature=(brain_details or chat_question).temperature if current_user.user_openai_api_key else 256,
         brain_id=str(brain_id),
         user_openai_api_key=current_user.user_openai_api_key,  # pyright: ignore reportPrivateUsage=none
         streaming=True,
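
Taken together, the three new keyword arguments prefer the brain's stored settings, then the values sent with the question, whenever the caller provided an OpenAI key, and otherwise fall back to fixed values. A rough, standalone sketch of that precedence follows, with illustrative types only; note that the committed fallbacks pair max_tokens with 0 and temperature with 256, which looks like the two constants may have been swapped.

from dataclasses import dataclass
from typing import Optional


@dataclass
class LLMConfig:  # illustrative stand-in for the BrainEntity / ChatQuestion fields
    model: str
    temperature: float
    max_tokens: int


def build_llm_kwargs(user_api_key: Optional[str],
                     brain_details: Optional[LLMConfig],
                     chat_question: LLMConfig) -> dict:
    if user_api_key:
        source = brain_details or chat_question  # brain settings win when present
        return {
            "model": source.model,
            "temperature": source.temperature,
            "max_tokens": source.max_tokens,
        }
    # No user key: the commit hard-codes these fallbacks as written.
    return {"model": "gpt-3.5-turbo", "temperature": 256, "max_tokens": 0}


question = LLMConfig(model="gpt-4", temperature=0.0, max_tokens=512)
print(build_llm_kwargs("sk-user-key", None, question))
# -> {'model': 'gpt-4', 'temperature': 0.0, 'max_tokens': 512}
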
