
Commit d78e501

Merge pull request #189 from restackio/fix-agent-chat
Fix agent_chat example appending ChatCompletion instead of Message
2 parents: 32715a3 + 440bca3

File tree: 1 file changed (+13, -5 lines)

agent_chat/src/functions/llm_chat.py

Lines changed: 13 additions & 5 deletions
@@ -3,7 +3,6 @@

 from dotenv import load_dotenv
 from openai import OpenAI
-from openai.types.chat.chat_completion import ChatCompletion
 from pydantic import BaseModel
 from restack_ai.function import FunctionFailure, function, log

@@ -27,7 +26,7 @@ def raise_exception(message: str) -> None:


 @function.defn()
-async def llm_chat(agent_input: LlmChatInput) -> ChatCompletion:
+async def llm_chat(agent_input: LlmChatInput) -> dict[str, str]:
     try:
         log.info("llm_chat function started", agent_input=agent_input)

@@ -44,14 +43,23 @@ async def llm_chat(agent_input: LlmChatInput) -> ChatCompletion:
                 {"role": "system", "content": agent_input.system_content}
             )

-        response = client.chat.completions.create(
+        assistant_raw_response = client.chat.completions.create(
             model=agent_input.model or "gpt-4o-mini",
             messages=agent_input.messages,
         )
     except Exception as e:
         log.error("llm_chat function failed", error=e)
         raise
     else:
-        log.info("llm_chat function completed", response=response)
+        log.info(
+            "llm_chat function completed", assistant_raw_response=assistant_raw_response
+        )
+
+        assistant_response = {
+            "role": assistant_raw_response.choices[0].message.role,
+            "content": assistant_raw_response.choices[0].message.content,
+        }
+
+        log.info("assistant_response", assistant_response=assistant_response)

-        return response
+        return assistant_response
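
For context, here is a minimal sketch (not part of the diff) of why the new return value is the right thing to append to a chat history. It calls the OpenAI Python client directly; the names history and assistant_message are illustrative only, and it assumes OPENAI_API_KEY is set in the environment.

# Minimal sketch, assuming OPENAI_API_KEY is set; `history` and
# `assistant_message` are illustrative names, not taken from the example.
from openai import OpenAI

client = OpenAI()

history: list[dict[str, str]] = [
    {"role": "user", "content": "Hello!"},
]

completion = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=history,
)

# Before the fix: appending the whole ChatCompletion put a non-message
# object into the history, which a following chat.completions.create call
# cannot consume as a message.
# history.append(completion)

# After the fix: llm_chat extracts role/content into an OpenAI-style
# message dict, which appends cleanly and keeps the history valid.
assistant_message = {
    "role": completion.choices[0].message.role,
    "content": completion.choices[0].message.content,
}
history.append(assistant_message)

This mirrors the assistant_response dict that the patched llm_chat now returns.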
