Skip to content
2 changes: 1 addition & 1 deletion agent_apis/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ dependencies = [
"python-dotenv==1.0.1",
"openai>=1.61.0",
"aiohttp>=3.11.12",
"restack-ai>=0.0.77",
"restack-ai>=0.0.78",
]

[project.scripts]
Expand Down
4 changes: 2 additions & 2 deletions agent_apis/src/functions/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

from dotenv import load_dotenv
from openai import OpenAI
from restack_ai.function import FunctionFailure, function, log
from restack_ai.function import NonRetryableError, function, log

load_dotenv()

Expand Down Expand Up @@ -47,4 +47,4 @@ async def llm(function_input: FunctionInputParams) -> str:
return response.choices[0].message.content
except Exception as e:
error_message = "llm function failed"
raise FunctionFailure(error_message, non_retryable=True) from e
raise NonRetryableError(error_message) from e
8 changes: 4 additions & 4 deletions agent_apis/src/functions/weather.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import aiohttp
from restack_ai.function import function, log
from restack_ai.function import NonRetryableError, function, log

HTTP_OK = 200

Expand All @@ -21,6 +21,6 @@ async def weather() -> str:
return str(data)
error_message = f"Error: {response.status}"
raise_exception(error_message)
except Exception:
log.error("Error: {e}")
raise
except Exception as e:
error_message = f"Error: {e}"
raise NonRetryableError(error_message) from e
44 changes: 26 additions & 18 deletions agent_apis/src/workflows/multistep.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from datetime import timedelta

from pydantic import BaseModel, Field
from restack_ai.workflow import import_functions, log, workflow
from restack_ai.workflow import NonRetryableError, import_functions, log, workflow

with import_functions():
from src.functions.llm import FunctionInputParams, llm
Expand All @@ -20,20 +20,28 @@ async def run(self, workflow_input: WorkflowInputParams) -> dict:
user_content = f"Greet this person {workflow_input.name}"

# Step 1 get weather data
weather_data = await workflow.step(
function=weather, start_to_close_timeout=timedelta(seconds=120)
)

# Step 2 Generate greeting with LLM based on name and weather data

llm_message = await workflow.step(
function=llm,
function_input=FunctionInputParams(
system_content=f"You are a personal assitant and have access to weather data {weather_data}. Always greet person with relevant info from weather data",
user_content=user_content,
model="gpt-4o-mini",
),
start_to_close_timeout=timedelta(seconds=120),
)
log.info("MultistepWorkflow completed", llm_message=llm_message)
return {"message": llm_message, "weather": weather_data}
try:
weather_data = await workflow.step(
weather, start_to_close_timeout=timedelta(seconds=120)
)
except Exception as e:
error_message = f"Error during weather: {e}"
raise NonRetryableError(error_message) from e
else:
# Step 2 Generate greeting with LLM based on name and weather data
try:
llm_message = await workflow.step(
function=llm,
function_input=FunctionInputParams(
                        system_content=f"You are a personal assistant and have access to weather data {weather_data}. Always greet person with relevant info from weather data",
user_content=user_content,
model="gpt-4o-mini",
),
start_to_close_timeout=timedelta(seconds=120),
)
except Exception as e:
error_message = f"Error during llm: {e}"
raise NonRetryableError(error_message) from e
else:
log.info("MultistepWorkflow completed", llm_message=llm_message)
return {"message": llm_message, "weather": weather_data}
2 changes: 1 addition & 1 deletion agent_chat/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ dependencies = [
"watchfiles>=1.0.4",
"python-dotenv==1.0.1",
"openai>=1.61.0",
"restack-ai>=0.0.77",
"restack-ai>=0.0.78",
]

[project.scripts]
Expand Down
18 changes: 9 additions & 9 deletions agent_chat/src/agents/agent.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from datetime import timedelta

from pydantic import BaseModel
from restack_ai.agent import agent, import_functions, log, AgentError
from restack_ai.agent import NonRetryableError, agent, import_functions, log

with import_functions():
from src.functions.llm_chat import LlmChatInput, Message, llm_chat
Expand All @@ -23,22 +23,22 @@ def __init__(self) -> None:

@agent.event
async def messages(self, messages_event: MessagesEvent) -> list[Message]:
log.info(f"Received messages: {messages_event.messages}")
self.messages.extend(messages_event.messages)

log.info(f"Calling llm_chat with messages: {self.messages}")
try:
log.info(f"Received messages: {messages_event.messages}")
self.messages.extend(messages_event.messages)

log.info(f"Calling llm_chat with messages: {self.messages}")
assistant_message = await agent.step(
function=llm_chat,
function_input=LlmChatInput(messages=self.messages),
start_to_close_timeout=timedelta(seconds=120),
)

except Exception as e:
error_message = f"Error during llm_chat: {e}"
raise NonRetryableError(error_message) from e
else:
self.messages.append(assistant_message)
return self.messages
except Exception as e:
log.error(f"Error in messages: {e}")
raise AgentError(f"Error in messages: {e}")

@agent.event
async def end(self, end: EndEvent) -> EndEvent:
Expand Down
8 changes: 4 additions & 4 deletions agent_chat/src/functions/llm_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from dotenv import load_dotenv
from openai import OpenAI
from pydantic import BaseModel
from restack_ai.function import FunctionFailure, function, log
from restack_ai.function import NonRetryableError, function, log

load_dotenv()

Expand All @@ -22,7 +22,7 @@ class LlmChatInput(BaseModel):

def raise_exception(message: str) -> None:
log.error(message)
raise FunctionFailure(message, non_retryable=True)
raise NonRetryableError(message)


@function.defn()
Expand All @@ -48,8 +48,8 @@ async def llm_chat(agent_input: LlmChatInput) -> dict[str, str]:
messages=agent_input.messages,
)
except Exception as e:
log.error("llm_chat function failed", error=e)
raise
error_message = f"LLM chat failed: {e}"
raise NonRetryableError(error_message) from e
else:
log.info(
"llm_chat function completed", assistant_raw_response=assistant_raw_response
Expand Down
2 changes: 1 addition & 1 deletion agent_rag/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ dependencies = [
"watchfiles>=1.0.4",
"requests==2.32.3",
"python-dotenv==1.0.1",
"restack-ai>=0.0.77",
"restack-ai>=0.0.78",
]

[project.scripts]
Expand Down
54 changes: 30 additions & 24 deletions agent_rag/src/agents/chat_rag.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from datetime import timedelta

from pydantic import BaseModel
from restack_ai.agent import agent, import_functions, log
from restack_ai.agent import NonRetryableError, agent, import_functions, log

with import_functions():
from src.functions.llm_chat import LlmChatInput, Message, llm_chat
Expand All @@ -26,30 +26,36 @@ def __init__(self) -> None:
async def messages(self, messages_event: MessagesEvent) -> list[Message]:
log.info(f"Received messages: {messages_event.messages}")
self.messages.extend(messages_event.messages)

sales_info = await agent.step(
function=lookup_sales, start_to_close_timeout=timedelta(seconds=120)
)

system_content = f"You are a helpful assistant that can help with sales data. Here is the sales information: {sales_info}"

completion = await agent.step(
function=llm_chat,
function_input=LlmChatInput(
messages=self.messages, system_content=system_content
),
start_to_close_timeout=timedelta(seconds=120),
)

log.info(f"completion: {completion}")

self.messages.append(
Message(
role="assistant", content=completion.choices[0].message.content or ""
try:
sales_info = await agent.step(
function=lookup_sales, start_to_close_timeout=timedelta(seconds=120)
)
)

return self.messages
except Exception as e:
error_message = f"Error during lookup_sales: {e}"
raise NonRetryableError(error_message) from e
else:
system_content = f"You are a helpful assistant that can help with sales data. Here is the sales information: {sales_info}"

try:
completion = await agent.step(
function=llm_chat,
function_input=LlmChatInput(
messages=self.messages, system_content=system_content
),
start_to_close_timeout=timedelta(seconds=120),
)
except Exception as e:
error_message = f"Error during llm_chat: {e}"
raise NonRetryableError(error_message) from e
else:
log.info(f"completion: {completion}")
self.messages.append(
Message(
role="assistant", content=completion.choices[0].message.content or ""
)
)

return self.messages

@agent.event
async def end(self) -> EndEvent:
Expand Down
8 changes: 4 additions & 4 deletions agent_rag/src/functions/llm_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from openai import OpenAI
from openai.types.chat.chat_completion import ChatCompletion
from pydantic import BaseModel
from restack_ai.function import FunctionFailure, function, log
from restack_ai.function import NonRetryableError, function, log

load_dotenv()

Expand All @@ -23,7 +23,7 @@ class LlmChatInput(BaseModel):

def raise_exception(message: str) -> None:
log.error(message)
raise FunctionFailure(message, non_retryable=True)
raise NonRetryableError(message)


@function.defn()
Expand All @@ -49,8 +49,8 @@ async def llm_chat(function_input: LlmChatInput) -> ChatCompletion:
messages=function_input.messages,
)
except Exception as e:
log.error("llm_chat function failed", error=e)
raise
error_message = f"LLM chat failed: {e}"
raise NonRetryableError(error_message) from e
else:
log.info("llm_chat function completed", response=response)
return response
6 changes: 3 additions & 3 deletions agent_rag/src/functions/lookup_sales.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from pydantic import BaseModel
from restack_ai.function import function, log
from restack_ai.function import NonRetryableError, function, log


class SalesItem(BaseModel):
Expand Down Expand Up @@ -85,5 +85,5 @@ async def lookup_sales() -> str:

return str(items)
except Exception as e:
log.error("lookup_sales function failed", error=e)
raise
error_message = f"lookup_sales function failed: {e}"
raise NonRetryableError(error_message) from e
3 changes: 2 additions & 1 deletion agent_stream/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@ dependencies = [
"watchfiles>=1.0.4",
"python-dotenv==1.0.1",
"openai>=1.61.0",
"restack-ai>=0.0.77",
"livekit-api>=0.8.2",
"restack-ai>=0.0.78",
]

[project.scripts]
Expand All @@ -26,6 +26,7 @@ include = ["src"]
[tool.hatch.build.targets.wheel]
include = ["src"]


[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
21 changes: 13 additions & 8 deletions agent_stream/src/agents/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from datetime import timedelta

from pydantic import BaseModel
from restack_ai.agent import agent, import_functions, log
from restack_ai.agent import NonRetryableError, agent, import_functions, log

with import_functions():
from src.functions.llm_chat import LlmChatInput, Message, llm_chat
Expand Down Expand Up @@ -32,13 +32,18 @@ async def messages(self, messages_event: MessagesEvent) -> list[Message]:
log.info(f"Received message: {messages_event.messages}")
self.messages.extend(messages_event.messages)

assistant_message = await agent.step(
function=llm_chat,
function_input=LlmChatInput(messages=self.messages),
start_to_close_timeout=timedelta(seconds=120),
)
self.messages.append(Message(role="assistant", content=str(assistant_message)))
return self.messages
try:
assistant_message = await agent.step(
function=llm_chat,
function_input=LlmChatInput(messages=self.messages),
start_to_close_timeout=timedelta(seconds=120),
)
except Exception as e:
error_message = f"Error during llm_chat: {e}"
raise NonRetryableError(error_message) from e
else:
self.messages.append(Message(role="assistant", content=str(assistant_message)))
return self.messages

@agent.event
async def end(self, end: EndEvent) -> EndEvent:
Expand Down
6 changes: 3 additions & 3 deletions agent_stream/src/functions/llm_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

from openai import OpenAI
from pydantic import BaseModel, Field
from restack_ai.function import function, log, stream_to_websocket
from restack_ai.function import NonRetryableError, function, stream_to_websocket

from src.client import api_address

Expand Down Expand Up @@ -49,5 +49,5 @@ async def llm_chat(function_input: LlmChatInput) -> str:
return await stream_to_websocket(api_address=api_address, data=response)

except Exception as e:
log.error("llm_chat function failed", error=str(e))
raise
error_message = f"llm_chat function failed: {e}"
raise NonRetryableError(error_message) from e
2 changes: 1 addition & 1 deletion agent_telephony/twilio/agent_twilio/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@ dependencies = [
"watchfiles>=1.0.4",
"python-dotenv==1.0.1",
"openai>=1.61.0",
"restack-ai>=0.0.77",
"livekit-api>=0.8.2",
"restack-ai>=0.0.78",
]

[project.scripts]
Expand Down
Loading