import railtracks as rt
# Define a tool (just a function!)
def get_weather(location: str) -> str:
    """Return a canned weather report for *location*.

    Args:
        location: Name of the place to report on.

    Returns:
        A fixed "sunny" message mentioning the location.
    """
    # Indentation was lost in the original snippet; restored here.
    return f"It's sunny in {location}!"
# Create an agent with tools
agent = rt.agent_node(
    "Weather Assistant",
    # BUG FIX: `(x)` is just `x`, not a tuple — a one-element tuple needs a
    # trailing comma. Other snippets in this file pass real tuples of nodes.
    tool_nodes=(rt.function_node(get_weather),),
    llm=rt.llm.OpenAILLM("gpt-4o"),
    system_message="You help users with weather information.",
)
# Run it
# NOTE(review): top-level `await` only works in an async REPL/notebook;
# in a plain script this line must live inside an async function.
result = await rt.call(agent, "What's the weather in Paris?")
print(result.text)  # "Based on the current data, it's sunny in Paris!"

That's it. No complex configurations, no learning proprietary syntax. Just Python.
# Write agents like regular functions
@rt.function_node
def my_tool(text: str) -> str:
    """Process *text* with the project's `process` helper (defined elsewhere)."""
    # Indentation was lost in the original snippet; restored here.
    return process(text)
# Any function becomes a tool
# NOTE(review): `api_call` is presumably another @rt.function_node tool defined
# elsewhere — confirm before copying this snippet verbatim.
agent = rt.agent_node(
"Assistant",
tool_nodes=(my_tool, api_call)
)
# Smart parallelization built-in
# with interface similar to asyncio
# (call from within an async function; `rt.call` returns an awaitable)
result = await rt.call(agent, query)
railtracks viz  # See everything

📦 Installation
# Write agents like regular functions
@rt.function_node
def my_tool(text: str) -> str:
    """Process *text* with the project's `process` helper (defined elsewhere)."""
    # Indentation was lost in the original snippet; restored here.
    return process(text)
# Any function becomes a tool
# NOTE(review): `api_call` is presumably another @rt.function_node tool defined
# elsewhere — confirm before copying this snippet verbatim.
agent = rt.agent_node(
"Assistant",
tool_nodes=(my_tool, api_call)
)
# Smart parallelization built-in with interface similar to asyncio
# (call from within an async function; `rt.call` returns an awaitable)
result = await rt.call(agent, query)
pip install railtracks railtracks-cli

⚡ Your First Agent in 5 Min

import railtracks as rt
# 1. Create tools (just functions with decorators!)
@rt.function_node
def count_characters(text: str, character: str) -> int:
    """Count occurrences of a character in text."""
    # Indentation was lost in the original snippet; restored here.
    return text.count(character)
@rt.function_node
def word_count(text: str) -> int:
    """Count whitespace-separated words in text."""
    # Indentation was lost in the original snippet; restored here.
    return len(text.split())
# 2. Build an agent with tools
text_analyzer = rt.agent_node(
"Text Analyzer",
# the decorated tool functions defined above are passed directly as nodes
tool_nodes=(count_characters, word_count),
llm=rt.llm.OpenAILLM("gpt-4o"),
system_message="You analyze text using the available tools."
)
# 3. Use it to solve the classic "How many r's in strawberry?" problem
@rt.session
async def main():
    """Ask the text-analyzer agent the classic character-counting question."""
    # Indentation was lost in the original snippet; restored here.
    result = await rt.call(text_analyzer, "How many 'r's are in 'strawberry'?")
    print(result.text)  # "There are 3 'r's in 'strawberry'!"
# Run it
import asyncio
asyncio.run(main())

🔍 Visualize Agent in 5 seconds

railtracks init  # Setup visualization (one-time)
railtracks viz   # See your agent in action

🚀 Multi-Agent Research System

# Research coordinator that uses specialized agents
# Specialist agents — the tool functions (web_search, summarize, ...) are
# assumed to be defined elsewhere.
researcher = rt.agent_node("Researcher", tool_nodes=(web_search, summarize))
analyst = rt.agent_node("Analyst", tool_nodes=(analyze_data, create_charts))
writer = rt.agent_node("Writer", tool_nodes=(draft_report, format_document))
coordinator = rt.agent_node(
"Research Coordinator",
tool_nodes=(researcher, analyst, writer), # Agents as tools!
system_message="Coordinate research tasks between specialists."
)

🔗 Complex Workflows Made Simple

# Customer service system with context sharing
async def handle_customer_request(query: str):
    """Route a customer query: technical support first, then billing if flagged.

    Args:
        query: The customer's free-text request.

    Returns:
        The final agent response (billing result when the technical reply
        mentions billing, otherwise the technical result).
    """
    # Indentation was lost in the original snippet; nesting reconstructed from
    # the control-flow keywords and the accompanying comments.
    with rt.Session() as session:
        # Technical support first
        technical_result = await rt.call(technical_agent, query)
        # Share context with billing if needed
        if "billing" in technical_result.text.lower():
            session.context["technical_notes"] = technical_result.text
            billing_result = await rt.call(billing_agent, query)
            return billing_result
        return technical_result
Switch between providers effortlessly:

# OpenAI
rt.llm.OpenAILLM("gpt-4o")
# Anthropic
rt.llm.AnthropicLLM("claude-3-5-sonnet")
# Local models (NOTE(review): model identifiers may lag provider releases — verify)
rt.llm.OllamaLLM("llama3")

Works with OpenAI, Anthropic, Google, Azure, and more! Check out our neatly crafted docs.
pip install railtracks railtracks-cli

You grow, we grow — Railtracks will expand with your ambitions.

Made with lots of ❤️ and ⭐ by the "Railtracks" team • Licensed under MIT • Report Bug • Request Feature
