Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 0 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -209,15 +209,12 @@ print(f'Structured Response: {response}')
Define your agents using YAML for easy configuration and deployment:

```yaml
apiVersion: flo/alpha-v1
kind: FloAgent
metadata:
name: email-summary-flo
version: 1.0.0
description: "Agent for analyzing email threads"
agent:
name: EmailSummaryAgent
kind: llm
role: Email communication expert
model:
provider: openai
Expand Down
7 changes: 2 additions & 5 deletions flo_ai/examples/yaml_agent_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,19 +2,16 @@

# Example YAML configuration
yaml_config = """
apiVersion: flo/alpha-v1
metadata:
name: email-summary-flo
version: 1.0.0
description: "Agent for analyzing email threads between customers and support"
tags: ["email", "analysis", "support"]
agent:
name: EmailSummaryAgent
kind: llm
role: Email communication expert
model:
provider: openai # or claude
name: gpt-4o-mini # or claude-3-5-sonnet-20240620
provider: anthropic # or openai, gemini, ollama
name: claude-3-5-sonnet-20240620 # e.g. gpt-4o-mini for openai
settings:
temperature: 0
max_retries: 3
Expand Down
66 changes: 66 additions & 0 deletions flo_ai/flo_ai/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
"""
flo_ai - A flexible agent framework for LLM-powered applications
"""

# Models package - Agent framework components
from .models import Agent, AgentError, BaseAgent, AgentType, ReasoningPattern

from .builder.agent_builder import AgentBuilder

# LLM package - Language model integrations
from .llm import BaseLLM, Anthropic, OpenAI, OllamaLLM, Gemini

# Tool package - Tool framework components
from .tool import Tool, ToolExecutionError, flo_tool, create_tool_from_function

# Arium package - Workflow and memory components
from .arium import (
Arium,
BaseArium,
create_arium,
MessageMemory,
BaseMemory,
StartNode,
EndNode,
Edge,
)

# Utils package - Utility functions
from .utils import FloUtils

__all__ = [
# Models
'Agent',
'AgentError',
'BaseAgent',
'AgentType',
'ReasoningPattern',
# Utils
'FloUtils',
# LLM
'BaseLLM',
'Anthropic',
'OpenAI',
'OllamaLLM',
'Gemini',
# Tools
'Tool',
'ToolExecutionError',
'flo_tool',
'create_tool_from_function',
# Arium
'Arium',
'BaseArium',
'AriumBuilder',
'create_arium',
'MessageMemory',
'BaseMemory',
'StartNode',
'EndNode',
'Edge',
# Builder
'AgentBuilder',
'AriumBuilder',
]

__version__ = '1.0.0'
24 changes: 17 additions & 7 deletions flo_ai/flo_ai/builder/agent_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,10 @@ def build(self) -> Agent:

@classmethod
def from_yaml(
cls, yaml_str: str, tools: Optional[List[Tool]] = None
cls,
yaml_str: str,
tools: Optional[List[Tool]] = None,
base_llm: Optional[BaseLLM] = None,
) -> 'AgentBuilder':
"""Create an agent builder from a YAML configuration string

Expand All @@ -119,7 +122,8 @@ def from_yaml(
builder.with_role(agent_config.get('role'))

# Configure LLM based on model settings
if 'model' in agent_config:
if 'model' in agent_config and base_llm is None:
base_url = agent_config.get('base_url', None)
model_config = agent_config['model']
provider = model_config.get('provider', 'openai').lower()
model_name = model_config.get('name')
Expand All @@ -128,15 +132,21 @@ def from_yaml(
raise ValueError('Model name must be specified in YAML configuration')

if provider == 'openai':
builder.with_llm(OpenAI(model=model_name))
elif provider == 'claude':
builder.with_llm(Anthropic(model=model_name))
builder.with_llm(OpenAI(model=model_name, base_url=base_url))
elif provider == 'anthropic':
builder.with_llm(Anthropic(model=model_name, base_url=base_url))
elif provider == 'gemini':
builder.with_llm(Gemini(model=model_name))
builder.with_llm(Gemini(model=model_name, base_url=base_url))
elif provider == 'ollama':
builder.with_llm(OllamaLLM(model=model_name))
builder.with_llm(OllamaLLM(model=model_name, base_url=base_url))
else:
raise ValueError(f'Unsupported model provider: {provider}')
else:
if base_llm is None:
raise ValueError(
'Model must be specified in YAML configuration or base_llm must be provided'
)
builder.with_llm(base_llm)

# Set tools if provided
if tools:
Expand Down
3 changes: 2 additions & 1 deletion flo_ai/flo_ai/llm/anthropic_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,11 @@ def __init__(
model: str = 'claude-3-5-sonnet-20240620',
temperature: float = 0.7,
api_key: Optional[str] = None,
base_url: str = None,
**kwargs,
):
super().__init__(model, api_key, temperature, **kwargs)
self.client = AsyncAnthropic(api_key=self.api_key)
self.client = AsyncAnthropic(api_key=self.api_key, base_url=base_url)

async def generate(
self,
Expand Down
1 change: 1 addition & 0 deletions flo_ai/flo_ai/llm/base_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
@dataclass
class ImageMessage:
image_url: Optional[str] = None
image_bytes: Optional[bytes] = None
image_file_path: Optional[str] = None
image_base64: Optional[str] = None
mime_type: Optional[str] = None
Expand Down
6 changes: 6 additions & 0 deletions flo_ai/flo_ai/llm/gemini_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ def __init__(
model: str = 'gemini-2.5-flash',
temperature: float = 0.7,
api_key: Optional[str] = None,
base_url: str = None,
**kwargs,
):
super().__init__(model, api_key, temperature, **kwargs)
Expand Down Expand Up @@ -153,6 +154,11 @@ def format_image_in_message(self, image: ImageMessage) -> str:
data=image_bytes,
mime_type=image.mime_type,
)
elif image.image_bytes:
return genai.types.Part.from_bytes(
data=image.image_bytes,
mime_type=image.mime_type,
)
raise NotImplementedError(
'Not other way other than file path has been implemented'
)
3 changes: 2 additions & 1 deletion flo_ai/flo_ai/llm/openai_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,13 @@ def __init__(
model='gpt-40-mini',
api_key: str = None,
temperature: float = 0.7,
base_url: str = None,
**kwargs,
):
super().__init__(
model=model, api_key=api_key, temperature=temperature, **kwargs
)
self.client = AsyncOpenAI(api_key=api_key, **kwargs)
self.client = AsyncOpenAI(api_key=api_key, base_url=base_url)
self.model = model
self.kwargs = kwargs

Expand Down
9 changes: 9 additions & 0 deletions flo_ai/flo_ai/models/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
"""
Models package for flo_ai - Agent framework components
"""

from .agent import Agent
from .agent_error import AgentError
from .base_agent import BaseAgent, AgentType, ReasoningPattern

__all__ = ['Agent', 'AgentError', 'BaseAgent', 'AgentType', 'ReasoningPattern']
3 changes: 3 additions & 0 deletions flo_ai/flo_ai/utils/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
"""Utils package for flo_ai - re-exports utility helpers."""

from .flo_utils import FloUtils

# Public API of this package (controls `from flo_ai.utils import *`).
__all__ = ['FloUtils']
125 changes: 125 additions & 0 deletions flo_ai/flo_ai/utils/flo_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
from typing import Dict, Any
import json
from flo_ai.utils.logger import logger


class FloUtils:
    """Static helpers for pulling JSON objects out of free-form LLM output."""

    @staticmethod
    def extract_jsons_from_string(data: str, strict: bool = False) -> Dict[str, Any]:
        """Extract every JSON object embedded in *data* and merge them.

        1) Find all balanced `{ ... }` blocks via a custom parser
        2) Strip JS-style comments and json.loads() each
        3) Merge into one dict (later keys override earlier)
        4) In strict mode, raise ValueError if no JSON found

        Args:
            data: Arbitrary text that may contain one or more JSON objects.
            strict: When True, raise if no valid JSON object is found.

        Returns:
            A single dict with the merged key/value pairs of every valid
            JSON object found; empty dict when none found (non-strict).

        Raises:
            ValueError: In strict mode when no valid JSON object is found.
        """

        # Custom scanner for balanced braces: Python's `re` module does not
        # support recursive patterns like (?R), so track depth manually.
        def find_balanced_braces(text):
            matches = []
            i = 0
            while i < len(text):
                if text[i] == '{':
                    start = i
                    brace_count = 1
                    j = i + 1
                    in_string = False
                    escape_next = False

                    while j < len(text) and brace_count > 0:
                        char = text[j]

                        if escape_next:
                            # Previous char was a backslash inside a string:
                            # this char is escaped, consume it blindly.
                            escape_next = False
                        elif char == '\\' and in_string:
                            escape_next = True
                        elif char == '"':
                            in_string = not in_string
                        elif not in_string:
                            if char == '{':
                                brace_count += 1
                            elif char == '}':
                                brace_count -= 1

                        j += 1

                    if brace_count == 0:
                        candidate = text[start:j]
                        # Accept the candidate only if it actually parses as
                        # JSON after comment stripping.
                        try:
                            cleaned = FloUtils.strip_comments_from_string(candidate)
                            json.loads(cleaned)
                            matches.append(candidate)
                            i = j  # Continue scanning after the valid JSON
                        except json.JSONDecodeError:
                            # Not valid JSON; retry from the next character.
                            i += 1
                    else:
                        # Unbalanced braces; retry from the next character.
                        i += 1
                else:
                    i += 1
            return matches

        matches = find_balanced_braces(data)
        merged: Dict[str, Any] = {}

        for json_str in matches:
            try:
                cleaned = FloUtils.strip_comments_from_string(json_str)
                obj = json.loads(cleaned)
                merged.update(obj)
            except json.JSONDecodeError as e:
                # Defensive: candidates were already validated above, so this
                # branch should not normally trigger.
                logger.error(f'Invalid JSON in response: {json_str}, {e}')

        if strict and not matches:
            logger.error(f'No JSON found in strict mode: {data}')
            raise ValueError(f'No JSON found in strict mode: {data}')

        return merged

    @staticmethod
    def strip_comments_from_string(data: str) -> str:
        """Remove JS-style comments (// and /*...*/) so json.loads() will succeed.

        String literals are copied through verbatim, so comment markers
        inside quoted strings (e.g. URLs) are preserved.
        """
        cleaned = []
        i = 0
        length = len(data)

        while i < length:
            char = data[i]

            # Fast path: character cannot start a string or a comment.
            if char not in '"/*':
                cleaned.append(char)
                i += 1
                continue

            if char == '"':
                # Copy the whole string literal verbatim.
                cleaned.append(char)
                i += 1
                while i < length:
                    char = data[i]
                    cleaned.append(char)
                    i += 1
                    if char == '"':
                        # The quote closes the string only when preceded by
                        # an even number of backslashes (an odd count means
                        # the quote itself is escaped). The old single-char
                        # check `data[i - 2] != '\\'` misread an escaped
                        # backslash before the quote (\\") as an escaped
                        # quote and then consumed the rest of the input as
                        # string content, leaving comments unstripped.
                        backslashes = 0
                        k = i - 2
                        while k >= 0 and data[k] == '\\':
                            backslashes += 1
                            k -= 1
                        if backslashes % 2 == 0:
                            break
                continue

            if char == '/' and i + 1 < length:
                next_char = data[i + 1]
                if next_char == '/':
                    # Line comment: skip through end of line.
                    i += 2
                    while i < length and data[i] != '\n':
                        i += 1
                    continue
                elif next_char == '*':
                    # Block comment: skip through the closing */.
                    i += 2
                    while i + 1 < length:
                        if data[i] == '*' and data[i + 1] == '/':
                            i += 2
                            break
                        i += 1
                    continue

            # A lone '/' or '*' that does not start a comment.
            cleaned.append(char)
            i += 1

        return ''.join(cleaned)
Loading