Skip to content

Commit

Permalink
LLM config via json file (EmergenceAI#95)
Browse files Browse the repository at this point in the history
* move the config to a file

* sample config file

* refactor readme and add instructions for json file config

* update example env

* logging configuration for clarity

* inform the user if a new system prompt was provided in config
  • Loading branch information
teaxio authored Aug 16, 2024
1 parent 3a7c843 commit 9ad1ef0
Show file tree
Hide file tree
Showing 7 changed files with 679 additions and 172 deletions.
8 changes: 6 additions & 2 deletions .env-example
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
AUTOGEN_MODEL_NAME=PUT YOUR MODEL NAME HERE, for example: gpt-4-turbo-preview or llama3-70b-8192
AUTOGEN_MODEL_NAME=PUT YOUR MODEL NAME HERE, for example: gpt-4-turbo, gpt-4o
AUTOGEN_MODEL_API_KEY=PUT YOUR MODEL API KEY HERE
AUTOGEN_MODEL_BASE_URL=IF YOU ARE USING OpenAI, remove this line, otherwise add the base URL, for example: https://api.groq.com/openai/v1
OPENAI_API_KEY=If you are using the testing/benchmarking, put your openai key here otherwise remove this line

BROWSER_STORAGE_DIR=/Users/macuser/Library/Application Support/Google/Chrome/Profile 5
BROWSER_STORAGE_DIR=/Users/macuser/Library/Application Support/Google/Chrome/Profile 5

# If you want to use json file for config (see sample agents_llm_config.json and the Readme)
AGENTS_LLM_CONFIG_FILE=agents_llm_config.json
AGENTS_LLM_CONFIG_FILE_REF_KEY=The key for the config stanza to use from the file
520 changes: 401 additions & 119 deletions README.md

Large diffs are not rendered by default.

25 changes: 17 additions & 8 deletions ae/core/agents/browser_nav_agent.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from datetime import datetime
from string import Template
from typing import Any

import autogen # type: ignore

Expand All @@ -17,35 +18,43 @@

#from ae.core.skills.pdf_text_extractor import extract_text_from_pdf
from ae.core.skills.press_key_combination import press_key_combination
from ae.utils.logger import logger


class BrowserNavAgent:
def __init__(self, config_list, browser_nav_executor: autogen.UserProxyAgent): # type: ignore
def __init__(self, model_config_list, llm_config_params: dict[str, Any], system_prompt: str|None, browser_nav_executor: autogen.UserProxyAgent): # type: ignore
"""
Initialize the BrowserNavAgent and store the AssistantAgent instance
as an instance attribute for external access.
Parameters:
- config_list: A list of configuration parameters required for AssistantAgent.
- model_config_list: A list of configuration parameters required for AssistantAgent.
- llm_config_params: A dictionary of configuration parameters for the LLM.
- system_prompt: The system prompt to be used for this agent or the default will be used if not provided.
- user_proxy_agent: An instance of the UserProxyAgent class.
"""
self.browser_nav_executor = browser_nav_executor
user_ltm = self.__get_ltm()

system_message = LLM_PROMPTS["BROWSER_AGENT_PROMPT"]
if system_prompt and len(system_prompt) > 0:
if isinstance(system_prompt, list):
system_message = "\n".join(system_prompt)
else:
system_message = system_prompt
logger.info(f"Using custom system prompt for BrowserNavAgent: {system_message}")

system_message = system_message + "\n" + f"Today's date is {datetime.now().strftime('%d %B %Y')}"
if user_ltm: #add the user LTM to the system prompt if it exists
user_ltm = "\n" + user_ltm
system_message = Template(system_message).substitute(basic_user_information=user_ltm)

logger.info(f"Browser nav agent using model: {model_config_list[0]['model']}")
self.agent = autogen.ConversableAgent(
name="browser_navigation_agent",
system_message=system_message,
llm_config={
"config_list": config_list,
"cache_seed": None,
"temperature": 0.0,
"top_p": 0.001,
"seed":12345
"config_list": model_config_list,
**llm_config_params #unpack all the name value pairs in llm_config_params as is
},
)
self.__register_skills()
Expand Down
26 changes: 18 additions & 8 deletions ae/core/agents/high_level_planner_agent.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from datetime import datetime
from string import Template
from typing import Any

import autogen # type: ignore
from autogen import ConversableAgent # type: ignore
Expand All @@ -8,35 +9,44 @@
from ae.core.post_process_responses import final_reply_callback_planner_agent as print_message_as_planner # type: ignore
from ae.core.prompts import LLM_PROMPTS
from ae.core.skills.get_user_input import get_user_input
from ae.utils.logger import logger


class PlannerAgent:
def __init__(self, config_list, user_proxy_agent:ConversableAgent): # type: ignore
def __init__(self, model_config_list, llm_config_params: dict[str, Any], system_prompt: str|None, user_proxy_agent:ConversableAgent): # type: ignore
"""
Initialize the PlannerAgent and store the AssistantAgent instance
as an instance attribute for external access.
Parameters:
- config_list: A list of configuration parameters required for AssistantAgent.
- model_config_list: A list of configuration parameters required for AssistantAgent.
- llm_config_params: A dictionary of configuration parameters for the LLM.
- system_prompt: The system prompt to be used for this agent or the default will be used if not provided.
- user_proxy_agent: An instance of the UserProxyAgent class.
"""

user_ltm = self.__get_ltm()
system_message = LLM_PROMPTS["PLANNER_AGENT_PROMPT"]

if system_prompt and len(system_prompt) > 0:
if isinstance(system_prompt, list):
system_message = "\n".join(system_prompt)
else:
system_message = system_prompt
logger.info(f"Using custom system prompt for PlannerAgent: {system_message}")


if user_ltm: #add the user LTM to the system prompt if it exists
user_ltm = "\n" + user_ltm
system_message = Template(system_message).substitute(basic_user_information=user_ltm)
system_message = system_message + "\n" + f"Today's date is {datetime.now().strftime('%d %B %Y')}"
logger.info(f"Planner agent using model: {model_config_list[0]['model']}")

self.agent = autogen.AssistantAgent(
name="planner_agent",
system_message=system_message,
llm_config={
"config_list": config_list,
"cache_seed": None,
"temperature": 0.0,
"top_p": 0.001,
"seed":12345
"config_list": model_config_list,
**llm_config_params #unpack all the name value pairs in llm_config_params as is
},
)

Expand Down
161 changes: 161 additions & 0 deletions ae/core/agents_llm_config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,161 @@

import json
import os
from typing import Any

from dotenv import load_dotenv

from ae.utils.logger import logger


class AgentsLLMConfig:
    """Load per-agent LLM configuration from a JSON file or from environment variables.

    Resolution order:
      1. If ``AGENTS_LLM_CONFIG_FILE`` is set, the JSON file it points to is read and
         the stanza named by ``AGENTS_LLM_CONFIG_FILE_REF_KEY`` is used (both must be set).
      2. Otherwise the ``AUTOGEN_*`` environment variables are used, and the same
         config is shared by both agents.

    The resulting ``self.config`` maps ``"planner_agent"`` / ``"browser_nav_agent"`` to dicts
    of the shape::

        {"model_config_params": {...}, "llm_config_params": {...}, "other_settings": {...}}
    """

    # Mapping from environment keys to model config keys
    KEY_MAPPING_ENV_MODEL: dict[str, str] = {
        "AUTOGEN_MODEL_NAME": "model",
        "AUTOGEN_MODEL_API_KEY": "api_key",
        "AUTOGEN_MODEL_BASE_URL": "base_url",
        "AUTOGEN_MODEL_API_TYPE": "api_type",
        "AUTOGEN_MODEL_API_VERSION": "api_version",
    }

    # Mapping from environment keys to LLM config keys (values are numeric — see
    # _normalize_config_from_env, which converts them from the env's strings)
    KEY_MAPPING_ENV_LLM: dict[str, str] = {
        "AUTOGEN_LLM_TEMPERATURE": "temperature",
        "AUTOGEN_LLM_TOP_P": "top_p",
    }

    # Mapping from file keys to model config keys
    KEY_MAPPING_FILE: dict[str, str] = {
        "model_name": "model",
        "model_api_key": "api_key",
        "model_base_url": "base_url",
    }

    def __init__(self, env_file_path: str = ".env") -> None:
        """Load the .env file (overriding existing env vars) and build the config.

        Parameters:
        - env_file_path: Path to the dotenv file to load. Defaults to ".env".
        """
        load_dotenv(env_file_path, verbose=True, override=True)
        self.config: dict[str, Any] = self._load_config()

    def _load_config(self) -> dict[str, Any]:
        """Build the full agent config dict from the JSON file or the environment.

        Raises:
        - ValueError: if AGENTS_LLM_CONFIG_FILE is set but the REF_KEY is not.
        - KeyError: if the REF_KEY is not present in the config file.
        - Exception: any error opening/parsing the config file is logged and re-raised.
        """
        config_file = os.getenv("AGENTS_LLM_CONFIG_FILE")
        config_file_ref_key = os.getenv("AGENTS_LLM_CONFIG_FILE_REF_KEY")

        if config_file:
            try:
                with open(config_file, 'r') as file: # noqa: UP015
                    file_config = json.load(file)

                if config_file_ref_key:
                    if config_file_ref_key in file_config:
                        logger.info(f"Loading configuration from: {config_file} with key: {config_file_ref_key}")
                        raw_config = file_config[config_file_ref_key]

                        # Process configurations for both planner_agent and browser_nav_agent
                        planner_config = self._normalize_config_from_file(raw_config.get("planner_agent", {}))
                        browser_nav_config = self._normalize_config_from_file(raw_config.get("browser_nav_agent", {}))

                        config = {
                            "planner_agent": planner_config,
                            "browser_nav_agent": browser_nav_config,
                            "other_settings": {k: v for k, v in raw_config.items() if k not in ["planner_agent", "browser_nav_agent"]},
                        }
                        logger.info(f"Using configuration key '{config_file_ref_key}' from the config file.")
                    else:
                        logger.error(f"Key '{config_file_ref_key}' not found in the configuration file.")
                        raise KeyError(f"Key '{config_file_ref_key}' not found in the configuration file.")
                else:
                    logger.error("AGENTS_LLM_CONFIG_FILE_REF_KEY is not provided.")
                    raise ValueError("AGENTS_LLM_CONFIG_FILE_REF_KEY must be provided if AGENTS_LLM_CONFIG_FILE is set.")

            except Exception as e:
                logger.error(f"Error loading configuration file: {e}")
                raise  # bare raise preserves the original traceback
        else:
            logger.info("Loading configuration from environment variables")
            # Load configurations from environment variables
            normalized_config = self._normalize_config_from_env()

            # NOTE(review): both agents share the same dict object here, so a mutation
            # through one agent's config is visible through the other — confirm intended.
            config = {
                "planner_agent": normalized_config,
                "browser_nav_agent": normalized_config
            }

        return config

    def _normalize_config_from_file(self, agent_config: dict[str, Any]) -> dict[str, Any]:
        """Normalize agent-specific config from a file, grouping keys into model_config_params, llm_config_params, and other_settings."""
        model_config: dict[str, Any] = {}
        llm_config_params: dict[str, Any] = {}
        other_settings: dict[str, Any] = {}

        for k, v in agent_config.items():
            if k in self.KEY_MAPPING_FILE:
                model_config[self.KEY_MAPPING_FILE[k]] = v
            elif k == "llm_config_params":
                llm_config_params = v  # Keep llm_config_params as is
            else:
                other_settings[k] = v

        return {
            "model_config_params": model_config,
            "llm_config_params": llm_config_params,
            "other_settings": other_settings,
        }

    def _normalize_config_from_env(self) -> dict[str, Any]:
        """Normalize config from environment variables, adding defaults for 'temperature', 'top_p', and 'seed' based on model name."""
        model_config: dict[str, Any] = {}
        llm_config_params: dict[str, Any] = {}
        other_settings: dict[str, Any] = {}

        # Populate model_config_params
        for original_key, mapped_key in self.KEY_MAPPING_ENV_MODEL.items():
            value = os.getenv(original_key)
            if value is not None:
                model_config[mapped_key] = value

        # Populate llm_config_params. os.getenv always returns strings, but
        # temperature/top_p must be numeric (the defaults below are floats and the
        # LLM API expects numbers), so convert explicitly.
        for original_key, mapped_key in self.KEY_MAPPING_ENV_LLM.items():
            value = os.getenv(original_key)
            if value is not None:
                llm_config_params[mapped_key] = float(value)

        # Capture other settings that start with 'AUTOGEN_MODEL'
        for original_key in os.environ:
            if original_key.startswith("AUTOGEN_MODEL") and original_key not in self.KEY_MAPPING_ENV_MODEL:
                other_settings[original_key] = os.getenv(original_key)

        # Apply defaults for 'temperature', 'top_p', 'seed' if not present
        model_name: str = str(model_config.get("model", "")).lower()

        if model_name.startswith("gpt"):
            llm_config_params.setdefault("temperature", 0.0)
            llm_config_params.setdefault("top_p", 0.001)
            llm_config_params.setdefault("seed", 12345)
        else:
            llm_config_params.setdefault("temperature", 0.1)
            llm_config_params.setdefault("top_p", 0.1)

        return {
            "model_config_params": model_config,
            "llm_config_params": llm_config_params,
            "other_settings": other_settings,
        }

    def get_planner_agent_config(self) -> dict[str, Any]:
        """Return the normalized config for the planner agent."""
        return self.config["planner_agent"]

    def get_browser_nav_agent_config(self) -> dict[str, Any]:
        """Return the normalized config for the browser navigation agent."""
        return self.config["browser_nav_agent"]

    def get_full_config(self) -> dict[str, Any]:
        """Return the entire loaded configuration dict."""
        return self.config

# Example usage
if __name__ == "__main__":
    llm_config = AgentsLLMConfig()

    # Print each agent's resolved configuration with its label.
    for label, agent_cfg in (
        ("Planner Agent Config:", llm_config.get_planner_agent_config()),
        ("Browser Nav Agent Config:", llm_config.get_browser_nav_agent_config()),
    ):
        print(label, agent_cfg)
Loading

0 comments on commit 9ad1ef0

Please sign in to comment.