Use anthropic and groq-based model with Agent-E #107

Merged · 13 commits · Nov 4, 2024
File renamed without changes.
3 changes: 3 additions & 0 deletions .gitignore
@@ -163,3 +163,6 @@ Pipfile
# file containing LLM config for the agents
agents_llm_config.json
ae/testing.py

# $HOME directory used by the Chrome browser
$HOME/
2 changes: 1 addition & 1 deletion README.md
@@ -155,7 +155,7 @@ Agent-E relies on several environment variables for its configuration. You need

## Running the Code

Once you have set up the environment and installed all the dependencies, you can run Agent-E using the following command:
Once you have set up the environment and installed all the dependencies, you can run Agent-E using the `./run.sh` script or with the following command:
```bash
python -m ae.main
```
1 change: 1 addition & 0 deletions ae/core/agents_llm_config.py
@@ -29,6 +29,7 @@ class AgentsLLMConfig:
"model_name": "model",
"model_api_key": "api_key",
"model_base_url": "base_url",
"model_api_type": "api_type",
}

def __init__(self, env_file_path: str = ".env", llm_config: dict[str,Any] | None = None) -> None:
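The new `model_api_type` entry extends the key-translation map so that the provider type (e.g. `anthropic` or `groq`) from the user-facing JSON flows through to the field name autogen expects. Below is a minimal sketch of how such a mapping might be applied; the helper name and the standalone map are illustrative, not the exact code inside `AgentsLLMConfig`:

```python
# Illustrative sketch: normalize user-facing config keys into autogen-style
# llm_config keys. The mapping mirrors the snippet above; normalize_agent_config
# is a hypothetical helper, not part of the actual AgentsLLMConfig class.
from typing import Any

KEY_MAPPING: dict[str, str] = {
    "model_name": "model",
    "model_api_key": "api_key",
    "model_base_url": "base_url",
    "model_api_type": "api_type",  # newly added so the provider type is preserved
}

def normalize_agent_config(raw: dict[str, Any]) -> dict[str, Any]:
    """Rename known keys and pass everything else through unchanged."""
    return {KEY_MAPPING.get(key, key): value for key, value in raw.items()}

# Example:
# normalize_agent_config({"model_name": "claude-3-opus-20240229", "model_api_type": "anthropic"})
# -> {"model": "claude-3-opus-20240229", "api_type": "anthropic"}
```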
50 changes: 49 additions & 1 deletion agents_llm_config-example.json
@@ -46,5 +46,53 @@
"seed":12345
}
}
}
},
"llama": {
"planner_agent": {
"model_name": "llama-3.1-70b-versatile",
"model_api_key": "",
"model_api_type": "groq",
"system_prompt": "You are a web automation task planner....",
"llm_config_params": {
"cache_seed": null,
"temperature": 0.1,
"top_p": 0.1
}
},
"browser_nav_agent": {
"model_name": "llama-3.1-70b-versatile",
"model_api_key": "",
"model_api_type": "groq",
"system_prompt": "You will perform web navigation tasks with the functions that you have...\nOnce a task is completed, confirm completion with ##TERMINATE TASK##.",
"llm_config_params": {
"cache_seed": null,
"temperature": 0.1,
"top_p": 0.1
}
}
},
"anthropic": {
"planner_agent": {
"model_name": "claude-3-opus-20240229",
"model_api_key": "",
"model_api_type": "anthropic",
"system_prompt": "You are a web automation task planner....",
"llm_config_params": {
"cache_seed": null,
"temperature": 0.1,
"top_p": 0.1
}
},
"browser_nav_agent": {
"model_name": "claude-3-opus-20240229",
"model_api_key": "",
"model_api_type": "anthropic",
"system_prompt": "You will perform web navigation tasks with the functions that you have...\nOnce a task is completed, confirm completion with ##TERMINATE TASK##.",
"llm_config_params": {
"cache_seed": null,
"temperature": 0.1,
"top_p": 0.1
}
}
}
}
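For clarity, here is a minimal sketch of how one of the new provider blocks in this example file could be read with the standard library. Agent-E's own loading goes through `AgentsLLMConfig`, so this snippet only illustrates the shape of the data, not the actual loader:

```python
# Illustrative only: read agents_llm_config-example.json and pick one provider
# block ("llama" for the Groq-hosted model, or "anthropic"). The real loader
# does more than this; the snippet just shows how the keys nest.
import json

with open("agents_llm_config-example.json") as f:
    all_configs = json.load(f)

provider = "anthropic"  # or "llama"
planner_cfg = all_configs[provider]["planner_agent"]

print(planner_cfg["model_name"])         # e.g. claude-3-opus-20240229
print(planner_cfg["model_api_type"])     # anthropic or groq
print(planner_cfg["llm_config_params"])  # temperature, top_p, cache_seed
```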
6 changes: 4 additions & 2 deletions pyproject.toml
@@ -14,12 +14,14 @@ license = "MIT"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
"anthropic==0.23.1",
"anthropic>=0.23.1",
"google-generativeai==0.5.1",
"nltk==3.8.1",
"pdfplumber==0.11.1",
"playwright==1.44.0",
"pyautogen==0.2.27",
"autogen-agentchat~=0.2",
"autogen-agentchat[anthropic]~=0.2",
"autogen-agentchat[groq]~=0.2",
"pydantic==2.6.2",
"python-dotenv==1.0.0",
"tabulate==0.9.0",
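The `[anthropic]` and `[groq]` extras pull in the provider SDKs that the autogen-agentchat 0.2 line needs when a config entry carries a non-OpenAI `api_type`. A hedged sketch of what such entries might look like in Python, assuming autogen's 0.2-style `config_list` convention (model names and environment-variable names are placeholders):

```python
# Sketch of autogen 0.2-style config_list entries that rely on the new extras.
# The api_key values are read from environment variables and must be set by
# the user; nothing here is the exact configuration Agent-E ships with.
import os

config_list = [
    {
        "model": "claude-3-opus-20240229",
        "api_key": os.environ.get("ANTHROPIC_API_KEY", ""),
        "api_type": "anthropic",   # handled via the [anthropic] extra
    },
    {
        "model": "llama-3.1-70b-versatile",
        "api_key": os.environ.get("GROQ_API_KEY", ""),
        "api_type": "groq",        # handled via the [groq] extra
    },
]

llm_config = {"config_list": config_list, "temperature": 0.1, "top_p": 0.1}
```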