Commit

make ruff happy
teaxio committed Jul 1, 2024
1 parent c6e6157 commit 4d3912d
Showing 4 changed files with 30 additions and 36 deletions.
9 changes: 3 additions & 6 deletions ae/core/agents/browser_nav_agent.py
@@ -2,25 +2,22 @@
from string import Template

import autogen # type: ignore
from autogen.agentchat.conversable_agent import register_function # type: ignore

from ae.core.memory.static_ltm import get_user_ltm
from ae.core.post_process_responses import final_reply_callback_browser_agent as print_message_from_user_proxy # type: ignore
from ae.core.post_process_responses import final_reply_callback_user_proxy as print_message_from_browser_agent # type: ignore
from ae.core.prompts import LLM_PROMPTS
from ae.core.skills.click_using_selector import click as click_element
from ae.core.skills.enter_text_using_selector import bulk_enter_text
from ae.core.skills.enter_text_and_click import enter_text_and_click
from ae.core.skills.pdf_text_extractor import extract_text_from_pdf
from ae.core.skills.enter_text_using_selector import bulk_enter_text
from ae.core.skills.enter_text_using_selector import entertext
from ae.core.skills.get_dom_with_content_type import get_dom_with_content_type
from ae.core.skills.get_url import geturl
from ae.core.skills.open_url import openurl
from ae.core.skills.pdf_text_extractor import extract_text_from_pdf

#from ae.core.skills.pdf_text_extractor import extract_text_from_pdf
from ae.core.skills.press_key_combination import press_key_combination



class BrowserNavAgent:
def __init__(self, config_list, browser_nav_executor: autogen.UserProxyAgent): # type: ignore
"""
2 changes: 0 additions & 2 deletions ae/core/agents/high_level_planner_agent.py
@@ -2,9 +2,7 @@
from string import Template

import autogen # type: ignore
from autogen import Agent # type: ignore
from autogen import ConversableAgent # type: ignore
from autogen import OpenAIWrapper # type: ignore

from ae.core.memory.static_ltm import get_user_ltm
from ae.core.post_process_responses import final_reply_callback_planner_agent as print_message_as_planner # type: ignore
32 changes: 15 additions & 17 deletions ae/core/autogen_wrapper.py
@@ -98,13 +98,13 @@ async def create(cls, agents_needed: list[str] | None = None, max_chat_round: in

def trigger_nested_chat(manager: autogen.ConversableAgent):
content:str=manager.last_message()["content"] # type: ignore
content_json=parse_response(content)
content_json = parse_response(content) # type: ignore
next_step = content_json.get('next_step', None)
plan = content_json.get('plan', None)
if plan is not None:
print_message_from_planner("Plan: "+ plan)
print(f"Next Step: {next_step}")
if next_step is None:
if next_step is None:
print_message_from_planner("Received no response, terminating..") # type: ignore
print("Trigger nested chat returned False")
return False
@@ -130,9 +130,9 @@ def my_custom_summary_method(sender: autogen.ConversableAgent,recipient: autogen

def reflection_message(recipient, messages, sender, config): # type: ignore
last_message=messages[-1]["content"] # type: ignore
content_json = parse_response(last_message)
content_json = parse_response(last_message) # type: ignore
next_step = content_json.get('next_step', None)
if next_step is None:
if next_step is None:
print ("Message to nested chat returned None")
return None
else:
@@ -145,10 +145,10 @@ def reflection_message(recipient, messages, sender, config): # type: ignore
{
"sender": self.agents_map["browser_nav_executor"],
"recipient": self.agents_map["browser_nav_agent"],
"message":reflection_message,
"message":reflection_message,
"max_turns": self.number_of_rounds,
"summary_method": my_custom_summary_method,
}
}
],
trigger=trigger_nested_chat, # type: ignore
)
@@ -200,11 +200,11 @@ async def __initialize_agents(self, agents_needed: list[str]):
user_delegate_agent = await self.__create_user_delegate_agent()
agents_map["user"] = user_delegate_agent
agents_needed.remove("user")

browser_nav_executor = self.__create_browser_nav_executor_agent()
agents_map["browser_nav_executor"] = browser_nav_executor
agents_needed.remove("browser_nav_executor")

for agent_needed in agents_needed:
if agent_needed == "browser_nav_agent":
browser_nav_agent: autogen.ConversableAgent = self.__create_browser_nav_agent(agents_map["browser_nav_executor"] )
@@ -227,7 +227,7 @@ async def __create_user_delegate_agent(self) -> autogen.ConversableAgent:
"""
def is_planner_termination_message(x: dict[str, str])->bool: # type: ignore
should_terminate = False
content:Any = x.get("content", "")
content:Any = x.get("content", "")
if content is None:
content = ""
should_terminate = True
@@ -240,12 +240,12 @@ def is_planner_termination_message(x: dict[str, str])->bool: # type: ignore
except json.JSONDecodeError:
print("Error decoding JSON content")
should_terminate = True

return should_terminate # type: ignore

task_delegate_agent = autogen.ConversableAgent(
name="user",
llm_config=False,
llm_config=False,
system_message=LLM_PROMPTS["USER_AGENT_PROMPT"],
is_termination_msg=is_planner_termination_message, # type: ignore
human_input_mode="NEVER",
@@ -267,7 +267,7 @@ def is_browser_executor_termination_message(x: dict[str, str])->bool: # type: ig
return False
else:
return True

browser_nav_executor_agent = UserProxyAgent_SequentialFunctionExecution(
name="browser_nav_executor",
is_termination_msg=is_browser_executor_termination_message,
@@ -331,8 +331,8 @@ async def process_command(self, command: str, current_url: str | None = None) ->
try:
if self.agents_map is None:
raise ValueError("Agents map is not initialized.")
print(self.agents_map["browser_nav_executor"].function_map) # type: ignore
# print(self.agents_map["browser_nav_executor"].function_map) # type: ignore

result=await self.agents_map["user"].a_initiate_chat( # type: ignore
self.agents_map["planner_agent"], # self.manager # type: ignore
max_turns=self.number_of_rounds,
@@ -350,5 +350,3 @@ async def process_command(self, command: str, current_url: str | None = None) ->
logger.error(f"Unable to process command: \"{command}\". {bre}")
traceback.print_exc()



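The autogen_wrapper.py hunks above only show fragments of the two termination predicates, so here is a minimal, self-contained sketch of the pattern they follow. It is a reconstruction for illustration (assuming the hidden lines parse the reply as JSON and check a "terminate" flag), not a verbatim copy of the file:

import json
from typing import Any


def is_planner_termination_message(x: dict[str, Any]) -> bool:
    """Return True when the planner's reply signals that the task is finished."""
    content: Any = x.get("content", "")
    if content is None:
        # No reply at all is treated as a reason to stop.
        return True
    try:
        content_json = json.loads(content)
        # The planner is prompted to answer with JSON carrying a "terminate" flag.
        return content_json.get("terminate", "no") == "yes"
    except json.JSONDecodeError:
        print("Error decoding JSON content")
        return True


# Example: a reply carrying the flag ends the outer chat.
print(is_planner_termination_message({"content": '{"terminate": "yes", "final_response": "Done"}'}))  # True
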
23 changes: 12 additions & 11 deletions ae/utils/response_parser.py
@@ -1,15 +1,16 @@
import json
from typing import Dict, Any
from typing import Any

def parse_response(message: str) -> Dict[str, Any]:

def parse_response(message: str) -> dict[str, Any]:
"""
Parse the response from the browser agent and return the response as a dictionary.
"""
# Parse the response content
json_response = {}
raw_messgae = message
message = message.replace("\n", "\\n") # type: ignore
# replace all \\n
# replace all \\n
message = message.replace("\\n", "")
#if message starts with ``` and ends with ``` then remove them
if message.startswith("```"):
@@ -18,14 +19,14 @@ def parse_response(message: str) -> Dict[str, Any]:
message = message[:-3]
if message.startswith("json"):
message = message[4:]

message = message.strip()
try:
json_response = json.loads(message)
except:
# If the response is not a valid JSON, try pass it using string matching.
json_response: dict[str, Any] = json.loads(message)
except Exception as e:
# If the response is not a valid JSON, try pass it using string matching.
#This should seldom be triggered
print(f"Error parsing JSON response {raw_messgae}. Attempting to parse using string matching.")
print(f"Error parsing JSON response {raw_messgae}. Error; {e}. Attempting to parse using string matching.")
if ("plan" in message and "next_step" in message):
start = message.index("plan") + len("plan")
end = message.index("next_step")
@@ -42,7 +43,7 @@ def parse_response(message: str) -> Dict[str, Any]:
json_response["terminate"] = "yes"
else:
json_response["terminate"] = "no"

start=message.index("final_response") + len("final_response")
end=len(message)-1
json_response["final_response"] = message[start:end].replace('"', '').strip()
@@ -55,5 +56,5 @@ def parse_response(message: str) -> Dict[str, Any]:
json_response["terminate"] = "yes"
else:
json_response["terminate"] = "no"
return json_response

return json_response

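As a usage illustration for the parser changed above, the sketch below feeds it a fenced LLM reply. The sample string is made up, the import path comes from the file header above, and it assumes the hidden lines strip a trailing ``` fence the same way the visible lines strip the leading one:

from ae.utils.response_parser import parse_response

# Illustrative planner reply wrapped in a ```json fence, the case the parser unwraps.
raw_reply = (
    "```json\n"
    '{"plan": "1. Open the site. 2. Search for the item.", '
    '"next_step": "Open https://example.com", '
    '"terminate": "no"}\n'
    "```"
)

parsed = parse_response(raw_reply)
print(parsed["next_step"])   # Open https://example.com
print(parsed["terminate"])   # no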