From 0e76eade03638acb5a4b648f5610e13a2f2db08a Mon Sep 17 00:00:00 2001 From: DeepakAkkil Date: Wed, 3 Jul 2024 16:10:56 +0300 Subject: [PATCH] OpenAI call changes towards more deterministic responses Even with a temperature of 0, the response can be very non-deterministic. Setting a low top_p and a "seed" are additional steps that can be taken to move towards deterministic responses. https://cookbook.openai.com/examples/reproducible_outputs_with_the_seed_parameter --- ae/core/agents/browser_nav_agent.py | 4 +++- ae/core/agents/high_level_planner_agent.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/ae/core/agents/browser_nav_agent.py b/ae/core/agents/browser_nav_agent.py index 9cd8b45..58dbb76 100644 --- a/ae/core/agents/browser_nav_agent.py +++ b/ae/core/agents/browser_nav_agent.py @@ -43,7 +43,9 @@ def __init__(self, config_list, browser_nav_executor: autogen.UserProxyAgent): # llm_config={ "config_list": config_list, "cache_seed": None, - "temperature": 0.0 + "temperature": 0.0, + "top_p": 0.001, + "seed":12345 }, ) self.__register_skills() diff --git a/ae/core/agents/high_level_planner_agent.py b/ae/core/agents/high_level_planner_agent.py index e37a537..2d440b9 100644 --- a/ae/core/agents/high_level_planner_agent.py +++ b/ae/core/agents/high_level_planner_agent.py @@ -34,7 +34,9 @@ def __init__(self, config_list, user_proxy_agent:ConversableAgent): # type: igno llm_config={ "config_list": config_list, "cache_seed": None, - "temperature": 0.0 + "temperature": 0.0, + "top_p": 0.001, + "seed":12345 }, )