cleaned up all react agent code and finished first draft of react agent
liyin2015 committed Jul 11, 2024
1 parent f5988c0 commit 4c5053d
Showing 5 changed files with 462 additions and 28 deletions.
74 changes: 74 additions & 0 deletions developer_notes/react_note.py
@@ -0,0 +1,74 @@
from lightrag.components.agent import ReActAgent
from lightrag.core import Generator, ModelClientType, ModelClient
from lightrag.utils import setup_env

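# setup_env() is expected to load API keys (e.g. GROQ_API_KEY, OPENAI_API_KEY) from a local .env file.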
setup_env()


# Define tools
def multiply(a: int, b: int) -> int:
"""
Multiply two numbers.
"""
return a * b


def add(a: int, b: int) -> int:
"""
Add two numbers.
"""
return a + b


def divide(a: float, b: float) -> float:
"""
Divide two numbers.
"""
return float(a) / b


llama3_model_kwargs = {
"model": "llama3-70b-8192", # llama3 70b works better than 8b here.
"temperature": 0.0,
}
gpt_model_kwargs = {
"model": "gpt-3.5-turbo",
"temperature": 0.0,
}


def test_react_agent(model_client: ModelClient, model_kwargs: dict):
tools = [multiply, add, divide]
queries = [
"What is the capital of France? and what is 465 times 321 then add 95297 and then divide by 13.2?",
"Give me 5 words rhyming with cool, and make a 4-sentence poem using them",
]
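# For reference, the arithmetic in the first query works out to 465 * 321 = 149265,
# then 149265 + 95297 = 244562, and 244562 / 13.2 ≈ 18527.42.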
# define a generator without tools for comparison
generator = Generator(
model_client=model_client,
model_kwargs=model_kwargs,
)

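# max_steps caps the thought/action loop; add_llm_as_fallback appears to register the LLM
# itself as a fallback tool for queries the function tools cannot answer (semantics inferred
# from the parameter names; see the ReActAgent documentation for the exact behavior).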
react = ReActAgent(
max_steps=6,
add_llm_as_fallback=True,
tools=tools,
model_client=model_client,
model_kwargs=model_kwargs,
)
# print(react)

for query in queries:
print(f"Query: {query}")
agent_response = react.call(query)
llm_response = generator.call(prompt_kwargs={"input_str": query})
print(f"Agent response: {agent_response}")
print(f"LLM response: {llm_response}")
print("")


if __name__ == "__main__":
test_react_agent(ModelClientType.GROQ(), llama3_model_kwargs)
# test_react_agent(ModelClientType.OPENAI(), gpt_model_kwargs)
print("Done")
Binary file added docs/source/_static/images/query_1.png
Binary file added docs/source/_static/images/query_2.png
