LangChain Agent
ChatAgent builds a chat system with action options from the given tools, such as document retrieval through a vector store. It also accepts llm and memory modules as parameters for more flexibility.
The default ChatAgent is based on the LangChain ConversationalChatAgent and uses pre-defined prompts. If you want to modify the prompt templates, refer to Configuration.
from langchain.agents import AgentExecutor, Tool

from chat_agent import ChatAgent

# Define tools
tools = [
    Tool(
        name='Test',
        func=lambda x: 'This is a test tool.',
        description='Test action'
    )
]

# Define an agent (chat_llm is your chat model instance)
agent = ChatAgent.from_llm_and_tools(
    llm=chat_llm,
    tools=tools
)

# Define a chain
agent_chain = AgentExecutor.from_agent_and_tools(
    agent=agent,
    tools=tools,
    verbose=False
)

# Run a test
final_answer = agent_chain.run(input='Test question')
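The snippet above leaves chat_llm undefined. Since ChatAgent also works with llm and memory modules, the sketch below shows one way to wire them up, assuming ChatOpenAI as the chat model and ConversationBufferMemory as the memory; replace these with whatever model and memory your deployment actually uses.

from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory

# Assumed chat model; any LangChain chat model works here.
chat_llm = ChatOpenAI(temperature=0)

# The conversational chat prompt expects history under 'chat_history'
# and as message objects rather than a single string.
memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)

agent = ChatAgent.from_llm_and_tools(llm=chat_llm, tools=tools)
agent_chain = AgentExecutor.from_agent_and_tools(
    agent=agent,
    tools=tools,
    memory=memory,
    verbose=False
)

# Follow-up questions now see earlier turns through the memory module.
print(agent_chain.run(input='Test question'))
print(agent_chain.run(input='What did I just ask?'))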
Customize ChatAgent: The ChatAgent should inherit from the LangChain ConversationalChatAgent. You can modify its methods in chat_agent.py under the agent directory.
from typing import Any, List, Optional, Sequence

from langchain.agents import ConversationalChatAgent, Agent
from langchain.callbacks.base import BaseCallbackManager
from langchain.prompts.base import BasePromptTemplate
from langchain.tools.base import BaseTool

from .output_parser import OutputParser
from .prompt import PREFIX, SUFFIX, TEMPLATE_TOOL_RESPONSE


class ChatAgent(ConversationalChatAgent):
    '''Customize LangChain ConversationalChatAgent'''
    # Use the project's tool-response template instead of the LangChain default.
    template_tool_response: str = TEMPLATE_TOOL_RESPONSE

    @classmethod
    def _get_default_output_parser(cls, **kwargs: Any) -> OutputParser:
        return OutputParser(**kwargs)

    @classmethod
    def from_llm_and_tools(cls, llm, tools,
                           callback_manager: Optional[BaseCallbackManager] = None,
                           input_variables: Optional[List[str]] = None,
                           **kwargs: Any
                           ) -> Agent:
        # Wire in the custom output parser and local prompt templates.
        return super().from_llm_and_tools(
            llm=llm,
            tools=tools,
            callback_manager=callback_manager,
            output_parser=cls._get_default_output_parser(),
            system_message=PREFIX,
            human_message=SUFFIX,
            input_variables=input_variables,
            **kwargs
        )

    @classmethod
    def create_prompt(
        cls,
        tools: Sequence[BaseTool],
        input_variables: Optional[List[str]] = None
    ) -> BasePromptTemplate:
        return super().create_prompt(
            tools,
            system_message=PREFIX,
            human_message=SUFFIX,
            input_variables=input_variables,
            output_parser=cls._get_default_output_parser()
        )
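To check that the overrides are picked up, you can build the agent from the subclass and inspect what it wires in; a minimal sketch, assuming chat_llm and tools from the earlier example:

custom_agent = ChatAgent.from_llm_and_tools(llm=chat_llm, tools=tools)

# The overridden from_llm_and_tools injects the local PREFIX/SUFFIX into the prompt
# and attaches the custom OutputParser, whose format instructions can be printed:
print(custom_agent.output_parser.get_format_instructions())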
Output parser: you can modify OutputParser under the agent directory.
from langchain.agents.conversational_chat.output_parser import ConvoOutputParser

from .prompt import FORMAT_INSTRUCTIONS


class OutputParser(ConvoOutputParser):
    def get_format_instructions(self) -> str:
        return FORMAT_INSTRUCTIONS

    # def parse(self, text: str):
    #     '''Customize parse method here'''
    #     return super().parse(text)
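For example, a common customization (shown here as an illustrative sketch with a hypothetical LenientOutputParser, not the project's actual behavior) is to fall back to returning the raw text as the final answer when the model response is not the expected JSON:

from langchain.schema import AgentFinish

class LenientOutputParser(OutputParser):
    def parse(self, text: str):
        try:
            return super().parse(text)
        except Exception:
            # If the reply is not valid JSON, treat the raw text as the final answer
            # instead of raising a parsing error.
            return AgentFinish({'output': text}, text)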
Akcio is a proprietary project owned and developed by Zilliz. It is published under the Server Side Public License (SSPL) v1.
© Copyright 2023, Zilliz Inc.