Skip to content

Commit

Permalink
Merge pull request #704 from FlowiseAI/feature/ConversationRetrievalQ…
Browse files Browse the repository at this point in the history
…AAgent

Feature/Conversation Retrieval Agent
  • Loading branch information
HenryHengZJ authored Aug 8, 2023
2 parents 6c9da10 + 5ef0324 commit f4b85ff
Show file tree
Hide file tree
Showing 18 changed files with 1,008 additions and 100 deletions.
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor, InitializeAgentExecutorOptions } from 'langchain/agents'
import { Tool } from 'langchain/tools'
import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory'
import { getBaseClasses } from '../../../src/utils'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { BaseChatMemory } from 'langchain/memory'
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'

Expand Down Expand Up @@ -93,19 +92,10 @@ class ConversationalAgent_Agents implements INode {
const memory = nodeData.inputs?.memory as BaseChatMemory

if (options && options.chatHistory) {
const chatHistory = []
const histories: IMessage[] = options.chatHistory

for (const message of histories) {
if (message.type === 'apiMessage') {
chatHistory.push(new AIMessage(message.message))
} else if (message.type === 'userMessage') {
chatHistory.push(new HumanMessage(message.message))
}
}
memory.chatHistory = new ChatMessageHistory(chatHistory)
memory.chatHistory = mapChatHistory(options)
executor.memory = memory
}

const result = await executor.call({ input })

return result?.output
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
import { flatten } from 'lodash'
import { BaseChatMemory } from 'langchain/memory'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

/**
 * Flowise node: an OpenAI function-calling agent optimized for retrieval
 * during conversation. Wires tools, chat memory and a ChatOpenAI model into
 * a langchain AgentExecutor.
 */
class ConversationalRetrievalAgent_Agents implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Conversational Retrieval Agent'
        this.name = 'conversationalRetrievalAgent'
        this.version = 1.0
        this.type = 'AgentExecutor'
        this.category = 'Agents'
        this.icon = 'agent.svg'
        this.description = `An agent optimized for retrieval during conversation, answering questions based on past dialogue, all using OpenAI's Function Calling`
        this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
        this.inputs = [
            {
                label: 'Allowed Tools',
                name: 'tools',
                type: 'Tool',
                list: true
            },
            {
                label: 'Memory',
                name: 'memory',
                type: 'BaseChatMemory'
            },
            {
                label: 'OpenAI Chat Model',
                name: 'model',
                type: 'ChatOpenAI'
            },
            {
                label: 'System Message',
                name: 'systemMessage',
                type: 'string',
                rows: 4,
                optional: true,
                additionalParams: true
            }
        ]
    }

    /**
     * Builds the OpenAI-functions agent executor from the node's inputs and
     * attaches the provided chat memory.
     *
     * @param nodeData - resolved node inputs (model, memory, tools, systemMessage)
     * @returns the configured AgentExecutor instance
     */
    async init(nodeData: INodeData): Promise<any> {
        const model = nodeData.inputs?.model
        const memory = nodeData.inputs?.memory as BaseChatMemory
        const systemMessage = nodeData.inputs?.systemMessage as string

        // Tools may arrive as nested arrays from upstream nodes; flatten to a single list
        let tools = nodeData.inputs?.tools
        tools = flatten(tools)

        const executor = await initializeAgentExecutorWithOptions(tools, model, {
            agentType: 'openai-functions',
            // verbose only when the DEBUG env var is exactly 'true'
            verbose: process.env.DEBUG === 'true',
            agentArgs: {
                // Custom system prompt takes precedence over the generic default
                prefix: systemMessage ?? `You are a helpful AI assistant.`
            },
            returnIntermediateSteps: true
        })
        executor.memory = memory
        return executor
    }

    /**
     * Executes the agent for one user input, rehydrating chat history from the
     * request options and streaming tokens over socket.io when a client is connected.
     *
     * @param nodeData - carries the AgentExecutor built by init() in `instance`
     * @param input - the user's message
     * @param options - request context (chatHistory, logger, socketIO, socketIOClientId)
     * @returns the agent's final output string
     */
    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
        const executor = nodeData.instance as AgentExecutor

        if (executor.memory) {
            // Align memory keys with what the openai-functions agent expects,
            // and replace stored history with the history sent in this request
            ;(executor.memory as any).memoryKey = 'chat_history'
            ;(executor.memory as any).outputKey = 'output'
            ;(executor.memory as any).chatHistory = mapChatHistory(options)
        }

        const callbacks: any[] = [new ConsoleCallbackHandler(options.logger)]
        // Stream tokens back to the client when a socket connection is available
        if (options.socketIO && options.socketIOClientId) {
            callbacks.push(new CustomChainHandler(options.socketIO, options.socketIOClientId))
        }

        const result = await executor.call({ input }, callbacks)
        return result?.output
    }
}

module.exports = { nodeClass: ConversationalRetrievalAgent_Agents }
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
import { getBaseClasses } from '../../../src/utils'
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'
import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { BaseChatMemory } from 'langchain/memory'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class OpenAIFunctionAgent_Agents implements INode {
Expand Down Expand Up @@ -82,17 +81,7 @@ class OpenAIFunctionAgent_Agents implements INode {
const memory = nodeData.inputs?.memory as BaseChatMemory

if (options && options.chatHistory) {
const chatHistory = []
const histories: IMessage[] = options.chatHistory

for (const message of histories) {
if (message.type === 'apiMessage') {
chatHistory.push(new AIMessage(message.message))
} else if (message.type === 'userMessage') {
chatHistory.push(new HumanMessage(message.message))
}
}
memory.chatHistory = new ChatMessageHistory(chatHistory)
memory.chatHistory = mapChatHistory(options)
executor.memory = memory
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ConversationChain } from 'langchain/chains'
import { getBaseClasses } from '../../../src/utils'
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts'
import { BufferMemory, ChatMessageHistory } from 'langchain/memory'
import { BufferMemory } from 'langchain/memory'
import { BaseChatModel } from 'langchain/chat_models/base'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
import { flatten } from 'lodash'
import { Document } from 'langchain/document'
Expand Down Expand Up @@ -106,17 +105,7 @@ class ConversationChain_Chains implements INode {
const memory = nodeData.inputs?.memory as BufferMemory

if (options && options.chatHistory) {
const chatHistory = []
const histories: IMessage[] = options.chatHistory

for (const message of histories) {
if (message.type === 'apiMessage') {
chatHistory.push(new AIMessage(message.message))
} else if (message.type === 'userMessage') {
chatHistory.push(new HumanMessage(message.message))
}
}
memory.chatHistory = new ChatMessageHistory(chatHistory)
memory.chatHistory = mapChatHistory(options)
chain.memory = memory
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
import { ConversationalRetrievalQAChain, QAChainParams } from 'langchain/chains'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { BaseRetriever } from 'langchain/schema/retriever'
import { BaseChatMemory, BufferMemory, ChatMessageHistory, BufferMemoryInput } from 'langchain/memory'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
import { PromptTemplate } from 'langchain/prompts'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'
import {
Expand Down Expand Up @@ -105,15 +104,17 @@ class ConversationalRetrievalQAChain_Chains implements INode {
const systemMessagePrompt = nodeData.inputs?.systemMessagePrompt as string
const returnSourceDocuments = nodeData.inputs?.returnSourceDocuments as boolean
const chainOption = nodeData.inputs?.chainOption as string
const memory = nodeData.inputs?.memory
const externalMemory = nodeData.inputs?.memory

const obj: any = {
verbose: process.env.DEBUG === 'true' ? true : false,
questionGeneratorChainOptions: {
template: CUSTOM_QUESTION_GENERATOR_CHAIN_PROMPT
}
}

if (returnSourceDocuments) obj.returnSourceDocuments = returnSourceDocuments

if (chainOption === 'map_reduce') {
obj.qaChainOptions = {
type: 'map_reduce',
Expand Down Expand Up @@ -142,20 +143,21 @@ class ConversationalRetrievalQAChain_Chains implements INode {
} as QAChainParams
}

if (memory) {
memory.inputKey = 'question'
memory.memoryKey = 'chat_history'
if (chainOption === 'refine') memory.outputKey = 'output_text'
else memory.outputKey = 'text'
obj.memory = memory
if (externalMemory) {
externalMemory.memoryKey = 'chat_history'
externalMemory.inputKey = 'question'
externalMemory.outputKey = 'text'
externalMemory.returnMessages = true
if (chainOption === 'refine') externalMemory.outputKey = 'output_text'
obj.memory = externalMemory
} else {
const fields: BufferMemoryInput = {
memoryKey: 'chat_history',
inputKey: 'question',
outputKey: 'text',
returnMessages: true
}
if (chainOption === 'refine') fields.outputKey = 'output_text'
else fields.outputKey = 'text'
obj.memory = new BufferMemory(fields)
}

Expand All @@ -166,7 +168,6 @@ class ConversationalRetrievalQAChain_Chains implements INode {
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
const chain = nodeData.instance as ConversationalRetrievalQAChain
const returnSourceDocuments = nodeData.inputs?.returnSourceDocuments as boolean
const memory = nodeData.inputs?.memory
const chainOption = nodeData.inputs?.chainOption as string

let model = nodeData.inputs?.model
Expand All @@ -177,21 +178,8 @@ class ConversationalRetrievalQAChain_Chains implements INode {

const obj = { question: input }

// If external memory like Zep, Redis is being used, ignore below
if (!memory && chain.memory && options && options.chatHistory) {
const chatHistory = []
const histories: IMessage[] = options.chatHistory
const memory = chain.memory as BaseChatMemory

for (const message of histories) {
if (message.type === 'apiMessage') {
chatHistory.push(new AIMessage(message.message))
} else if (message.type === 'userMessage') {
chatHistory.push(new HumanMessage(message.message))
}
}
memory.chatHistory = new ChatMessageHistory(chatHistory)
chain.memory = memory
if (options && options.chatHistory && chain.memory) {
;(chain.memory as any).chatHistory = mapChatHistory(options)
}

const loggerHandler = new ConsoleCallbackHandler(options.logger)
Expand Down
26 changes: 21 additions & 5 deletions packages/components/nodes/memory/DynamoDb/DynamoDb.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { ICommonObject, INode, INodeData, INodeParams, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src'
import { DynamoDBChatMessageHistory } from 'langchain/stores/message/dynamodb'
import { BufferMemory } from 'langchain/memory'
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'

class DynamoDb_Memory implements INode {
label: string
Expand Down Expand Up @@ -51,7 +51,7 @@ class DynamoDb_Memory implements INode {
label: 'Session ID',
name: 'sessionId',
type: 'string',
description: 'if empty, chatId will be used automatically',
description: 'If not specified, the first CHAT_MESSAGE_ID will be used as sessionId',
default: '',
additionalParams: true,
optional: true
Expand Down Expand Up @@ -86,9 +86,11 @@ const initalizeDynamoDB = async (nodeData: INodeData, options: ICommonObject): P
const sessionId = nodeData.inputs?.sessionId as string
const region = nodeData.inputs?.region as string
const memoryKey = nodeData.inputs?.memoryKey as string

const chatId = options.chatId

let isSessionIdUsingChatMessageId = false
if (!sessionId && chatId) isSessionIdUsingChatMessageId = true

const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const accessKeyId = getCredentialParam('accessKey', credentialData, nodeData)
const secretAccessKey = getCredentialParam('secretAccessKey', credentialData, nodeData)
Expand All @@ -106,12 +108,26 @@ const initalizeDynamoDB = async (nodeData: INodeData, options: ICommonObject): P
}
})

const memory = new BufferMemory({
const memory = new BufferMemoryExtended({
memoryKey,
chatHistory: dynamoDb,
returnMessages: true
returnMessages: true,
isSessionIdUsingChatMessageId
})
return memory
}

interface BufferMemoryExtendedInput {
    // True when no explicit sessionId was supplied and the chatId is used instead
    isSessionIdUsingChatMessageId: boolean
}

/**
 * BufferMemory subclass that records whether the DynamoDB session id was
 * derived from the chat message id rather than supplied explicitly.
 */
class BufferMemoryExtended extends BufferMemory {
    isSessionIdUsingChatMessageId? = false

    constructor(fields: BufferMemoryInput & Partial<BufferMemoryExtendedInput>) {
        super(fields)
        // Fall back to false when the flag is omitted; a bare assignment would
        // overwrite the field initializer's default with undefined
        this.isSessionIdUsingChatMessageId = fields.isSessionIdUsingChatMessageId ?? false
    }
}

module.exports = { nodeClass: DynamoDb_Memory }
Loading

0 comments on commit f4b85ff

Please sign in to comment.