Skip to content

Commit

Permalink
feat(memory): Add ConversationBufferWindowMemory (#25)
Browse files Browse the repository at this point in the history
Co-authored-by: David Miguel <me@davidmiguel.com>
  • Loading branch information
a-mpch and davidmigloz authored Jul 30, 2023
1 parent 799e079 commit 9c271f7
Show file tree
Hide file tree
Showing 3 changed files with 150 additions and 0 deletions.
71 changes: 71 additions & 0 deletions packages/langchain/lib/src/memory/buffer_window.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import '../model_io/chat_models/models/models.dart';
import '../model_io/chat_models/utils.dart';
import 'chat.dart';
import 'models/models.dart';
import 'stores/message/in_memory.dart';

/// {@template conversation_buffer_window_memory}
/// Buffer for storing a conversation in-memory inside a limited size window
/// and then retrieving the messages at a later time.
///
/// It uses [ChatMessageHistory] as in-memory storage by default.
///
/// Example:
/// ```dart
/// final memory = ConversationBufferWindowMemory();
/// await memory.saveContext({'foo': 'bar'}, {'bar': 'foo'});
/// final res = await memory.loadMemoryVariables();
/// // {'history': 'Human: bar\nAI: foo'}
/// ```
/// {@endtemplate}
final class ConversationBufferWindowMemory extends BaseChatMemory {
  /// {@macro conversation_buffer_window_memory}
  ConversationBufferWindowMemory({
    super.chatHistory,
    super.inputKey,
    super.outputKey,
    super.returnMessages = false,
    this.humanPrefix = 'Human',
    this.aiPrefix = 'AI',
    this.memoryKey = 'history',
    this.k = 5,
  });

  /// The prefix to use for human messages.
  final String humanPrefix;

  /// The prefix to use for AI messages.
  final String aiPrefix;

  /// The memory key to use for the chat history.
  final String memoryKey;

  /// Number of interactions to keep in the buffer.
  ///
  /// Each interaction consists of two messages (one human message and one
  /// AI message), so the buffer retains at most `k * 2` messages.
  final int k;

  @override
  Set<String> get memoryKeys => {memoryKey};

  /// Returns the buffered messages under [memoryKey].
  ///
  /// If [returnMessages] is `true`, the value is a `List<ChatMessage>`;
  /// otherwise the messages are flattened into a single string using
  /// [humanPrefix] and [aiPrefix].
  @override
  Future<MemoryVariables> loadMemoryVariables([
    final MemoryInputValues values = const {},
  ]) async {
    // A non-positive window keeps nothing.
    final messages = k > 0 ? await _getChatMessages() : <ChatMessage>[];
    if (returnMessages) {
      return {memoryKey: messages};
    }
    return {
      memoryKey: messages.toBufferString(
        humanPrefix: humanPrefix,
        aiPrefix: aiPrefix,
      ),
    };
  }

  /// Returns the last [k] interactions (`k * 2` messages) from the history.
  Future<List<ChatMessage>> _getChatMessages() async {
    final historyMessages = await chatHistory.getChatMessages();
    // One interaction = 2 messages, so the window holds k * 2 messages.
    // The guard must compare against k * 2 (not k): otherwise, when
    // k < length <= k * 2, sublist would be called with a negative start
    // index and throw a RangeError.
    final maxMessages = k * 2;
    return historyMessages.length > maxMessages
        ? historyMessages.sublist(historyMessages.length - maxMessages)
        : historyMessages;
  }
}
1 change: 1 addition & 0 deletions packages/langchain/lib/src/memory/memory.dart
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
export 'base.dart';
export 'buffer.dart';
export 'buffer_window.dart';
export 'chat.dart';
export 'models/models.dart';
export 'simple.dart';
Expand Down
78 changes: 78 additions & 0 deletions packages/langchain/test/memory/buffer_window_test.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
import 'package:langchain/src/memory/memory.dart';
import 'package:langchain/src/model_io/chat_models/chat_models.dart';
import 'package:test/test.dart';

void main() {
  group('ConversationBufferWindowMemory tests', () {
    test('Test buffer memory', () async {
      final memory = ConversationBufferWindowMemory();
      final result1 = await memory.loadMemoryVariables();
      expect(result1, {'history': ''});

      await memory.saveContext(
        inputValues: {'foo': 'bar'},
        outputValues: {'bar': 'foo'},
      );
      const expectedString = 'Human: bar\nAI: foo';
      final result2 = await memory.loadMemoryVariables();
      expect(result2, {'history': expectedString});
    });

    test('Test buffer memory return messages', () async {
      // k = 1 keeps only the last interaction (one human + one AI message).
      final memory = ConversationBufferWindowMemory(k: 1, returnMessages: true);
      final result1 = await memory.loadMemoryVariables();
      expect(result1, {'history': <ChatMessage>[]});

      await memory.saveContext(
        inputValues: {'foo': 'bar'},
        outputValues: {'bar': 'foo'},
      );
      final expectedResult = [
        ChatMessage.human('bar'),
        ChatMessage.ai('foo'),
      ];
      final result2 = await memory.loadMemoryVariables();
      expect(result2, {'history': expectedResult});

      await memory.saveContext(
        inputValues: {'foo': 'bar1'},
        outputValues: {'bar': 'foo1'},
      );

      // The oldest interaction must have been evicted from the window.
      final expectedResult2 = [
        ChatMessage.human('bar1'),
        ChatMessage.ai('foo1'),
      ];
      final result3 = await memory.loadMemoryVariables();
      expect(result3, {'history': expectedResult2});
    });

    test('Test buffer memory with pre-loaded history', () async {
      final pastMessages = [
        ChatMessage.human("My name's Jonas"),
        ChatMessage.ai('Nice to meet you, Jonas!'),
      ];
      // Was ConversationBufferMemory (copy-paste slip): this file tests the
      // windowed variant, so the windowed class must be instantiated here.
      final memory = ConversationBufferWindowMemory(
        returnMessages: true,
        chatHistory: ChatMessageHistory(messages: pastMessages),
      );
      final result = await memory.loadMemoryVariables();
      expect(result, {'history': pastMessages});
    });

    test('Test clear memory', () async {
      // Was ConversationBufferMemory (copy-paste slip), see above.
      final memory = ConversationBufferWindowMemory();
      await memory.saveContext(
        inputValues: {'foo': 'bar'},
        outputValues: {'bar': 'foo'},
      );
      const expectedString = 'Human: bar\nAI: foo';
      final result1 = await memory.loadMemoryVariables();
      expect(result1, {'history': expectedString});

      // clear() returns a Future; await it so the assertion below cannot
      // race the deletion.
      await memory.clear();
      final result2 = await memory.loadMemoryVariables();
      expect(result2, {'history': ''});
    });
  });
}

0 comments on commit 9c271f7

Please sign in to comment.