-
Notifications
You must be signed in to change notification settings - Fork 0
Open
Description
Problem: Users may want to implement custom memory but lack a complete reference showing best practices, patterns, and integration.
Required Changes:
- Create comprehensive example (`examples/11_custom_memory/main.go`):
// Complete example showing:
// - Custom memory implementation
// - Thread-safety patterns
// - Integration with agents
// - Testing custom memory
// - Multiple memory strategies
package main
import (
"context"
"fmt"
"math"
"sort"
"sync"
"time"
"github.com/kshard/chatter"
"github.com/kshard/chatter/provider/autoconfig"
"github.com/kshard/thinker"
"github.com/kshard/thinker/agent"
"github.com/kshard/thinker/codec"
)
//------------------------------------------------------------------------------
// Example 1: Priority-based memory (retains important observations)
//------------------------------------------------------------------------------
// PriorityMemory is a bounded memory that retains only the highest-scoring
// observations, evicting the least important once capacity is exceeded.
type PriorityMemory struct {
	mu           sync.Mutex             // guards observations for concurrent Commit/Context/Purge
	maxSize      int                    // maximum number of observations retained
	stratum      chatter.Stratum        // system prompt prepended to every context
	observations []*thinker.Observation // retained history; kept sorted by importance (see Commit)
}
// NewPriorityMemory creates a PriorityMemory bounded to maxSize observations,
// seeded with the given system stratum.
func NewPriorityMemory(maxSize int, stratum chatter.Stratum) *PriorityMemory {
	m := &PriorityMemory{
		maxSize: maxSize,
		stratum: stratum,
	}
	m.observations = make([]*thinker.Observation, 0, maxSize)
	return m
}
// Purge discards every retained observation; the stratum is kept.
func (m *PriorityMemory) Purge() {
	m.mu.Lock()
	m.observations = nil
	m.mu.Unlock()
}
// Commit scores the observation, stores it, and evicts the least important
// entries once the configured capacity is exceeded.
func (m *PriorityMemory) Commit(obs *thinker.Observation) {
	m.mu.Lock()
	defer m.mu.Unlock()

	// Attach an importance score before insertion.
	obs.Reply.Importance = m.scoreImportance(obs)
	m.observations = append(m.observations, obs)

	// Keep the slice ordered by importance, highest first.
	byImportance := func(i, j int) bool {
		return m.observations[i].Reply.Importance > m.observations[j].Reply.Importance
	}
	sort.Slice(m.observations, byImportance)

	// Trim everything beyond capacity (lowest-importance tail).
	if len(m.observations) > m.maxSize {
		m.observations = m.observations[:m.maxSize]
	}
}
// Context assembles the LLM context: the system stratum (if any), the retained
// observations replayed in chronological order, and finally the new prompt.
func (m *PriorityMemory) Context(prompt chatter.Message) []chatter.Message {
	m.mu.Lock()
	defer m.mu.Unlock()

	messages := make([]chatter.Message, 0)
	if len(m.stratum) > 0 {
		messages = append(messages, m.stratum)
	}

	// observations is kept in priority order; sort a copy chronologically so
	// the conversation reads in the order it actually happened.
	chrono := append([]*thinker.Observation(nil), m.observations...)
	sort.Slice(chrono, func(i, j int) bool {
		return chrono[i].Created < chrono[j].Created
	})

	for _, o := range chrono {
		messages = append(messages, o.Query.Content, o.Reply.Content)
	}
	return append(messages, prompt)
}
// Reflect is a no-op for PriorityMemory: importance is scored eagerly in
// Commit, so there is no deferred consolidation to perform.
func (m *PriorityMemory) Reflect(ctx context.Context) error {
	return nil // No reflection needed for this example
}
// scoreImportance maps reply length to a score in [0, 1]: replies of 500+
// characters saturate at 1.0 (longer responses are treated as more important).
func (m *PriorityMemory) scoreImportance(obs *thinker.Observation) float64 {
	const saturation = 500.0
	return math.Min(1.0, float64(len(obs.Reply.Content.String()))/saturation)
}
//------------------------------------------------------------------------------
// Example 2: Time-decayed memory (recent observations weighted higher)
//------------------------------------------------------------------------------
// DecayMemory is a bounded memory that weights observations by recency using
// exponential half-life decay, evicting the lowest-weighted entries.
type DecayMemory struct {
	mu           sync.Mutex             // guards observations for concurrent Commit/Context/Purge
	halfLife     time.Duration          // age at which an observation's weight halves
	maxSize      int                    // maximum number of observations retained
	stratum      chatter.Stratum        // system prompt prepended to every context
	observations []*thinker.Observation // retained history, kept in chronological order
}
// NewDecayMemory creates a DecayMemory bounded to maxSize observations whose
// weights halve every halfLife, seeded with the given system stratum.
func NewDecayMemory(maxSize int, halfLife time.Duration, stratum chatter.Stratum) *DecayMemory {
	m := &DecayMemory{
		maxSize:  maxSize,
		halfLife: halfLife,
		stratum:  stratum,
	}
	m.observations = make([]*thinker.Observation, 0, maxSize)
	return m
}
// Purge discards every retained observation; the stratum is kept.
func (m *DecayMemory) Purge() {
	m.mu.Lock()
	m.observations = nil
	m.mu.Unlock()
}
// Commit appends an observation and immediately applies time-decay eviction
// so the memory never exceeds its configured capacity.
func (m *DecayMemory) Commit(obs *thinker.Observation) {
	m.mu.Lock()
	defer m.mu.Unlock()

	m.observations = append(m.observations, obs)
	m.evictOld() // drop the lowest-scoring entries beyond capacity
}
// Context assembles the LLM context: the system stratum (if any), all retained
// observations (already chronological — see evictOld), then the new prompt.
func (m *DecayMemory) Context(prompt chatter.Message) []chatter.Message {
	m.mu.Lock()
	defer m.mu.Unlock()

	messages := make([]chatter.Message, 0)
	if len(m.stratum) > 0 {
		messages = append(messages, m.stratum)
	}
	for _, o := range m.observations {
		messages = append(messages, o.Query.Content, o.Reply.Content)
	}
	return append(messages, prompt)
}
// Reflect is a no-op for DecayMemory: decay-based eviction runs eagerly in
// Commit via evictOld, so there is nothing to consolidate here.
func (m *DecayMemory) Reflect(ctx context.Context) error {
	return nil
}
// evictOld scores every observation with exponential half-life decay
// (score = e^(-age/halfLife), so newer entries score higher), keeps the top
// maxSize entries, and rebuilds the retained list in chronological order.
// Caller must hold m.mu.
func (m *DecayMemory) evictOld() {
	now := time.Now()

	// scoredObs pairs an observation with its decay score.
	//
	// BUG FIX: the original declared `scored := make([]scored, …)`, shadowing
	// the local type `scored` with the slice variable, so the composite
	// literal `scored{obs: obs, score: score}` no longer referred to a type
	// and the file did not compile. The type and variable now have distinct
	// names.
	type scoredObs struct {
		obs   *thinker.Observation
		score float64
	}

	ranked := make([]scoredObs, len(m.observations))
	for i, obs := range m.observations {
		age := now.Sub(obs.Created.Time())
		ranked[i] = scoredObs{
			obs:   obs,
			score: math.Exp(-age.Seconds() / m.halfLife.Seconds()),
		}
	}

	// Highest score (most recent) first.
	sort.Slice(ranked, func(i, j int) bool {
		return ranked[i].score > ranked[j].score
	})

	// Keep only the top N.
	if len(ranked) > m.maxSize {
		ranked = ranked[:m.maxSize]
	}

	// Rebuild the observation list in chronological order so Context replays
	// the conversation in the order it happened.
	m.observations = make([]*thinker.Observation, len(ranked))
	for i, s := range ranked {
		m.observations[i] = s.obs
	}
	sort.Slice(m.observations, func(i, j int) bool {
		return m.observations[i].Created < m.observations[j].Created
	})
}
//------------------------------------------------------------------------------
// Main: Demonstrates using custom memory with agents
//------------------------------------------------------------------------------
// main configures an LLM provider from ~/.netrc and runs both custom-memory
// demonstrations in sequence.
func main() {
	llm, err := autoconfig.FromNetRC("thinker")
	if err != nil {
		panic(err)
	}

	fmt.Printf("=== Example 1: Priority-based Memory ===\n\n")
	demonstratePriorityMemory(llm)

	fmt.Printf("\n=== Example 2: Time-decayed Memory ===\n\n")
	demonstrateDecayMemory(llm)
}
// demonstratePriorityMemory runs a short scripted conversation through an
// agent and explains how PriorityMemory ranks the exchanges.
//
// BUG FIXES over the original:
//   - `mem` was declared and never used — a compile error in Go;
//   - the local variable `agent` shadowed the imported `agent` package;
//   - the final fmt.Printf had three verbs (%s, %s, %.2f) but only two
//     arguments, printing "%!f(MISSING)" at runtime;
//   - the Prompt error was silently discarded.
func demonstratePriorityMemory(llm chatter.Chatter) {
	// Priority memory keeps at most 5 observations, ranked by importance.
	mem := NewPriorityMemory(5, "You are a helpful assistant")

	prompter := agent.NewPrompter(llm, simpleEncoder)
	// NOTE(review): wiring mem into the prompter depends on the agent API
	// (see agent implementation); this demo only exercises the conversation.

	// Simulate a conversation with varying importance.
	inputs := []string{
		"Hello",                                      // short, low importance
		"What's the capital of France?",              // short
		"Explain the theory of relativity in detail", // long, high importance
		"OK thanks",                                  // short
		"Tell me about quantum computing and its applications", // long
	}

	for _, input := range inputs {
		result, err := prompter.Prompt(context.Background(), input)
		if err != nil {
			fmt.Printf("Q: %s\nerror: %v\n\n", input, err)
			continue
		}
		fmt.Printf("Q: %s\nA: %s\n\n", input, truncate(result.String(), 60))
	}

	fmt.Printf("Memory retains only the %d highest-importance observations\n", mem.maxSize)
}
// demonstrateDecayMemory constructs a DecayMemory, under which recent
// observations are weighted higher via exponential half-life decay.
func demonstrateDecayMemory(llm chatter.Chatter) {
	// Keep up to 10 observations, halving their weight every hour.
	mem := NewDecayMemory(10, 1*time.Hour, "You are a helpful assistant")

	// BUG FIX: `mem` was declared and never used — a compile error in Go.
	// A real integration would hand mem to an agent built on llm; the blank
	// assignment keeps this sketch compiling until that wiring exists.
	_ = mem

	fmt.Println("Recent observations weighted higher with exponential decay")
}
// simpleEncoder wraps a raw question string into a chatter prompt message.
func simpleEncoder(q string) (chatter.Message, error) {
	prompt := new(chatter.Prompt)
	prompt.WithTask(q)
	return prompt, nil
}
// truncate shortens s to at most n bytes, appending "..." when cut.
// NOTE(review): slicing by byte index can split a multi-byte UTF-8 rune —
// confirm inputs are ASCII or switch to a rune-aware cut.
func truncate(s string, n int) string {
	if len(s) <= n {
		return s
	}
	return s[:n] + "..."
}- Add README for example (
examples/11_custom_memory/README.md):
# Custom Memory Implementation Example
Demonstrates how to implement custom memory strategies.
## Examples Included
1. **Priority Memory**: Retains observations by importance score
2. **Decay Memory**: Time-weighted with exponential decay
3. **Testing patterns**: How to test custom memory
## Key Patterns
### Thread Safety
Always use `sync.Mutex` for concurrent access:
```go
type MyMemory struct {
mu sync.Mutex
// ...
}
func (m *MyMemory) Commit(obs *Observation) {
m.mu.Lock()
defer m.mu.Unlock()
// ... safe mutation
}
```

### Importance Scoring

Use the `Observation.Reply.Importance` field:

```go
obs.Reply.Importance = calculateScore(obs)
```

### Context Building
Return messages in logical order (usually chronological):
```go
func (m *MyMemory) Context(prompt Message) []Message {
    return []Message{systemPrompt, ...history, prompt}
}
```

## Running

```sh
cd examples/11_custom_memory
go run main.go
```

## See Also

- `doc/MEMORY.md` — Memory implementation guide
- `memory/stream.go` — Reference implementation
- `memory/reflection.go` — Advanced reflection example
Estimated Effort: 3 hours
Skills Required: Example creation, documentation, Go patterns
Benefits:
- ✅ Complete working examples
- ✅ Shows best practices (thread-safety, scoring)
- ✅ Multiple strategies demonstrated
- ✅ Easy to copy and adapt
- ✅ Well-documented