Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import asyncio
import hashlib

import numpy as np
import pytest

from semantic_kernel.connectors.ai.embedding_generator_base import EmbeddingGeneratorBase
from semantic_kernel.memory.semantic_text_memory import SemanticTextMemory
from semantic_kernel.memory.volatile_memory_store import VolatileMemoryStore

class FakeEmbeddingGenerator(EmbeddingGeneratorBase):
    """Deterministic, model-free embedding generator for tests.

    Produces fixed 3-dimensional vectors derived from each text's length and
    its SHA-256 digest, so search results are reproducible without calling a
    real embedding service.
    """

    async def generate_embeddings(self, texts, settings=None, **kwargs) -> np.ndarray:
        """
        Generate deterministic embeddings for a list of texts.

        Args:
            texts: Sequence of strings to embed.
            settings: Unused; accepted for interface compatibility.

        Returns:
            np.ndarray: A 2D float array of shape (len(texts), 3) where each
            row is the embedding of the corresponding text. An empty input
            yields an empty (0, 3) array.
        """
        if not texts:
            # np.vstack([]) raises ValueError; return an empty 2D array instead.
            return np.empty((0, 3), dtype=float)
        vectors = []
        for text in texts:
            hash_digest = hashlib.sha256(text.encode('utf-8')).hexdigest()
            vec = np.array([
                len(text),                       # crude length feature
                int(hash_digest[:8], 16) % 10,   # hash-derived pseudo-feature
                int(hash_digest[:16], 16) % 100, # hash-derived pseudo-feature
            ], dtype=float)
            vectors.append(vec)
        return np.vstack(vectors)

def create_memory() -> SemanticTextMemory:
    """Build a SemanticTextMemory backed by a volatile store and the fake generator."""
    return SemanticTextMemory(
        storage=VolatileMemoryStore(),
        embeddings_generator=FakeEmbeddingGenerator(),
    )

@pytest.mark.asyncio
async def test_retrieval_accuracy():
    """Each query should retrieve the stored record containing the query term."""
    memory = create_memory()

    facts = [
        ("info1", "I enjoy hiking"),
        ("info2", "I work as a tour guide"),
        ("info3", "I visited Iceland last year"),
    ]
    for record_id, text in facts:
        await memory.save_information(collection="aboutMe", id=record_id, text=text)

    expectations = [
        ("hiking", "I enjoy hiking"),
        ("tour", "I work as a tour guide"),
        ("Iceland", "I visited Iceland last year"),
    ]
    for query, expected in expectations:
        # NOTE(review): the fake embeddings are hash-based, not semantic, so
        # these nearest-neighbor matches depend on the specific vectors —
        # brittle if the stored texts change. Confirm this is intentional.
        hits = await memory.search(collection="aboutMe", query=query, limit=1)
        assert hits[0].text == expected
Loading