Skip to content

Commit 2a2ddb6

Browse files
Update examples to work with refactored API
- Add store extras ([memory], [disk]) to all dependency files - Fix hatch build configuration in pyproject.toml files - Update TTLClampWrapper to require both min_ttl and max_ttl - Update StatisticsWrapper API to use new nested structure - Update DiskStore parameter from root_directory to directory - Update RetryWrapper parameter from base_delay to initial_delay - Update PassthroughCacheWrapper parameters to primary_key_value/cache_key_value - Update FernetEncryptionWrapper to use fernet instance instead of key - Update FallbackWrapper parameters to primary_key_value/fallback_key_value - Update LimitSizeWrapper parameter from max_size_bytes to max_size - Move StatisticsWrapper to top of stack in trading_data for better tracking - Add raise_on_decryption_error=False for graceful encryption failures All tests passing: chat_app (7/7), trading_data (9/9), web_scraper_cache (13/13) Co-authored-by: William Easton <strawgate@users.noreply.github.com>
1 parent ebf8c02 commit 2a2ddb6

File tree

9 files changed

+62
-37
lines changed

9 files changed

+62
-37
lines changed

examples/chat_app/chat_app.py

Lines changed: 16 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -42,10 +42,10 @@ def __init__(self):
4242

4343
# Wrapper stack (applied inside-out):
4444
# 1. StatisticsWrapper - Track operation metrics
45-
# 2. TTLClampWrapper - Enforce max TTL of 24 hours (86400 seconds)
45+
# 2. TTLClampWrapper - Enforce TTL between 1 hour and 24 hours
4646
# 3. LoggingWrapper - Log all operations for debugging
4747
stats = StatisticsWrapper(key_value=base_store)
48-
ttl_clamped = TTLClampWrapper(key_value=stats, max_ttl=86400) # 24 hours
48+
ttl_clamped = TTLClampWrapper(key_value=stats, min_ttl=3600, max_ttl=86400) # 1 hour min, 24 hours max
4949
wrapped_store = LoggingWrapper(key_value=ttl_clamped)
5050

5151
# PydanticAdapter for type-safe message storage/retrieval
@@ -112,18 +112,25 @@ async def delete_message(self, conversation_id: str, message_id: str) -> bool:
112112

113113
def get_statistics(self) -> dict[str, int]:
114114
"""
115-
Get operation statistics.
115+
Get operation statistics across all conversations.
116116
117117
Returns:
118-
Dictionary with operation counts (puts, gets, deletes, etc.)
118+
Dictionary with aggregated operation counts (puts, gets, deletes, etc.)
119119
"""
120120
if isinstance(self.stats_wrapper, StatisticsWrapper):
121+
# Aggregate statistics across all collections (conversations)
122+
total_puts = sum(coll_stats.put.count for coll_stats in self.stats_wrapper.statistics.collections.values())
123+
total_gets = sum(coll_stats.get.count for coll_stats in self.stats_wrapper.statistics.collections.values())
124+
total_deletes = sum(coll_stats.delete.count for coll_stats in self.stats_wrapper.statistics.collections.values())
125+
get_hits = sum(coll_stats.get.hit for coll_stats in self.stats_wrapper.statistics.collections.values())
126+
get_misses = sum(coll_stats.get.miss for coll_stats in self.stats_wrapper.statistics.collections.values())
127+
121128
return {
122-
"total_puts": self.stats_wrapper.total_puts,
123-
"total_gets": self.stats_wrapper.total_gets,
124-
"total_deletes": self.stats_wrapper.total_deletes,
125-
"get_hits": self.stats_wrapper.get_hits,
126-
"get_misses": self.stats_wrapper.get_misses,
129+
"total_puts": total_puts,
130+
"total_gets": total_gets,
131+
"total_deletes": total_deletes,
132+
"get_hits": get_hits,
133+
"get_misses": get_misses,
127134
}
128135
return {}
129136

examples/chat_app/pyproject.toml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ version = "0.1.0"
44
description = "Simple async chat application using py-key-value"
55
requires-python = ">=3.10"
66
dependencies = [
7-
"py-key-value-aio>=0.2.8",
7+
"py-key-value-aio[memory]>=0.2.8",
88
"pydantic>=2.0.0,<3.0.0",
99
]
1010

@@ -17,6 +17,9 @@ dev = [
1717
[tool.pytest.ini_options]
1818
asyncio_mode = "auto"
1919

20+
[tool.hatch.build.targets.wheel]
21+
packages = ["."]
22+
2023
[build-system]
2124
requires = ["hatchling"]
2225
build-backend = "hatchling.build"

examples/chat_app/requirements.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
1-
py-key-value-aio>=0.2.8
1+
py-key-value-aio[memory]>=0.2.8
22
pydantic>=2.0.0,<3.0.0

examples/trading_data/pyproject.toml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ version = "0.1.0"
44
description = "Trading data caching using py-key-value with compression and multi-tier caching"
55
requires-python = ">=3.10"
66
dependencies = [
7-
"py-key-value-aio>=0.2.8",
7+
"py-key-value-aio[disk,memory]>=0.2.8",
88
"pydantic>=2.0.0,<3.0.0",
99
]
1010

@@ -17,6 +17,9 @@ dev = [
1717
[tool.pytest.ini_options]
1818
asyncio_mode = "auto"
1919

20+
[tool.hatch.build.targets.wheel]
21+
packages = ["."]
22+
2023
[build-system]
2124
requires = ["hatchling"]
2225
build-backend = "hatchling.build"
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
1-
py-key-value-aio>=0.2.8
1+
py-key-value-aio[disk,memory]>=0.2.8
22
pydantic>=2.0.0,<3.0.0

examples/trading_data/trading_app.py

Lines changed: 22 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -50,22 +50,24 @@ def __init__(self, cache_dir: str = ".trading_cache"):
5050
memory_cache = MemoryStore()
5151

5252
# Tier 2: Disk cache for historical data
53-
disk_cache = DiskStore(root_directory=cache_dir)
53+
disk_cache = DiskStore(directory=cache_dir)
5454

5555
# Wrapper stack (applied inside-out):
56-
# 1. StatisticsWrapper - Track cache metrics
57-
# 2. RetryWrapper - Handle transient failures (3 retries with exponential backoff)
58-
# 3. LoggingWrapper - Log operations for debugging
59-
# 4. PassthroughCacheWrapper - Two-tier caching (memory → disk)
60-
stats = StatisticsWrapper(key_value=disk_cache)
61-
retry_wrapper = RetryWrapper(key_value=stats, max_retries=3, base_delay=0.1)
56+
# 1. RetryWrapper - Handle transient failures on disk (3 retries with exponential backoff)
57+
# 2. LoggingWrapper - Log disk operations for debugging
58+
# 3. PassthroughCacheWrapper - Two-tier caching (memory → disk)
59+
# 4. StatisticsWrapper - Track all cache metrics (wraps everything)
60+
retry_wrapper = RetryWrapper(key_value=disk_cache, max_retries=3, initial_delay=0.1)
6261
disk_with_logging = LoggingWrapper(key_value=retry_wrapper)
6362

64-
cache_store = PassthroughCacheWrapper(cache=memory_cache, key_value=disk_with_logging)
63+
cache_store = PassthroughCacheWrapper(primary_key_value=disk_with_logging, cache_key_value=memory_cache)
64+
65+
# Wrap the entire cache stack with statistics to track all operations
66+
stats = StatisticsWrapper(key_value=cache_store)
6567

6668
# PydanticAdapter for type-safe price data storage/retrieval
6769
self.adapter: PydanticAdapter[PriceData] = PydanticAdapter[PriceData](
68-
key_value=cache_store,
70+
key_value=stats,
6971
pydantic_model=PriceData,
7072
)
7173

@@ -138,25 +140,28 @@ async def delete_price(self, symbol: str, data_id: str) -> bool:
138140
"""
139141
return await self.adapter.delete(collection=f"symbol:{symbol}", key=data_id)
140142

141-
def get_cache_statistics(self) -> dict[str, int]:
143+
def get_cache_statistics(self) -> dict[str, int | float]:
142144
"""
143-
Get cache performance statistics.
145+
Get cache performance statistics across all symbols.
144146
145147
Returns:
146-
Dictionary with cache metrics (hits, misses, operations)
148+
Dictionary with aggregated cache metrics (hits, misses, operations)
147149
"""
148150
if isinstance(self.stats_wrapper, StatisticsWrapper):
149-
total_gets = self.stats_wrapper.total_gets
150-
hits = self.stats_wrapper.get_hits
151-
misses = self.stats_wrapper.get_misses
151+
# Aggregate statistics across all collections (symbols)
152+
total_puts = sum(coll_stats.put.count for coll_stats in self.stats_wrapper.statistics.collections.values())
153+
total_gets = sum(coll_stats.get.count for coll_stats in self.stats_wrapper.statistics.collections.values())
154+
total_deletes = sum(coll_stats.delete.count for coll_stats in self.stats_wrapper.statistics.collections.values())
155+
hits = sum(coll_stats.get.hit for coll_stats in self.stats_wrapper.statistics.collections.values())
156+
misses = sum(coll_stats.get.miss for coll_stats in self.stats_wrapper.statistics.collections.values())
152157

153158
return {
154159
"total_gets": total_gets,
155160
"cache_hits": hits,
156161
"cache_misses": misses,
157162
"hit_rate_percent": round((hits / total_gets * 100) if total_gets > 0 else 0, 2),
158-
"total_puts": self.stats_wrapper.total_puts,
159-
"total_deletes": self.stats_wrapper.total_deletes,
163+
"total_puts": total_puts,
164+
"total_deletes": total_deletes,
160165
}
161166
return {}
162167

examples/web_scraper_cache/pyproject.toml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ version = "0.1.0"
44
description = "Web scraper cache using py-key-value with encryption and size limits"
55
requires-python = ">=3.10"
66
dependencies = [
7-
"py-key-value-aio>=0.2.8",
7+
"py-key-value-aio[disk,memory]>=0.2.8",
88
"pydantic>=2.0.0,<3.0.0",
99
"cryptography>=41.0.0",
1010
]
@@ -18,6 +18,9 @@ dev = [
1818
[tool.pytest.ini_options]
1919
asyncio_mode = "auto"
2020

21+
[tool.hatch.build.targets.wheel]
22+
packages = ["."]
23+
2124
[build-system]
2225
requires = ["hatchling"]
2326
build-backend = "hatchling.build"
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
1-
py-key-value-aio>=0.2.8
1+
py-key-value-aio[disk,memory]>=0.2.8
22
pydantic>=2.0.0,<3.0.0
33
cryptography>=41.0.0

examples/web_scraper_cache/scraper.py

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
from key_value.aio.adapters.pydantic import PydanticAdapter
2020
from key_value.aio.stores.disk.store import DiskStore
2121
from key_value.aio.stores.memory.store import MemoryStore
22-
from key_value.aio.wrappers.encryption.wrapper import FernetEncryptionWrapper
22+
from key_value.aio.wrappers.encryption.fernet import FernetEncryptionWrapper
2323
from key_value.aio.wrappers.fallback.wrapper import FallbackWrapper
2424
from key_value.aio.wrappers.limit_size.wrapper import LimitSizeWrapper
2525
from key_value.aio.wrappers.ttl_clamp.wrapper import TTLClampWrapper
@@ -60,11 +60,14 @@ def __init__(self, cache_dir: str = ".scraper_cache", encryption_key: bytes | No
6060
self.encryption_key = encryption_key
6161

6262
# Primary store: Disk with encryption and size limits
63-
disk_store = DiskStore(root_directory=cache_dir)
63+
disk_store = DiskStore(directory=cache_dir)
6464

6565
# Fallback store: Memory (for when disk fails)
6666
fallback_store = MemoryStore()
6767

68+
# Create Fernet instance for encryption
69+
fernet = Fernet(encryption_key)
70+
6871
# Wrapper stack (applied inside-out):
6972
# 1. TTLClampWrapper - Enforce cache duration (min 1 hour, max 7 days)
7073
# 2. LimitSizeWrapper - Prevent huge pages (max 5MB per page)
@@ -73,10 +76,11 @@ def __init__(self, cache_dir: str = ".scraper_cache", encryption_key: bytes | No
7376
primary_with_wrappers = TTLClampWrapper(
7477
key_value=LimitSizeWrapper(
7578
key_value=FernetEncryptionWrapper(
76-
key_value=FallbackWrapper(key_value=disk_store, fallback=fallback_store),
77-
key=encryption_key,
79+
key_value=FallbackWrapper(primary_key_value=disk_store, fallback_key_value=fallback_store),
80+
fernet=fernet,
81+
raise_on_decryption_error=False, # Return None on decryption failure instead of raising
7882
),
79-
max_size_bytes=5 * 1024 * 1024, # 5MB limit
83+
max_size=5 * 1024 * 1024, # 5MB limit
8084
),
8185
min_ttl=3600, # 1 hour minimum
8286
max_ttl=7 * 24 * 3600, # 7 days maximum

0 commit comments

Comments (0)