diff --git a/README.md b/README.md
index 4acb39c..461aa0e 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@ The Bee Agent Framework makes it easy to build scalable agent-based workflows wi
 - 💾 **Memory**: Multiple [strategies](https://github.com/i-am-bee/bee-agent-framework/blob/main/docs/overview.md#memory) to optimize token spend.
 - ⏸️ **Serialization** Handle complex agentic workflows and easily pause/resume them [without losing state](https://github.com/i-am-bee/bee-agent-framework/blob/main/docs/overview.md#serializer).
 - 🔍 **Traceability**: Get full visibility of your agent’s inner workings, [log](https://github.com/i-am-bee/bee-agent-framework/blob/main/docs/overview.md#logger) all running events, and use our MLflow integration (coming soon) to debug performance.
-- 🎛️ **Production-level** control with [caching](https://github.com/i-am-bee/bee-agent-framework/blob/main/docs/overview.md#cache) and [error handling](https://github.com/i-am-bee/bee-agent-framework/blob/main/docs/overview.md#errors).
+- 🎛️ **Production-level** control with [caching](./docs/cache.md) and [error handling](https://github.com/i-am-bee/bee-agent-framework/blob/main/docs/overview.md#errors).
 - 🚧 (Coming soon) **API**: Configure and deploy your agents with a production-hardened API.
 - 🚧 (Coming soon) **Chat UI**: Serve your agent to users in a delightful GUI with built-in transparency, explainability, and user controls.
 - ... more on our [Roadmap](#roadmap)
@@ -139,7 +139,7 @@ The source directory (`src`) provides numerous modules that one can use.
 | **template** | Prompt Templating system based on `Mustache` with various improvements\_. |
 | **memory** | Various types of memories to use with agent. |
 | [**tools**](./docs/tools.md) | Tools that an agent can use. |
-| **cache** | Preset of different caching approaches that can be used together with tools. |
+| [**cache**](./docs/cache.md) | Preset of different caching approaches that can be used together with tools. |
 | **errors** | Base framework error classes used by each module. |
 | **adapters** | Concrete implementations of given modules for different environments. |
 | **logger** | Core component for logging all actions within the framework. |
diff --git a/docs/cache.md b/docs/cache.md
new file mode 100644
index 0000000..331f128
--- /dev/null
+++ b/docs/cache.md
@@ -0,0 +1,343 @@
+# Cache
+
+> [!TIP]
+>
+> Location within the framework `bee-agent-framework/cache`.
+
+Caching is a process used to temporarily store copies of data or computations in a cache (a storage location) to facilitate faster access upon future requests. The primary purpose of caching is to improve the efficiency and performance of systems by reducing the need to repeatedly fetch or compute the same data from a slower or more resource-intensive source.
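+
+In code, caching typically boils down to a "get or compute" pattern: look the value up first, and only compute and store it when it is missing. Below is a minimal sketch using the `UnconstrainedCache` described later on this page (the function name and its data are made up purely for illustration):
+
+```ts
+import { UnconstrainedCache } from "bee-agent-framework/cache/unconstrainedCache";
+
+const cache = new UnconstrainedCache<string>();
+
+async function loadUserProfile(userId: string): Promise<string> {
+  const cached = await cache.get(userId);
+  if (cached !== undefined) {
+    return cached; // fast path: served from the cache
+  }
+  const profile = `profile-of-${userId}`; // stands in for a slow fetch or computation
+  await cache.set(userId, profile);
+  return profile;
+}
+
+await loadUserProfile("123"); // computed and stored
+await loadUserProfile("123"); // returned from the cache
+```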
+
+## Usage
+
+### Capabilities showcase
+
+```ts
+import { UnconstrainedCache } from "bee-agent-framework/cache/unconstrainedCache";
+
+const cache = new UnconstrainedCache();
+
+// Save
+await cache.set("a", 1);
+await cache.set("b", 2);
+
+// Read
+const result = await cache.get("a");
+console.log(result); // 1
+
+// Meta
+console.log(cache.enabled); // true
+console.log(await cache.has("a")); // true
+console.log(await cache.has("b")); // true
+console.log(await cache.has("c")); // false
+console.log(await cache.size()); // 2
+
+// Delete
+await cache.delete("a");
+console.log(await cache.has("a")); // false
+
+// Clear
+await cache.clear();
+console.log(await cache.size()); // 0
+```
+
+_Source: [examples/cache/unconstrainedCache.ts](/examples/cache/unconstrainedCache.ts)_
+
+### Caching function output + intermediate steps
+
+```ts
+import { UnconstrainedCache } from "bee-agent-framework/cache/unconstrainedCache";
+
+const cache = new UnconstrainedCache<number>();
+
+async function fibonacci(n: number): Promise<number> {
+  const cacheKey = n.toString();
+  const cached = await cache.get(cacheKey);
+  if (cached !== undefined) {
+    return cached;
+  }
+
+  const result = n < 1 ? 0 : n <= 2 ? 1 : (await fibonacci(n - 1)) + (await fibonacci(n - 2));
+  await cache.set(cacheKey, result);
+  return result;
+}
+
+console.info(await fibonacci(10)); // 55
+console.info(await fibonacci(9)); // 34 (retrieved from cache)
+console.info(`Cache size ${await cache.size()}`); // 10
+```
+
+_Source: [examples/cache/unconstrainedCacheFunction.ts](/examples/cache/unconstrainedCacheFunction.ts)_
+
+### Usage with tools
+
+```ts
+import { DuckDuckGoSearchTool } from "bee-agent-framework/tools/search/duckDuckGoSearch";
+import { SlidingCache } from "bee-agent-framework/cache/slidingCache";
+
+const ddg = new DuckDuckGoSearchTool({
+  cache: new SlidingCache({
+    size: 100, // max 100 entries
+    ttl: 5 * 60 * 1000, // 5 minutes lifespan
+  }),
+});
+
+const response = await ddg.run({
+  query: "the time of the fastest marathon run",
+  page: 1,
+});
+// subsequent requests with the exact same input will be retrieved from the cache
+```
+
+_Source: [examples/cache/toolCache.ts](/examples/cache/toolCache.ts)_
+
+> [!IMPORTANT]
+>
+> The cache key is created by serializing the function parameters (the order of keys in the object does not matter).
+
+## Cache types
+
+The framework provides multiple out-of-the-box cache implementations.
+
+### UnconstrainedCache
+
+Unlimited in size.
+
+```ts
+import { UnconstrainedCache } from "bee-agent-framework/cache/unconstrainedCache";
+const cache = new UnconstrainedCache();
+
+await cache.set("a", 1);
+console.log(await cache.has("a")); // true
+console.log(await cache.size()); // 1
+```
+
+### SlidingCache
+
+Keeps the last `k` entries in memory; the oldest ones are deleted.
+
+```ts
+import { SlidingCache } from "bee-agent-framework/cache/slidingCache";
+
+const cache = new SlidingCache<number>({
+  size: 3, // (required) number of items that can live in the cache at a single moment
+  ttl: 1000, // (optional, default is Infinity) Time in milliseconds after which the element is removed from the cache
+});
+
+await cache.set("a", 1);
+await cache.set("b", 2);
+await cache.set("c", 3);
+
+await cache.set("d", 4); // overflow - cache internally removes the oldest entry (key "a")
+console.log(await cache.has("a")); // false
+console.log(await cache.size()); // 3
+```
+
+_Source: [examples/cache/slidingCache.ts](/examples/cache/slidingCache.ts)_
+
+### FileCache
+
+One may want to persist data to a file so that it can be loaded later. In that case, `FileCache` is the ideal candidate.
+You have to provide a location where the cache is persisted.
+
+```ts
+import { FileCache } from "bee-agent-framework/cache/fileCache";
+import * as os from "node:os";
+
+const cache = new FileCache({
+  fullPath: `${os.tmpdir()}/bee_file_cache.json`,
+});
+console.log(`Saving cache to "${cache.source}"`);
+await cache.set("abc", { firstName: "John", lastName: "Doe" });
+```
+
+_Source: [examples/cache/fileCache.ts](/examples/cache/fileCache.ts)_
+
+> [!NOTE]
+>
+> The provided location (`fullPath`) doesn't have to exist. It gets created automatically when needed.
+
+> [!NOTE]
+>
+> Every modification to the cache (adding, deleting, clearing) immediately updates the target file.
+
+#### Using a custom provider
+
+```ts
+import { FileCache } from "bee-agent-framework/cache/fileCache";
+import { UnconstrainedCache } from "bee-agent-framework/cache/unconstrainedCache";
+import os from "node:os";
+
+const memoryCache = new UnconstrainedCache();
+await memoryCache.set("a", 1);
+
+const fileCache = await FileCache.fromProvider(memoryCache, {
+  fullPath: `${os.tmpdir()}/bee_file_cache.json`,
+});
+console.log(`Saving cache to "${fileCache.source}"`);
+console.log(await fileCache.get("a")); // 1
+```
+
+_Source: [examples/cache/fileCacheCustomProvider.ts](/examples/cache/fileCacheCustomProvider.ts)_
+
+### NullCache
+
+`NullCache` is a special type of cache that implements the `BaseCache` interface but does nothing.
+
+It exists to enable the [Null object pattern](https://en.wikipedia.org/wiki/Null_object_pattern).
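+
+A minimal sketch of how that plays out in practice (assuming `NullCache` is exposed at `bee-agent-framework/cache/nullCache` and behaves as a no-op that never reports a hit; the helper function is purely illustrative):
+
+```ts
+import { NullCache } from "bee-agent-framework/cache/nullCache";
+import { UnconstrainedCache } from "bee-agent-framework/cache/unconstrainedCache";
+import { BaseCache } from "bee-agent-framework/cache/base";
+
+// Works with any BaseCache implementation; the caller decides whether caching is real or a no-op.
+async function expensiveSquare(n: number, cache: BaseCache<number>): Promise<number> {
+  const key = n.toString();
+  const cached = await cache.get(key);
+  if (cached !== undefined) {
+    return cached;
+  }
+  const result = n * n; // stands in for an expensive computation
+  await cache.set(key, result);
+  return result;
+}
+
+console.log(await expensiveSquare(4, new UnconstrainedCache<number>())); // 16 (a second call would be a cache hit)
+console.log(await expensiveSquare(4, new NullCache<number>())); // 16 (always recomputed, nothing is stored)
+```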
+
+### @Cache (decorator cache)
+
+```ts
+import { Cache } from "bee-agent-framework/cache/decoratorCache";
+
+class Generator {
+  @Cache()
+  get(seed: number) {
+    return (Math.random() * 1000) / Math.max(seed, 1);
+  }
+}
+
+const generator = new Generator();
+const a = generator.get(5);
+const b = generator.get(5);
+console.info(a === b); // true
+console.info(a === generator.get(6)); // false
+```
+
+_Source: [examples/cache/decoratorCache.ts](/examples/cache/decoratorCache.ts)_
+
+**Complex example**
+
+```ts
+import { Cache, SingletonCacheKeyFn } from "bee-agent-framework/cache/decoratorCache";
+
+class MyService {
+  @Cache({
+    cacheKey: SingletonCacheKeyFn,
+    ttl: 3600,
+    enumerable: true,
+    enabled: true,
+  })
+  get id() {
+    return Math.floor(Math.random() * 1000);
+  }
+
+  reset() {
+    Cache.getInstance(this, "id").clear();
+  }
+}
+
+const service = new MyService();
+const a = service.id;
+console.info(a === service.id); // true
+service.reset();
+console.info(a === service.id); // false
+```
+
+_Source: [examples/cache/decoratorCacheComplex.ts](/examples/cache/decoratorCacheComplex.ts)_
+
+> [!NOTE]
+>
+> The default `cacheKey` function is `ObjectHashKeyFn`.
+
+> [!CAUTION]
+>
+> Calling a method annotated with the `@Cache` decorator with different parameters (even if the method does not use them) bypasses the cache, because different arguments produce different cache keys.
+> Be aware of that. If you want your method to always return the same response, use `SingletonCacheKeyFn`.
+
+### CacheFn
+
+Because the previously mentioned `@Cache` decorator can only be applied to class methods/getters, the framework
+also provides a way to do caching at the function level.
+
+```ts
+import { CacheFn } from "bee-agent-framework/cache/decoratorCache";
+import { setTimeout } from "node:timers/promises";
+
+const getSecret = CacheFn.create(
+  async () => {
+    // instead of mocking the response, you would do a real fetch request
+    const response = await Promise.resolve({ secret: Math.random(), expiresIn: 100 });
+    getSecret.updateTTL(response.expiresIn);
+    return response.secret;
+  },
+  {}, // options object
+);
+
+const token = await getSecret();
+console.info(token === (await getSecret())); // true
+await setTimeout(150);
+console.info(token === (await getSecret())); // false
+```
+
+_Source: [examples/cache/cacheFn.ts](/examples/cache/cacheFn.ts)_
+
+> [!NOTE]
+>
+> Internally, the function is wrapped as a class; therefore, the same rules apply here as if it were a method annotated with the `@Cache` decorator.
+
+## Creating a custom cache provider
+
+To create your own cache implementation, you must extend the `BaseCache` class.
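+
+For orientation, here is a minimal sketch of what a working provider could look like (an illustrative, non-official example: the `MapCache` name is a placeholder and the backing store is a plain `Map`; only the methods from the skeleton below are overridden):
+
+```ts
+import { BaseCache } from "bee-agent-framework/cache/base";
+
+export class MapCache<T> extends BaseCache<T> {
+  protected storage = new Map<string, T>();
+
+  async size(): Promise<number> {
+    return this.storage.size;
+  }
+
+  async set(key: string, value: T): Promise<void> {
+    this.storage.set(key, value);
+  }
+
+  async get(key: string): Promise<T | undefined> {
+    return this.storage.get(key);
+  }
+
+  async has(key: string): Promise<boolean> {
+    return this.storage.has(key);
+  }
+
+  async delete(key: string): Promise<boolean> {
+    return this.storage.delete(key);
+  }
+
+  async clear(): Promise<void> {
+    this.storage.clear();
+  }
+
+  createSnapshot() {
+    return { entries: Array.from(this.storage.entries()) };
+  }
+
+  loadSnapshot(snapshot: ReturnType<typeof this.createSnapshot>): void {
+    this.storage = new Map(snapshot.entries);
+  }
+}
+```
+
+The full skeleton of the methods to provide looks as follows.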
+
+```ts
+import { BaseCache } from "bee-agent-framework/cache/base";
+import { NotImplementedError } from "bee-agent-framework/errors";
+
+export class CustomCache<T> extends BaseCache<T> {
+  size(): Promise<number> {
+    throw new NotImplementedError();
+  }
+
+  set(key: string, value: T): Promise<void> {
+    throw new NotImplementedError();
+  }
+
+  get(key: string): Promise<T | undefined> {
+    throw new NotImplementedError();
+  }
+
+  has(key: string): Promise<boolean> {
+    throw new NotImplementedError();
+  }
+
+  delete(key: string): Promise<boolean> {
+    throw new NotImplementedError();
+  }
+
+  clear(): Promise<void> {
+    throw new NotImplementedError();
+  }
+
+  createSnapshot() {
+    throw new NotImplementedError();
+  }
+
+  loadSnapshot(snapshot: ReturnType<typeof this.createSnapshot>): void {
+    throw new NotImplementedError();
+  }
+}
+```
+
+_Source: [examples/cache/custom.ts](/examples/cache/custom.ts)_
+
+The simplest implementation is `UnconstrainedCache`, which can be found [here](/src/cache/unconstrainedCache.ts).
diff --git a/examples/cache/cacheFn.ts b/examples/cache/cacheFn.ts
new file mode 100644
index 0000000..3d21268
--- /dev/null
+++ b/examples/cache/cacheFn.ts
@@ -0,0 +1,17 @@
+import { CacheFn } from "bee-agent-framework/cache/decoratorCache";
+import { setTimeout } from "node:timers/promises";
+
+const getSecret = CacheFn.create(
+  async () => {
+    // instead of mocking the response, you would do a real fetch request
+    const response = await Promise.resolve({ secret: Math.random(), expiresIn: 100 });
+    getSecret.updateTTL(response.expiresIn);
+    return response.secret;
+  },
+  {}, // options object
+);
+
+const token = await getSecret();
+console.info(token === (await getSecret())); // true
+await setTimeout(150);
+console.info(token === (await getSecret())); // false
diff --git a/examples/cache/custom.ts b/examples/cache/custom.ts
new file mode 100644
index 0000000..7559e31
--- /dev/null
+++ b/examples/cache/custom.ts
@@ -0,0 +1,36 @@
+import { BaseCache } from "bee-agent-framework/cache/base";
+import { NotImplementedError } from "bee-agent-framework/errors";
+
+export class CustomCache<T> extends BaseCache<T> {
+  size(): Promise<number> {
+    throw new NotImplementedError();
+  }
+
+  set(key: string, value: T): Promise<void> {
+    throw new NotImplementedError();
+  }
+
+  get(key: string): Promise<T | undefined> {
+    throw new NotImplementedError();
+  }
+
+  has(key: string): Promise<boolean> {
+    throw new NotImplementedError();
+  }
+
+  delete(key: string): Promise<boolean> {
+    throw new NotImplementedError();
+  }
+
+  clear(): Promise<void> {
+    throw new NotImplementedError();
+  }
+
+  createSnapshot() {
+    throw new NotImplementedError();
+  }
+
+  loadSnapshot(snapshot: ReturnType<typeof this.createSnapshot>): void {
+    throw new NotImplementedError();
+  }
+}
diff --git a/examples/cache/decoratorCache.ts b/examples/cache/decoratorCache.ts
new file mode 100644
index 0000000..ea7db9d
--- /dev/null
+++ b/examples/cache/decoratorCache.ts
@@ -0,0 +1,14 @@
+import { Cache } from "bee-agent-framework/cache/decoratorCache";
+
+class Generator {
+  @Cache()
+  get(seed: number) {
+    return (Math.random() * 1000) / Math.max(seed, 1);
+  }
+}
+
+const generator = new Generator();
+const a = generator.get(5);
+const b = generator.get(5);
+console.info(a === b); // true
+console.info(a === generator.get(6)); // false
diff --git a/examples/cache/decoratorCacheComplex.ts b/examples/cache/decoratorCacheComplex.ts
new file mode 100644
index 0000000..797f3a8
--- /dev/null
+++ b/examples/cache/decoratorCacheComplex.ts
@@ -0,0 +1,23 @@
+import { Cache, SingletonCacheKeyFn } from "bee-agent-framework/cache/decoratorCache";
+
+class MyService {
+  @Cache({
+    cacheKey: SingletonCacheKeyFn,
+    ttl: 3600,
+    enumerable: true,
+    enabled: true,
+  })
+  get id() {
+    return Math.floor(Math.random() * 1000);
+  }
+
+  reset() {
+    Cache.getInstance(this, "id").clear();
+  }
+}
+
+const service = new MyService();
+const a = service.id;
+console.info(a === service.id); // true
+service.reset();
+console.info(a === service.id); // false
diff --git a/examples/cache/fileCache.ts b/examples/cache/fileCache.ts
new file mode 100644
index 0000000..be08ad0
--- /dev/null
+++ b/examples/cache/fileCache.ts
@@ -0,0 +1,8 @@
+import { FileCache } from "bee-agent-framework/cache/fileCache";
+import * as os from "node:os";
+
+const cache = new FileCache({
+  fullPath: `${os.tmpdir()}/bee_file_cache.json`,
+});
+console.log(`Saving cache to "${cache.source}"`);
+await cache.set("abc", { firstName: "John", lastName: "Doe" });
diff --git a/examples/cache/fileCacheCustomProvider.ts b/examples/cache/fileCacheCustomProvider.ts
new file mode 100644
index 0000000..2e9313e
--- /dev/null
+++ b/examples/cache/fileCacheCustomProvider.ts
@@ -0,0 +1,12 @@
+import { FileCache } from "bee-agent-framework/cache/fileCache";
+import { UnconstrainedCache } from "bee-agent-framework/cache/unconstrainedCache";
+import os from "node:os";
+
+const memoryCache = new UnconstrainedCache();
+await memoryCache.set("a", 1);
+
+const fileCache = await FileCache.fromProvider(memoryCache, {
+  fullPath: `${os.tmpdir()}/bee_file_cache.json`,
+});
+console.log(`Saving cache to "${fileCache.source}"`);
+console.log(await fileCache.get("a")); // 1
diff --git a/examples/cache/slidingCache.ts b/examples/cache/slidingCache.ts
new file mode 100644
index 0000000..528e3c5
--- /dev/null
+++ b/examples/cache/slidingCache.ts
@@ -0,0 +1,14 @@
+import { SlidingCache } from "bee-agent-framework/cache/slidingCache";
+
+const cache = new SlidingCache<number>({
+  size: 3, // (required) number of items that can live in the cache at a single moment
+  ttl: 1000, // (optional, default is Infinity) Time in milliseconds after which the element is removed from the cache
+});
+
+await cache.set("a", 1);
+await cache.set("b", 2);
+await cache.set("c", 3);
+
+await cache.set("d", 4); // overflow - cache internally removes the oldest entry (key "a")
+console.log(await cache.has("a")); // false
+console.log(await cache.size()); // 3
diff --git a/examples/cache/toolCache.ts b/examples/cache/toolCache.ts
new file mode 100644
index 0000000..afc6f41
--- /dev/null
+++ b/examples/cache/toolCache.ts
@@ -0,0 +1,15 @@
+import { DuckDuckGoSearchTool } from "bee-agent-framework/tools/search/duckDuckGoSearch";
+import { SlidingCache } from "bee-agent-framework/cache/slidingCache";
+
+const ddg = new DuckDuckGoSearchTool({
+  cache: new SlidingCache({
+    size: 100, // max 100 entries
+    ttl: 5 * 60 * 1000, // 5 minutes lifespan
+  }),
+});
+
+const response = await ddg.run({
+  query: "the time of the fastest marathon run",
+  page: 1,
+});
+// subsequent requests with the exact same input will be retrieved from the cache
diff --git a/examples/cache/unconstrainedCache.ts b/examples/cache/unconstrainedCache.ts
new file mode 100644
index 0000000..893adc2
--- /dev/null
+++ b/examples/cache/unconstrainedCache.ts
@@ -0,0 +1,26 @@
+import { UnconstrainedCache } from "bee-agent-framework/cache/unconstrainedCache";
+
+const cache = new UnconstrainedCache();
+
+// Save
+await cache.set("a", 1);
+await cache.set("b", 2);
+
+// Read
+const result = await cache.get("a");
+console.log(result); // 1
+
+// Meta
+console.log(cache.enabled); // true
+console.log(await cache.has("a")); // true
+console.log(await cache.has("b")); // true
+console.log(await cache.has("c")); // false
+console.log(await cache.size()); // 2
+
+// Delete
+await cache.delete("a");
+console.log(await cache.has("a")); // false
+
+// Clear
+await cache.clear();
+console.log(await cache.size()); // 0
diff --git a/examples/cache/unconstrainedCacheFunction.ts b/examples/cache/unconstrainedCacheFunction.ts
new file mode 100644
index 0000000..ce1237c
--- /dev/null
+++ b/examples/cache/unconstrainedCacheFunction.ts
@@ -0,0 +1,19 @@
+import { UnconstrainedCache } from "bee-agent-framework/cache/unconstrainedCache";
+
+const cache = new UnconstrainedCache<number>();
+
+async function fibonacci(n: number): Promise<number> {
+  const cacheKey = n.toString();
+  const cached = await cache.get(cacheKey);
+  if (cached !== undefined) {
+    return cached;
+  }
+
+  const result = n < 1 ? 0 : n <= 2 ? 1 : (await fibonacci(n - 1)) + (await fibonacci(n - 2));
+  await cache.set(cacheKey, result);
+  return result;
+}
+
+console.info(await fibonacci(10)); // 55
+console.info(await fibonacci(9)); // 34 (retrieved from cache)
+console.info(`Cache size ${await cache.size()}`); // 10