Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
105 changes: 80 additions & 25 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ itertools = "0.13.0"
lazy_static = "1.5.0"
prost = "0.13"
prost-types = "0.13"
redis = { version = "0.31.0", features = ["aio", "connection-manager", "tokio-comp"] }
regex = "1.5.4"
reqwest = "0.12.15"
serde = { version = "1.0.126", features = ["rc"] }
Expand Down
6 changes: 6 additions & 0 deletions docs/environment-variables.md
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,12 @@ those.
file not found or logical issue working as a safety mechanism to
prevent infinite spamming of IPFS servers and network congestion
(default: 100 000).
- `GRAPH_IPFS_CACHE_LOCATION`: When set, files retrieved from IPFS will be
  cached in that location; future accesses to the same file will be served
  from the cache rather than from IPFS. This can either be a URL starting
  with `redis://`, in which case a Redis instance must be running at that
  URL, or an absolute file system path, which must be a directory writable
  by the `graph-node` process (experimental).

## GraphQL

Expand Down
3 changes: 2 additions & 1 deletion graph/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ num-bigint = { version = "=0.2.6", features = ["serde"] }
num-integer = { version = "=0.1.46" }
num-traits = "=0.2.19"
rand.workspace = true
redis = { workspace = true }
regex = "1.5.4"
semver = { version = "1.0.23", features = ["serde"] }
serde = { workspace = true }
Expand Down Expand Up @@ -85,7 +86,7 @@ tonic = { workspace = true }
prost = { workspace = true }
prost-types = { workspace = true }

futures03 = { version = "0.3.1", package = "futures", features = ["compat"] }
futures03 = { version = "0.3.31", package = "futures", features = ["compat"] }
wasmparser = "0.118.1"
thiserror = "2.0.12"
parking_lot = "0.12.3"
Expand Down
35 changes: 1 addition & 34 deletions graph/src/components/link_resolver/ipfs.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use std::sync::Arc;
use std::sync::Mutex;
use std::time::Duration;

use anyhow::anyhow;
Expand All @@ -9,7 +8,6 @@ use derivative::Derivative;
use futures03::compat::Stream01CompatExt;
use futures03::stream::StreamExt;
use futures03::stream::TryStreamExt;
use lru_time_cache::LruCache;
use serde_json::Value;

use crate::derive::CheapClone;
Expand All @@ -30,13 +28,9 @@ pub struct IpfsResolver {
#[derivative(Debug = "ignore")]
client: Arc<dyn IpfsClient>,

#[derivative(Debug = "ignore")]
cache: Arc<Mutex<LruCache<ContentPath, Vec<u8>>>>,

timeout: Duration,
max_file_size: usize,
max_map_file_size: usize,
max_cache_file_size: usize,

/// When set to `true`, it means infinite retries, ignoring the timeout setting.
retry: bool,
Expand All @@ -48,13 +42,9 @@ impl IpfsResolver {

Self {
client,
cache: Arc::new(Mutex::new(LruCache::with_capacity(
env.max_ipfs_cache_size as usize,
))),
timeout: env.ipfs_timeout,
max_file_size: env.max_ipfs_file_bytes,
max_map_file_size: env.max_ipfs_map_file_size,
max_cache_file_size: env.max_ipfs_cache_file_size,
retry: false,
}
}
Expand All @@ -74,18 +64,10 @@ impl LinkResolverTrait for IpfsResolver {
Box::new(s)
}

async fn cat(&self, logger: &Logger, link: &Link) -> Result<Vec<u8>, Error> {
async fn cat(&self, _logger: &Logger, link: &Link) -> Result<Vec<u8>, Error> {
let path = ContentPath::new(&link.link)?;
let timeout = self.timeout;
let max_file_size = self.max_file_size;
let max_cache_file_size = self.max_cache_file_size;

if let Some(data) = self.cache.lock().unwrap().get(&path) {
trace!(logger, "IPFS cat cache hit"; "hash" => path.to_string());
return Ok(data.to_owned());
}

trace!(logger, "IPFS cat cache miss"; "hash" => path.to_string());

let (timeout, retry_policy) = if self.retry {
(None, RetryPolicy::NonDeterministic)
Expand All @@ -100,21 +82,6 @@ impl LinkResolverTrait for IpfsResolver {
.await?
.to_vec();

if data.len() <= max_cache_file_size {
let mut cache = self.cache.lock().unwrap();

if !cache.contains_key(&path) {
cache.insert(path.clone(), data.clone());
}
} else {
debug!(
logger,
"IPFS file too large for cache";
"path" => path.to_string(),
"size" => data.len(),
);
}

Ok(data)
}

Expand Down
Loading
Loading