109 changes: 1 addition & 108 deletions benchmark/Grafana-dashboard.json
@@ -845,113 +845,6 @@
       ],
       "title": "Insert Merkle Ops by Type",
       "type": "timeseries"
-    },
-    {
-      "datasource": {
-        "type": "prometheus",
-        "uid": "${DS_PROMETHEUS}"
-      },
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "palette-classic"
-          },
-          "custom": {
-            "axisBorderShow": false,
-            "axisCenteredZero": false,
-            "axisColorMode": "text",
-            "axisLabel": "",
-            "axisPlacement": "auto",
-            "barAlignment": 0,
-            "barWidthFactor": 0.6,
-            "drawStyle": "line",
-            "fillOpacity": 0,
-            "gradientMode": "none",
-            "hideFrom": {
-              "legend": false,
-              "tooltip": false,
-              "viz": false
-            },
-            "insertNulls": false,
-            "lineInterpolation": "linear",
-            "lineWidth": 1,
-            "pointSize": 5,
-            "scaleDistribution": {
-              "type": "linear"
-            },
-            "showPoints": "auto",
-            "spanNulls": false,
-            "stacking": {
-              "group": "A",
-              "mode": "none"
-            },
-            "thresholdsStyle": {
-              "mode": "off"
-            }
-          },
-          "mappings": [],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "green",
-                "value": null
-              },
-              {
-                "color": "yellow",
-                "value": 1000
-              },
-              {
-                "color": "red",
-                "value": 5000
-              }
-            ]
-          },
-          "unit": "short"
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 8,
-        "w": 12,
-        "x": 12,
-        "y": 31
-      },
-      "id": 13,
-      "options": {
-        "legend": {
-          "calcs": [
-            "min",
-            "mean",
-            "max",
-            "last"
-          ],
-          "displayMode": "table",
-          "placement": "bottom",
-          "showLegend": true
-        },
-        "tooltip": {
-          "hideZeros": false,
-          "mode": "single",
-          "sort": "none"
-        }
-      },
-      "pluginVersion": "11.5.1",
-      "targets": [
-        {
-          "editorMode": "code",
-          "expr": "firewood_nodes_unwritten",
-          "legendFormat": "Unwritten Nodes",
-          "range": true,
-          "refId": "A",
-          "datasource": {
-            "type": "prometheus",
-            "uid": "${DS_PROMETHEUS}"
-          }
-        }
-      ],
-      "title": "Unwritten Nodes (In Memory)",
-      "type": "timeseries"
     }
   ],
   "refresh": "10s",
@@ -970,4 +863,4 @@
   "uid": "adxfhfmwx5ypsc",
   "version": 12,
   "weekStart": ""
-}
+}
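Note on the deleted panel: it was a Prometheus-backed timeseries graphing the `firewood_nodes_unwritten` gauge, with a min/mean/max/last legend table and yellow/red thresholds at 1,000 and 5,000 nodes. It goes away here because the rest of this PR removes the metric that fed it. The essentials condense to a sketch like this (abridged from the full definition above, not a drop-in panel):

```json
{
  "type": "timeseries",
  "title": "Unwritten Nodes (In Memory)",
  "datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
  "targets": [
    {
      "expr": "firewood_nodes_unwritten",
      "legendFormat": "Unwritten Nodes",
      "refId": "A"
    }
  ],
  "fieldConfig": {
    "defaults": {
      "thresholds": {
        "steps": [
          { "color": "green", "value": null },
          { "color": "yellow", "value": 1000 },
          { "color": "red", "value": 5000 }
        ]
      }
    }
  }
}
```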
16 changes: 7 additions & 9 deletions storage/src/nodestore/hash.rs
@@ -118,7 +118,7 @@ where
     pub(super) fn hash_helper(
         #[cfg(feature = "ethhash")] &self,
         node: Node,
-    ) -> Result<(MaybePersistedNode, HashType, usize), FileIoError> {
+    ) -> Result<(MaybePersistedNode, HashType), FileIoError> {
         let mut root_path = Path::new();
         #[cfg(not(feature = "ethhash"))]
         let res = Self::hash_helper_inner(node, PathGuard::from_path(&mut root_path))?;
@@ -136,10 +136,9 @@
         mut node: Node,
         mut path_prefix: PathGuard<'_>,
         #[cfg(feature = "ethhash")] fake_root_extra_nibble: Option<u8>,
-    ) -> Result<(MaybePersistedNode, HashType, usize), FileIoError> {
+    ) -> Result<(MaybePersistedNode, HashType), FileIoError> {
         // If this is a branch, find all unhashed children and recursively hash them.
         trace!("hashing {node:?} at {path_prefix:?}");
-        let mut nodes_processed = 1usize; // Count this node
         if let Node::Branch(ref mut b) = node {
             // special case code for ethereum hashes at the account level
             #[cfg(feature = "ethhash")]
@@ -205,7 +204,7 @@
                 let child_node = std::mem::take(child_node);
 
                 // Hash this child and update
-                let (child_node, child_hash, child_count) = {
+                let (child_node, child_hash) = {
                     // we extend and truncate path_prefix to reduce memory allocations]
                     let mut child_path_prefix = PathGuard::new(&mut path_prefix);
                     child_path_prefix.0.extend(b.partial_path.0.iter().copied());
@@ -218,16 +217,15 @@
                     #[cfg(not(feature = "ethhash"))]
                     child_path_prefix.0.push(nibble as u8);
                     #[cfg(feature = "ethhash")]
-                    let (child_node, child_hash, child_count) =
+                    let (child_node, child_hash) =
                         self.hash_helper_inner(child_node, child_path_prefix, make_fake_root)?;
                     #[cfg(not(feature = "ethhash"))]
-                    let (child_node, child_hash, child_count) =
+                    let (child_node, child_hash) =
                         Self::hash_helper_inner(child_node, child_path_prefix)?;
 
-                    (child_node, child_hash, child_count)
+                    (child_node, child_hash)
                 };
 
-                nodes_processed = nodes_processed.saturating_add(child_count);
                 *child = Some(Child::MaybePersisted(child_node, child_hash));
                 trace!("child now {child:?}");
             }
@@ -253,7 +251,7 @@
         #[cfg(not(feature = "ethhash"))]
         let hash = hash_node(&node, &path_prefix);
 
-        Ok((SharedNode::new(node).into(), hash, nodes_processed))
+        Ok((SharedNode::new(node).into(), hash))
     }
 
     #[cfg(feature = "ethhash")]
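The only functional change in this file is the return type: `hash_helper_inner` no longer threads a processed-node count up through the recursion, so every call site that unpacked and accumulated the third tuple element is trimmed. A minimal standalone sketch of the before/after shapes, using a hypothetical `Tree` type rather than Firewood's `Node`:

```rust
// Sketch only: `Tree` and the toy hash are illustrative, not Firewood's types.
struct Tree {
    value: u64,
    children: Vec<Tree>,
}

// The "before" shape: every recursive call returns (hash, count) and the
// caller accumulates counts with saturating_add, as the removed code did.
fn hash_counting(node: &Tree) -> (u64, usize) {
    let mut nodes_processed = 1usize; // count this node
    let mut hash = node.value;
    for child in &node.children {
        let (child_hash, child_count) = hash_counting(child);
        nodes_processed = nodes_processed.saturating_add(child_count);
        hash = hash.wrapping_mul(31).wrapping_add(child_hash);
    }
    (hash, nodes_processed)
}

// The "after" shape: the count, and every tuple that carried it, disappears.
fn hash_only(node: &Tree) -> u64 {
    let mut hash = node.value;
    for child in &node.children {
        hash = hash.wrapping_mul(31).wrapping_add(hash_only(child));
    }
    hash
}

fn main() {
    let t = Tree {
        value: 1,
        children: vec![
            Tree { value: 2, children: vec![] },
            Tree { value: 3, children: vec![] },
        ],
    };
    let (h, n) = hash_counting(&t);
    assert_eq!(n, 3); // root plus two leaves
    assert_eq!(h, hash_only(&t)); // hashes agree; only the count is gone
}
```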
64 changes: 7 additions & 57 deletions storage/src/nodestore/mod.rs
@@ -44,7 +44,6 @@ pub(crate) mod header;
 pub(crate) mod persist;
 pub(crate) mod primitives;
 
-use crate::firewood_gauge;
 use crate::linear::OffsetReader;
 use crate::logger::trace;
 use crate::node::branch::ReadSerializable as _;
@@ -53,7 +52,6 @@ use arc_swap::access::DynAccess;
 use smallvec::SmallVec;
 use std::fmt::Debug;
 use std::io::{Error, ErrorKind, Read};
-use std::sync::atomic::AtomicUsize;
 
 // Re-export types from alloc module
 pub use alloc::NodeAllocator;
@@ -125,7 +123,6 @@ impl<S: ReadableStorage> NodeStore<Committed, S> {
                 deleted: Box::default(),
                 root_hash: None,
                 root: header.root_address().map(Into::into),
-                unwritten_nodes: AtomicUsize::new(0),
             },
             storage,
         };
@@ -155,7 +152,6 @@ impl<S: ReadableStorage> NodeStore<Committed, S> {
                 deleted: Box::default(),
                 root_hash: None,
                 root: None,
-                unwritten_nodes: AtomicUsize::new(0),
             },
         })
     }
@@ -362,8 +358,6 @@ pub struct Committed {
     deleted: Box<[MaybePersistedNode]>,
     root_hash: Option<TrieHash>,
     root: Option<MaybePersistedNode>,
-    /// TODO: No readers of this variable yet - will be used for tracking unwritten nodes in committed revisions
-    unwritten_nodes: AtomicUsize,
 }
 
 impl Clone for Committed {
@@ -372,10 +366,6 @@ impl Clone for Committed {
             deleted: self.deleted.clone(),
             root_hash: self.root_hash.clone(),
             root: self.root.clone(),
-            unwritten_nodes: AtomicUsize::new(
-                self.unwritten_nodes
-                    .load(std::sync::atomic::Ordering::Relaxed),
-            ),
         }
     }
 }
@@ -409,8 +399,6 @@ pub struct ImmutableProposal {
     root_hash: Option<TrieHash>,
     /// The root node, either in memory or on disk
     root: Option<MaybePersistedNode>,
-    /// The number of unwritten nodes in this proposal
-    unwritten_nodes: usize,
 }
 
 impl ImmutableProposal {
@@ -428,21 +416,6 @@ impl ImmutableProposal {
     }
 }
 
-impl Drop for ImmutableProposal {
-    fn drop(&mut self) {
-        // When an immutable proposal is dropped without being committed,
-        // decrement the gauge to reflect that these nodes will never be written
-        if self.unwritten_nodes > 0 {
-            #[allow(clippy::cast_precision_loss)]
-            firewood_gauge!(
-                "firewood.nodes.unwritten",
-                "current number of unwritten nodes"
-            )
-            .decrement(self.unwritten_nodes as f64);
-        }
-    }
-}
-
 /// Contains the state of a revision of a merkle trie.
 ///
 /// The first generic parameter is the type of the revision, which supports reading nodes from parent proposals.
@@ -505,23 +478,14 @@ impl<T: Into<NodeStoreParent>, S: ReadableStorage> From<NodeStore<T, S>>
 /// Commit a proposal to a new revision of the trie
 impl<S: WritableStorage> From<NodeStore<ImmutableProposal, S>> for NodeStore<Committed, S> {
     fn from(val: NodeStore<ImmutableProposal, S>) -> Self {
-        let NodeStore {
-            header,
-            kind,
-            storage,
-        } = val;
-        // Use ManuallyDrop to prevent the Drop impl from running since we're committing
-        let kind = std::mem::ManuallyDrop::new(kind);
-
         NodeStore {
-            header,
+            header: val.header,
             kind: Committed {
-                deleted: kind.deleted.clone(),
-                root_hash: kind.root_hash.clone(),
-                root: kind.root.clone(),
-                unwritten_nodes: AtomicUsize::new(kind.unwritten_nodes),
+                deleted: val.kind.deleted.clone(),
+                root_hash: val.kind.root_hash.clone(),
+                root: val.kind.root.clone(),
             },
-            storage,
+            storage: val.storage,
         }
     }
 }
@@ -548,7 +512,6 @@ impl<S: WritableStorage> NodeStore<Arc<ImmutableProposal>, S> {
                 deleted: self.kind.deleted.clone(),
                 root_hash: self.kind.root_hash.clone(),
                 root: self.kind.root.clone(),
-                unwritten_nodes: AtomicUsize::new(self.kind.unwritten_nodes),
             },
             storage: self.storage.clone(),
         }
@@ -574,7 +537,6 @@ impl<S: ReadableStorage> TryFrom<NodeStore<MutableProposal, S>>
                 parent: Arc::new(ArcSwap::new(Arc::new(kind.parent))),
                 root_hash: None,
                 root: None,
-                unwritten_nodes: 0,
             }),
             storage,
         };
@@ -587,31 +549,19 @@
 
         // Hashes the trie and returns the address of the new root.
         #[cfg(feature = "ethhash")]
-        let (root, root_hash, unwritten_count) = nodestore.hash_helper(root)?;
+        let (root, root_hash) = nodestore.hash_helper(root)?;
         #[cfg(not(feature = "ethhash"))]
-        let (root, root_hash, unwritten_count) =
-            NodeStore::<MutableProposal, S>::hash_helper(root)?;
+        let (root, root_hash) = NodeStore::<MutableProposal, S>::hash_helper(root)?;
 
         let immutable_proposal =
             Arc::into_inner(nodestore.kind).expect("no other references to the proposal");
-        // Use ManuallyDrop to prevent Drop from running since we're replacing the proposal
-        let immutable_proposal = std::mem::ManuallyDrop::new(immutable_proposal);
         nodestore.kind = Arc::new(ImmutableProposal {
            deleted: immutable_proposal.deleted.clone(),
            parent: immutable_proposal.parent.clone(),
            root_hash: Some(root_hash.into_triehash()),
            root: Some(root),
-            unwritten_nodes: unwritten_count,
         });
-
-        // Track unwritten nodes in metrics
-        #[allow(clippy::cast_precision_loss)]
-        firewood_gauge!(
-            "firewood.nodes.unwritten",
-            "current number of unwritten nodes"
-        )
-        .increment(unwritten_count as f64);
 
         Ok(nodestore)
     }
 }
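Taken together, the `mod.rs` removals undo a three-part metrics contract: hashing a proposal incremented the `firewood.nodes.unwritten` gauge, `Drop` decremented it if the proposal was abandoned, and both the commit and re-wrap paths had to suppress `Drop` with `ManuallyDrop` so committed nodes were not decremented as if discarded. A standalone sketch of that pattern, with a plain `AtomicUsize` standing in for the metrics gauge and a hypothetical `Proposal` type in place of Firewood's:

```rust
use std::mem::ManuallyDrop;
use std::sync::atomic::{AtomicUsize, Ordering};

// Stand-in for the gauge behind firewood_gauge!("firewood.nodes.unwritten", ...).
static UNWRITTEN_NODES: AtomicUsize = AtomicUsize::new(0);

struct Proposal {
    unwritten_nodes: usize,
}

impl Proposal {
    fn new(unwritten_nodes: usize) -> Self {
        // Mirrors the removed increment on the hashing path.
        UNWRITTEN_NODES.fetch_add(unwritten_nodes, Ordering::Relaxed);
        Proposal { unwritten_nodes }
    }

    fn commit(self) -> usize {
        // These nodes will be written, so the Drop decrement must not fire:
        // this is why the commit path needed ManuallyDrop.
        let this = ManuallyDrop::new(self);
        this.unwritten_nodes
    }
}

impl Drop for Proposal {
    fn drop(&mut self) {
        // Abandoned proposal: its nodes will never be written.
        UNWRITTEN_NODES.fetch_sub(self.unwritten_nodes, Ordering::Relaxed);
    }
}

fn main() {
    let abandoned = Proposal::new(10);
    drop(abandoned); // Drop runs, gauge returns to 0
    assert_eq!(UNWRITTEN_NODES.load(Ordering::Relaxed), 0);

    let committed = Proposal::new(7);
    let n = committed.commit(); // Drop suppressed, gauge stays at 7
    assert_eq!(n, 7);
    assert_eq!(UNWRITTEN_NODES.load(Ordering::Relaxed), 7);
}
```

Deleting the counter removes both halves of this balancing act, which was easy to get wrong: any path that dropped a proposal without either committing it or wrapping it in `ManuallyDrop` would skew the gauge.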