Skip to content
Merged
Show file tree
Hide file tree
Changes from 26 commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
206e385
add light client functionality
moshababo Aug 13, 2025
1c58608
Merge branch 'main' into light_client
moshababo Aug 15, 2025
91590ae
post-merge fixes
moshababo Aug 15, 2025
6190ad1
handle RwLock errors
moshababo Aug 15, 2025
eb93f4a
bounds check
moshababo Aug 15, 2025
a6cf862
use ActorId
moshababo Aug 15, 2025
ca8bbc8
fix Cargo.toml
moshababo Aug 15, 2025
d0cb76b
tighten trait bounds
moshababo Aug 15, 2025
36a3327
un-pub fields
moshababo Aug 15, 2025
f305794
merkle to propagate error
moshababo Aug 15, 2025
3857f85
CI fixes
moshababo Aug 21, 2025
cf00ac9
CI fixes
moshababo Aug 21, 2025
57d5cd0
CI fixes
moshababo Aug 21, 2025
6cec2b2
use constants for BLS lengths
moshababo Aug 30, 2025
7a0a39b
use drop instead of scoping
moshababo Aug 30, 2025
7f9300c
use `hashlink::LruCache`
moshababo Aug 30, 2025
f2eb43c
remove `allow(unused)`
moshababo Aug 30, 2025
8b05095
network name to use enum
moshababo Aug 30, 2025
af28ab0
add `MERKLE_DIGEST_SIZE` const
moshababo Aug 30, 2025
a44a2f0
use `hex::encode`
moshababo Aug 30, 2025
95db86a
use `anyhow::ensure`
moshababo Aug 30, 2025
e27943d
CI fixes
moshababo Sep 3, 2025
8cffdc8
Merge branch 'main' into light_client
moshababo Sep 3, 2025
70bf5a5
add issue link
moshababo Sep 3, 2025
42cef93
remove unused deps
moshababo Sep 3, 2025
6bccb28
avoid redundant heap allocs
moshababo Sep 7, 2025
3608b91
bdn mod to receive typed args
moshababo Sep 7, 2025
51bd363
add `BLSError::EmptySignatures`
moshababo Sep 7, 2025
4f3b9e8
add inner errors
moshababo Sep 7, 2025
b2f6171
remove unneeded local variables
moshababo Sep 7, 2025
f905f26
use owned values
moshababo Sep 7, 2025
76d73d7
add comment for cache estimated size
moshababo Sep 7, 2025
0c68f0b
lint fixes
moshababo Sep 7, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions .config/rust-f3.dic
Original file line number Diff line number Diff line change
Expand Up @@ -33,3 +33,12 @@ calibnet
Filecoin
merkle
Keccak256
RPC
BDN
F3
BLS12_381
G1
G2
JSON
CBOR
TODO
22 changes: 21 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[workspace]
resolver = "2"
members = ["blssig", "certs", "gpbft"]
members = ["blssig", "certs", "gpbft", "lightclient", "merkle", "rpc"]
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

💡 Verification agent

🧩 Analysis chain

Workspace members added: ensure per-crate inheritance of workspace package fields

Nice addition of lightclient, merkle, and rpc to the workspace. Please verify each new crate’s Cargo.toml inherits the workspace’s package metadata (edition, rust-version, license, repository) to keep CI/tooling consistent. If not already present, add:

 [package]
 name = "lightclient"
 version = "0.1.0"
-# ...
+edition.workspace = true
+rust-version.workspace = true
+license.workspace = true
+repository.workspace = true

Apply the same in merkle/Cargo.toml and rpc/Cargo.toml. This avoids accidental drift (e.g., building with a different edition/MSRV locally vs CI).

Run this to confirm inheritance is set across all new crates:


🏁 Script executed:

#!/bin/bash
# Lists any package.tomls missing explicit workspace inheritance for key fields
fd Cargo.toml | grep -Ev '^\./Cargo\.toml$' | xargs -I{} sh -c '
  echo ">>> {}"
  rg -n "edition\.workspace\s*=\s*true|rust-version\.workspace\s*=\s*true|license\.workspace\s*=\s*true|repository\.workspace\s*=\s*true" -n {} || echo "  Missing one or more workspace field inheritances"
'

Length of output: 1089


Add missing rust-version.workspace inheritance in new crates

The verification script shows that while lightclient/Cargo.toml, merkle/Cargo.toml, and rpc/Cargo.toml each include:

[package]
repository.workspace = true
license.workspace = true
edition.workspace = true

they are missing the rust-version.workspace = true line. Please add this under the [package] section for each crate to ensure the workspace MSRV is consistently applied.

Affected files:

  • lightclient/Cargo.toml
  • merkle/Cargo.toml
  • rpc/Cargo.toml

Example diff for lightclient/Cargo.toml:

 [package]
 name = "lightclient"
 version = "0.1.0"
 repository.workspace = true
 license.workspace = true
 edition.workspace = true
+rust-version.workspace = true

Repeat the same insertion in merkle/Cargo.toml and rpc/Cargo.toml.

🤖 Prompt for AI Agents
In Cargo.toml (workspace root) around line 3 and in the three affected crate
manifests lightclient/Cargo.toml, merkle/Cargo.toml, and rpc/Cargo.toml under
their [package] sections, add rust-version.workspace = true alongside the
existing repository.workspace = true, license.workspace = true, and
edition.workspace = true entries so each crate inherits the workspace MSRV;
update each file by inserting that single line under [package] in the same style
and indentation as the other .workspace entries.


[workspace.package]
authors = ["ChainSafe Systems <forest@chainsafe.io>"]
Expand All @@ -12,6 +12,26 @@ rust-version = "1.85.0"
[workspace.dependencies]
ahash = "0.8"
anyhow = "1"
base32 = "0.5.1"
base64 = "0.22"
bls-signatures = { version = "0.15" }
bls12_381 = "0.8"
cid = { version = "0.10.1", features = ["std"] }
fvm_ipld_bitfield = "0.7.1"
fvm_ipld_encoding = "0.5"
hashlink = "0.10.0"
hex = "0.4"
jsonrpsee = { version = "0.24", features = ["ws-client", "http-client"] }
keccak-hash = "0.11"
num-bigint = { version = "0.4.6", features = ["serde"] }
num-traits = "0.2.19"
parking_lot = "0.12"
rand = "0.8"
serde = { version = "1", features = ["derive"] }
serde_cbor = "0.11.2"
serde_json = { version = "1", features = ["raw_value"] }
sha3 = "0.10.8"
strum = { version = "0.27.1", features = ["derive"] }
strum_macros = "0.27.1"
thiserror = "2"
tokio = { version = "1", features = ["rt-multi-thread", "macros"] }
6 changes: 3 additions & 3 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
check:
cargo check --quiet --no-default-features
cargo check --quiet --all-features
cargo check --quiet --no-default-features --target wasm32-unknown-unknown
cargo check --quiet --all-features --target wasm32-unknown-unknown
cargo check --quiet --no-default-features --target wasm32-unknown-unknown -p filecoin-f3-certs -p filecoin-f3-blssig -p filecoin-f3-merkle -p filecoin-f3-gpbft
cargo check --quiet --all-features --target wasm32-unknown-unknown -p filecoin-f3-certs -p filecoin-f3-blssig -p filecoin-f3-merkle -p filecoin-f3-gpbft

test:
cargo test --all-features
Expand Down Expand Up @@ -36,7 +36,7 @@ fmt:

clippy:
cargo clippy --all-targets --all-features --quiet --no-deps -- --deny=warnings
cargo clippy --all-features --target wasm32-unknown-unknown --quiet --no-deps -- --deny=warnings
cargo clippy --all-features --target wasm32-unknown-unknown --quiet --no-deps -p filecoin-f3-certs -p filecoin-f3-blssig -p filecoin-f3-merkle -p filecoin-f3-gpbft -- --deny=warnings

# Checks if all headers are present and adds if not
license:
Expand Down
7 changes: 7 additions & 0 deletions blssig/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,12 @@ edition.workspace = true
rust-version.workspace = true

[dependencies]
bls-signatures.workspace = true
bls12_381.workspace = true
filecoin-f3-gpbft = { path = "../gpbft" }
hashlink.workspace = true
parking_lot.workspace = true
thiserror.workspace = true

[dev-dependencies]
rand.workspace = true
75 changes: 75 additions & 0 deletions blssig/src/bdn/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
// Copyright 2019-2024 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

//! BDN (Boneh-Drijvers-Neven) signature aggregation scheme, for preventing rogue public-key attacks.
//!
//! NOTE: currently uses standard BLS aggregation without coefficient weighting, hence returns incorrect values compared to go-f3.
//!
Comment on lines +4 to +7
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue

Critical: Incomplete BDN implementation - returns incorrect results

The module acknowledges that it's using standard BLS aggregation instead of proper BDN coefficient weighting. This means the signature verification will fail when compared against go-f3's implementation. The PR objectives note this as a known issue with expected signature verification errors being temporarily silenced.

Would you like me to help implement the proper BDN coefficient weighting to match the go-f3 reference implementation?

use crate::verifier::BLSError;
use bls_signatures::{PublicKey, Serialize, Signature};
use bls12_381::{G1Projective, G2Projective};
use filecoin_f3_gpbft::PubKey;

Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Prep imports for BDN coefficients

Add Scalar and hashing for coefficient derivation.

 use crate::verifier::BLSError;
 use bls_signatures::{PublicKey, Serialize, Signature};
-use bls12_381::{G1Projective, G2Projective};
+use bls12_381::{G1Projective, G2Projective, Scalar};
+use sha2::{Digest, Sha256};
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
use crate::verifier::BLSError;
use bls_signatures::{PublicKey, Serialize, Signature};
use bls12_381::{G1Projective, G2Projective};
use filecoin_f3_gpbft::PubKey;
use crate::verifier::BLSError;
use bls_signatures::{PublicKey, Serialize, Signature};
use bls12_381::{G1Projective, G2Projective, Scalar};
use sha2::{Digest, Sha256};
use filecoin_f3_gpbft::PubKey;
🤖 Prompt for AI Agents
In blssig/src/bdn/mod.rs around lines 8–12, the module currently imports
PublicKey, Serialize, Signature, G1Projective, G2Projective and PubKey but is
missing the scalar type and hashing utilities needed to derive BDN coefficients;
add an import for the scalar type (e.g., bls12_381::Scalar) and the hashing
primitives you use for coefficient derivation (for example sha2::Sha256 or the
crate/function you use for hash-to-scalar), then update any
coefficient-derivation code to use the imported Scalar and the chosen
hash-to-scalar routine so coefficients are computed deterministically and with
the correct type.

/// BDN aggregation context for managing signature and public key aggregation
pub struct BDNAggregation {
pub_keys: Vec<PubKey>,
}

impl BDNAggregation {
pub fn new(pub_keys: &[PubKey]) -> Result<Self, BLSError> {
if pub_keys.is_empty() {
return Err(BLSError::EmptyPublicKeys);
}

Ok(Self {
pub_keys: pub_keys.to_vec(),
})
}

/// Aggregates signatures using standard BLS aggregation
/// TODO: Implement BDN aggregation scheme: https://github.com/ChainSafe/rust-f3/issues/29
pub fn aggregate_sigs(&self, sigs: &[Vec<u8>]) -> Result<Vec<u8>, BLSError> {
if sigs.len() != self.pub_keys.len() {
return Err(BLSError::LengthMismatch {
pub_keys: self.pub_keys.len(),
sigs: sigs.len(),
});
}

let mut aggregated_point = G2Projective::identity();

for sig_bytes in sigs.iter() {
// Deserialize signature to G2 point
let signature =
Signature::from_bytes(sig_bytes).map_err(|_| BLSError::SignatureDeserialization)?;
let sig_point: G2Projective = signature.into();

// Standard BLS aggregation
aggregated_point += sig_point;
}

// Convert back to signature
let aggregated_sig: Signature = aggregated_point.into();
Ok(aggregated_sig.as_bytes().to_vec())
}

/// Aggregates public keys using standard BLS aggregation
/// TODO: Implement BDN aggregation scheme: https://github.com/ChainSafe/rust-f3/issues/29
pub fn aggregate_pub_keys(&self) -> Result<PublicKey, BLSError> {
let mut aggregated_point = G1Projective::identity();

for pub_key_bytes in &self.pub_keys {
let public_key = PublicKey::from_bytes(&pub_key_bytes.0)
.map_err(|_| BLSError::PublicKeyDeserialization)?;

// Convert public key to G1Projective for curve operations
let pub_key_point: G1Projective = public_key.into();

// Standard BLS aggregation
aggregated_point += pub_key_point;
}

// Convert back to PublicKey
Ok(aggregated_point.into())
}
}
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

💡 Verification agent

🧩 Analysis chain

Add BDN coefficient derivation helper (deterministic, order-agnostic set digest)

  • Coefficients bind to the canonical signer set (sorted by bytes) + each key.
  • Domain separation via a local DST constant; replace with the exact go-f3 DST if it differs.
// Place near the top of this module
const BDN_COEFF_DST: &[u8] = b"F3-BDN-agg-v1";

// Canonical set digest: length-prefix + lexicographic sort to make it order-agnostic.
fn canonical_set_digest(pub_keys: &[PubKey]) -> [u8; 32] {
    let mut keys: Vec<&[u8]> = pub_keys.iter().map(|k| k.0.as_slice()).collect();
    keys.sort_unstable();
    let mut h = Sha256::new();
    h.update(BDN_COEFF_DST);
    h.update((keys.len() as u64).to_be_bytes());
    for k in keys {
        h.update((k.len() as u64).to_be_bytes());
        h.update(k);
    }
    let out = h.finalize();
    let mut digest = [0u8; 32];
    digest.copy_from_slice(&out);
    digest
}

// Derive per-key coefficients as field elements using a wide reduction.
fn compute_bdn_coefficients(pub_keys: &[PubKey]) -> Result<Vec<Scalar>, BLSError> {
    if pub_keys.is_empty() {
        return Err(BLSError::EmptyPublicKeys);
    }
    let set_digest = canonical_set_digest(pub_keys);
    let mut coeffs = Vec::with_capacity(pub_keys.len());
    for pk in pub_keys {
        // Two domain-separated hashes -> 64-byte wide input
        let mut h1 = Sha256::new();
        h1.update(BDN_COEFF_DST);
        h1.update(set_digest);
        h1.update((pk.0.len() as u64).to_be_bytes());
        h1.update(&pk.0);
        h1.update([0u8]);
        let r1 = h1.finalize();

        let mut h2 = Sha256::new();
        h2.update(BDN_COEFF_DST);
        h2.update(set_digest);
        h2.update((pk.0.len() as u64).to_be_bytes());
        h2.update(&pk.0);
        h2.update([1u8]);
        let r2 = h2.finalize();

        let mut wide = [0u8; 64];
        wide[..32].copy_from_slice(&r1);
        wide[32..].copy_from_slice(&r2);

        let c = Scalar::from_bytes_wide(&wide);
        coeffs.push(c);
    }
    Ok(coeffs)
}

If go-f3 uses a different DST/derivation, please paste it and I’ll align the helper for byte-for-byte compatibility.


🏁 Script executed:

#!/bin/bash
# Verify if `sha2` crate is declared for SHA-256 functionality
grep -R "sha2" -n Cargo.toml || echo "sha2 NOT found in Cargo.toml"

# Verify bls12_381 dependency (for Scalar) is present
grep -R "bls12_381" -n Cargo.toml || echo "bls12_381 NOT found in Cargo.toml"

# Search for existing Scalar usage in Rust sources
rg -n "Scalar" -g "*.rs" || echo "No Scalar references in code"

Length of output: 242


🌐 Web query:

What DST constant is used for BDN signature aggregation coefficient derivation in the filecoin-project/go-f3 repository?

💡 Result:

I can't find that constant in the repo with my current search — do you want me to search the go-f3 repository code (and specific files under blssig) now and return the exact DST string?


Add Sha256 and Scalar dependencies & imports

  • Add sha2 = "0.10" to Cargo.toml and at the top of this module import
    use sha2::Sha256;
    use bls12_381::Scalar;
  • Since go-f3 hasn’t yet defined a BDN DST, retain
    const BDN_COEFF_DST: &[u8] = b"F3-BDN-agg-v1";
    and update it once the official DST string is established in go-f3.
🤖 Prompt for AI Agents
In blssig/src/bdn/mod.rs around lines 1 to 75, the reviewer requests adding
sha256 and Scalar dependencies and a BDN DST constant: add the dependency sha2 =
"0.10" to Cargo.toml, then at the top of this file add imports use sha2::Sha256;
and use bls12_381::Scalar; (place them with the existing use statements), and
declare const BDN_COEFF_DST: &[u8] = b"F3-BDN-agg-v1"; so the module can compute
BDN coefficients once implemented (keep the DST until go-f3 defines the official
string).

21 changes: 9 additions & 12 deletions blssig/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,14 @@
// Copyright 2019-2024 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

pub fn add(left: usize, right: usize) -> usize {
left + right
}
//! BLS signature implementation using BDN aggregation scheme.
//!
//! This module implements the BLS signature scheme used in the Filecoin F3 protocol.
//! It uses the BLS12_381 curve with G1 for public keys and G2 for signatures.
//! The BDN (Boneh-Drijvers-Neven) scheme is used for signature and public key aggregation
//! to prevent rogue public-key attacks.

#[cfg(test)]
mod tests {
use super::*;
mod bdn;
mod verifier;

#[test]
fn it_works() {
let result = add(2, 2);
assert_eq!(result, 4);
}
}
pub use verifier::{BLSError, BLSVerifier};
169 changes: 169 additions & 0 deletions blssig/src/verifier/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,169 @@
// Copyright 2019-2024 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use bls_signatures::{PublicKey, Serialize, Signature, verify_messages};
use filecoin_f3_gpbft::PubKey;
use filecoin_f3_gpbft::api::Verifier;
use hashlink::LruCache;
use parking_lot::RwLock;
use thiserror::Error;

use crate::bdn::BDNAggregation;

#[cfg(test)]
mod tests;

#[derive(Error, Debug, PartialEq)]
pub enum BLSError {
#[error("empty public keys provided")]
EmptyPublicKeys,
#[error("invalid public key length: expected {BLS_PUBLIC_KEY_LENGTH} bytes, got {0}")]
InvalidPublicKeyLength(usize),
#[error("failed to deserialize public key")]
PublicKeyDeserialization,
#[error("invalid signature length: expected {BLS_SIGNATURE_LENGTH} bytes, got {0}")]
InvalidSignatureLength(usize),
#[error("failed to deserialize signature")]
SignatureDeserialization,
#[error("BLS signature verification failed")]
SignatureVerificationFailed,
#[error("mismatched number of public keys and signatures: {pub_keys} != {sigs}")]
LengthMismatch { pub_keys: usize, sigs: usize },
}

/// BLS signature verifier using BDN aggregation scheme
///
/// This verifier implements the same scheme used by `go-f3/blssig`, with:
/// - BLS12_381 curve
/// - G1 for public keys, G2 for signatures
/// - BDN aggregation for rogue-key attack prevention
/// - Point cache for performance optimization
pub struct BLSVerifier {
/// Cache for deserialized public key points to avoid expensive repeated operations
point_cache: RwLock<LruCache<Vec<u8>, PublicKey>>,
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'd change the signatures of the Verifier trait methods to use strongly-typed public key and signature to avoid conversions and such LRU perf optimization

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I followed the existing trait Verifier, which follows type Verifier interface from go-f3, who's using untyped params for all functions.

However, I agree that the bdn module can/should receive the typed data, so I changed that here: 3608b91

I'm not convinced though that Verifier should do the same, since it will merely push the validity checks, the deserialization and the caching to a higher layer, given that the data source is serialized.

}

impl Default for BLSVerifier {
fn default() -> Self {
Self::new()
}
}

/// BLS12-381 public key length in bytes
const BLS_PUBLIC_KEY_LENGTH: usize = 48;

/// BLS12-381 signature length in bytes
const BLS_SIGNATURE_LENGTH: usize = 96;

/// Maximum number of cached public key points to prevent excessive memory usage
const MAX_POINT_CACHE_SIZE: usize = 10_000;

impl BLSVerifier {
pub fn new() -> Self {
Self {
point_cache: RwLock::new(LruCache::new(MAX_POINT_CACHE_SIZE)),
}
}

/// Verifies a single BLS signature
fn verify_single(&self, pub_key: &PubKey, msg: &[u8], sig: &[u8]) -> Result<(), BLSError> {
// Validate input lengths
if pub_key.0.len() != BLS_PUBLIC_KEY_LENGTH {
return Err(BLSError::InvalidPublicKeyLength(pub_key.0.len()));
}
if sig.len() != BLS_SIGNATURE_LENGTH {
return Err(BLSError::InvalidSignatureLength(sig.len()));
}

// Get cached public key
let pub_key = self.get_or_cache_public_key(&pub_key.0)?;

// Deserialize signature
let signature =
Signature::from_bytes(sig).map_err(|_| BLSError::SignatureDeserialization)?;

// Verify using bls-signatures
let msgs = [msg];
let pub_keys = [pub_key];
match verify_messages(&signature, &msgs, &pub_keys) {
true => Ok(()),
false => Err(BLSError::SignatureVerificationFailed),
}
}

/// Gets a cached public key or deserialize and caches it
fn get_or_cache_public_key(&self, pub_key_bytes: &[u8]) -> Result<PublicKey, BLSError> {
// Check cache first
let mut cache = self.point_cache.write();
if let Some(cached_key) = cache.get(pub_key_bytes) {
return Ok(*cached_key);
}
drop(cache);

// Deserialize the public key
let pub_key =
PublicKey::from_bytes(pub_key_bytes).map_err(|_| BLSError::PublicKeyDeserialization)?;

// Cache it
let mut cache = self.point_cache.write();
cache.insert(pub_key_bytes.to_vec(), pub_key);
drop(cache);

Ok(pub_key)
}
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Fix Copy/move bugs in cache path (PublicKey is not guaranteed Copy).

Returning *cached_key and pub_key after inserting assumes PublicKey: Copy. This will either fail to compile or cause unnecessary copying. Clone the key for the caller and keep an owned copy in the cache. Also, avoid lock re-acquisition and manual drop.

Apply this diff:

-    fn get_or_cache_public_key(&self, pub_key_bytes: &[u8]) -> Result<PublicKey, BLSError> {
-        // Check cache first
-        let mut cache = self.point_cache.write();
-        if let Some(cached_key) = cache.get(pub_key_bytes) {
-            return Ok(*cached_key);
-        }
-        drop(cache);
-
-        // Deserialize the public key
-        let pub_key =
-            PublicKey::from_bytes(pub_key_bytes).map_err(|_| BLSError::PublicKeyDeserialization)?;
-
-        // Cache it
-        let mut cache = self.point_cache.write();
-        cache.insert(pub_key_bytes.to_vec(), pub_key);
-        drop(cache);
-
-        Ok(pub_key)
-    }
+    fn get_or_cache_public_key(&self, pub_key_bytes: &[u8]) -> Result<PublicKey, BLSError> {
+        // Fast path: refresh LRU on hit and clone for the caller
+        if let Some(pk) = self.point_cache.write().get(pub_key_bytes).cloned() {
+            return Ok(pk);
+        }
+        // Miss: deserialize outside of locks
+        let pk =
+            PublicKey::from_bytes(pub_key_bytes).map_err(|_| BLSError::PublicKeyDeserialization)?;
+        // Insert and return a clone
+        self.point_cache.write().insert(pub_key_bytes.to_vec(), pk.clone());
+        Ok(pk)
+    }
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
/// Gets a cached public key or deserialize and caches it
fn get_or_cache_public_key(&self, pub_key_bytes: &[u8]) -> Result<PublicKey, BLSError> {
// Check cache first
let mut cache = self.point_cache.write();
if let Some(cached_key) = cache.get(pub_key_bytes) {
return Ok(*cached_key);
}
drop(cache);
// Deserialize the public key
let pub_key =
PublicKey::from_bytes(pub_key_bytes).map_err(|_| BLSError::PublicKeyDeserialization)?;
// Cache it
let mut cache = self.point_cache.write();
cache.insert(pub_key_bytes.to_vec(), pub_key);
drop(cache);
Ok(pub_key)
}
/// Gets a cached public key or deserialize and caches it
fn get_or_cache_public_key(&self, pub_key_bytes: &[u8]) -> Result<PublicKey, BLSError> {
// Fast path: refresh LRU on hit and clone for the caller
if let Some(pk) = self.point_cache.write().get(pub_key_bytes).cloned() {
return Ok(pk);
}
// Miss: deserialize outside of locks
let pk =
PublicKey::from_bytes(pub_key_bytes).map_err(|_| BLSError::PublicKeyDeserialization)?;
// Insert and return a clone
self.point_cache.write().insert(pub_key_bytes.to_vec(), pk.clone());
Ok(pk)
}
🤖 Prompt for AI Agents
In blssig/src/verifier/mod.rs around lines 94 to 113, the cache path incorrectly
dereferences and moves a PublicKey (assuming Copy) and reacquires the lock;
change to acquire the write lock once, check for an existing key and if present
return a cloned key, otherwise deserialize into an owned PublicKey, insert it
into the cache, and return a clone for the caller; avoid manual drop and
duplicate lock acquisition by using a single write lock or using the map entry
API so the cached value remains owned while callers receive a cloned copy.

}

impl Verifier for BLSVerifier {
type Error = BLSError;

fn verify(&self, pub_key: &PubKey, msg: &[u8], sig: &[u8]) -> Result<(), Self::Error> {
self.verify_single(pub_key, msg, sig)
}

fn aggregate(&self, pub_keys: &[PubKey], sigs: &[Vec<u8>]) -> Result<Vec<u8>, Self::Error> {
if pub_keys.is_empty() || sigs.is_empty() {
return Err(BLSError::EmptyPublicKeys);
}
if pub_keys.len() != sigs.len() {
return Err(BLSError::LengthMismatch {
pub_keys: pub_keys.len(),
sigs: sigs.len(),
});
}

// Validate all input lengths
for (i, pub_key) in pub_keys.iter().enumerate() {
if pub_key.0.len() != BLS_PUBLIC_KEY_LENGTH {
return Err(BLSError::InvalidPublicKeyLength(pub_key.0.len()));
}
if sigs[i].len() != BLS_SIGNATURE_LENGTH {
return Err(BLSError::InvalidSignatureLength(sigs[i].len()));
}
}

let bdn = BDNAggregation::new(pub_keys)?;
bdn.aggregate_sigs(sigs)
}

fn verify_aggregate(
&self,
payload: &[u8],
agg_sig: &[u8],
signers: &[PubKey],
) -> Result<(), Self::Error> {
if signers.is_empty() {
return Err(BLSError::EmptyPublicKeys);
}

for pub_key in signers {
if pub_key.0.len() != BLS_PUBLIC_KEY_LENGTH {
return Err(BLSError::InvalidPublicKeyLength(pub_key.0.len()));
}
}

let bdn = BDNAggregation::new(signers)?;
let agg_pub_key = bdn.aggregate_pub_keys()?;
let agg_pub_key_bytes = PubKey(agg_pub_key.as_bytes().to_vec());
self.verify_single(&agg_pub_key_bytes, payload, agg_sig)
}
}
Loading