Commit 5cb680a

quantumclaude authored and committed
fix: multiple code improvements across rust-dashcore
- Fix unsafe CString usage in test_client.rs by using CStr::from_ptr
- Add error handling for missing masternode fields in fetch_terminal_blocks.py
- Remove unnecessary f-string prefixes in fetch_terminal_blocks.py
- Clarify error message in chain_tip.rs for single active tip
- Add MAX_PENDING_CHAINLOCKS limit to prevent unbounded queue growth
- Remove unnecessary header clone in orphan_pool.rs
- Fix incorrectly prefixed storage parameter in filters.rs

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
1 parent dbe2971 commit 5cb680a

6 files changed: +46 -20 lines


dash-spv-ffi/tests/test_client.rs

Lines changed: 1 addition & 1 deletion
@@ -233,7 +233,7 @@ mod tests {
             println!("Warning: Failed to start client, error code: {}", start_result);
             let error = dash_spv_ffi_get_last_error();
             if !error.is_null() {
-                let error_str = CString::from_raw(error as *mut _);
+                let error_str = std::ffi::CStr::from_ptr(error);
                 println!("Error message: {:?}", error_str);
             }
         }
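
The change matters because `CString::from_raw` reclaims ownership of the pointer and frees the buffer when the value is dropped, which is wrong for a string the library still owns; `CStr::from_ptr` only borrows it. A minimal sketch of the borrowed-read pattern (the `read_ffi_error` helper is illustrative, not part of dash-spv-ffi):

```rust
use std::ffi::{c_char, CStr};

/// Copies an FFI error message into an owned `String` without freeing
/// the library-owned buffer. Illustrative helper, not from dash-spv-ffi.
unsafe fn read_ffi_error(ptr: *const c_char) -> Option<String> {
    if ptr.is_null() {
        return None;
    }
    // Borrow the NUL-terminated C string; nothing is deallocated on drop.
    let s = unsafe { CStr::from_ptr(ptr) };
    Some(s.to_string_lossy().into_owned())
}
```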

dash-spv/scripts/fetch_terminal_blocks.py

Lines changed: 25 additions & 13 deletions
@@ -77,14 +77,26 @@ def fetch_terminal_block_data(network, height, genesis_hash):
     # Extract relevant data
     masternode_list = []
     for mn in diff_result.get("mnList", []):
-        masternode_list.append({
-            "pro_tx_hash": mn["proRegTxHash"],
-            "service": mn["service"],
-            "pub_key_operator": mn["pubKeyOperator"],
-            "voting_address": mn["votingAddress"],
-            "is_valid": mn["isValid"],
-            "n_type": mn.get("nType", 0),
-        })
+        try:
+            # Check for required fields and skip entry if any are missing
+            required_fields = ["proRegTxHash", "service", "pubKeyOperator", "votingAddress", "isValid"]
+            missing_fields = [field for field in required_fields if field not in mn]
+
+            if missing_fields:
+                print(f"Warning: Masternode entry missing required fields: {missing_fields}. Skipping entry.")
+                continue
+
+            masternode_list.append({
+                "pro_tx_hash": mn["proRegTxHash"],
+                "service": mn["service"],
+                "pub_key_operator": mn["pubKeyOperator"],
+                "voting_address": mn["votingAddress"],
+                "is_valid": mn["isValid"],
+                "n_type": mn.get("nType", 0),  # Default to 0 if not present
+            })
+        except Exception as e:
+            print(f"Error processing masternode entry: {e}. Skipping entry.")
+            continue

     return {
         "height": height,
@@ -150,12 +162,12 @@ def main():
             json_file = output_dir / f"terminal_block_{height}.json"
             if json_file.exists():
                 f.write(f'    // Terminal block {height}\n')
-                f.write(f'    {{\n')
+                f.write('    {\n')
                 f.write(f'        let data = include_str!("terminal_block_{height}.json");\n')
-                f.write(f'        if let Ok(state) = serde_json::from_str::<TerminalBlockMasternodeState>(data) {{\n')
-                f.write(f'            manager.add_state(state);\n')
-                f.write(f'        }}\n')
-                f.write(f'    }}\n\n')
+                f.write('        if let Ok(state) = serde_json::from_str::<TerminalBlockMasternodeState>(data) {\n')
+                f.write('            manager.add_state(state);\n')
+                f.write('        }\n')
+                f.write('    }\n\n')

         f.write("}\n")
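
With the f-string prefixes dropped and the doubled `{{ }}` braces replaced by plain `{ }`, the block the script emits for each terminal block is ordinary Rust: it embeds the JSON via `include_str!` and only registers the state when deserialization succeeds. A self-contained sketch of that parse-or-skip pattern (the `TerminalBlockState` struct and the inline JSON are placeholders, not the project's real types or data):

```rust
use serde::Deserialize;

/// Placeholder standing in for the project's TerminalBlockMasternodeState.
#[derive(Debug, Deserialize)]
struct TerminalBlockState {
    height: u32,
}

fn main() {
    // In the generated file this string comes from include_str!("terminal_block_<height>.json").
    let data = r#"{ "height": 123456 }"#;

    let mut states = Vec::new();
    // Mirrors the emitted block: add the state only if parsing succeeds, otherwise skip.
    if let Ok(state) = serde_json::from_str::<TerminalBlockState>(data) {
        states.push(state);
    }
    println!("loaded states: {:?}", states);
}
```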

dash-spv/src/chain/chain_tip.rs

Lines changed: 1 addition & 1 deletion
@@ -181,7 +181,7 @@ impl ChainTipManager {
             self.tips.remove(&hash);
             Ok(())
         } else {
-            Err("Cannot evict: all tips are active")
+            Err("Cannot evict: the only tip present is active")
         }
     }

dash-spv/src/chain/chainlock_manager.rs

Lines changed: 13 additions & 0 deletions
@@ -13,6 +13,9 @@ use crate::error::{StorageError, StorageResult, ValidationError, ValidationResul
 use crate::storage::StorageManager;
 use crate::types::ChainState;

+/// Maximum number of pending ChainLocks to queue
+const MAX_PENDING_CHAINLOCKS: usize = 100;
+
 /// ChainLock storage entry
 #[derive(Debug, Clone)]
 pub struct ChainLockEntry {
@@ -70,6 +73,16 @@ impl ChainLockManager {
     pub fn queue_pending_chainlock(&self, chain_lock: ChainLock) -> StorageResult<()> {
         let mut pending = self.pending_chainlocks.write()
             .map_err(|_| StorageError::LockPoisoned("pending_chainlocks".to_string()))?;
+
+        // If at capacity, drop the oldest ChainLock
+        if pending.len() >= MAX_PENDING_CHAINLOCKS {
+            let dropped = pending.remove(0);
+            warn!(
+                "Pending ChainLocks queue at capacity ({}), dropping oldest ChainLock at height {}",
+                MAX_PENDING_CHAINLOCKS, dropped.block_height
+            );
+        }
+
         pending.push(chain_lock);
         debug!("Queued ChainLock for pending validation, total pending: {}", pending.len());
         Ok(())
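
The cap makes the pending list behave as a bounded FIFO: once MAX_PENDING_CHAINLOCKS entries are queued, the oldest is discarded before the new ChainLock is pushed. A standalone sketch of that eviction policy using a `VecDeque` (the manager's actual field is the `Vec` shown above; this only illustrates the behaviour, and `pop_front` is O(1) where `Vec::remove(0)` is O(n)):

```rust
use std::collections::VecDeque;

const MAX_PENDING: usize = 100;

/// Push an item onto a bounded FIFO, evicting the oldest entry when full.
/// Returns the dropped item, if any.
fn push_bounded<T>(queue: &mut VecDeque<T>, item: T) -> Option<T> {
    let dropped = if queue.len() >= MAX_PENDING {
        queue.pop_front()
    } else {
        None
    };
    queue.push_back(item);
    dropped
}

fn main() {
    let mut queue: VecDeque<u32> = (0..MAX_PENDING as u32).collect();
    let dropped = push_bounded(&mut queue, 999);
    assert_eq!(dropped, Some(0)); // oldest entry was evicted
    assert_eq!(queue.len(), MAX_PENDING); // size stays bounded
}
```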

dash-spv/src/chain/orphan_pool.rs

Lines changed: 4 additions & 3 deletions
@@ -71,19 +71,20 @@ impl OrphanPool {

         // Create orphan entry
         let orphan = OrphanBlock {
-            header: header.clone(),
+            header,
             received_at: Instant::now(),
             process_attempts: 0,
         };

         // Index by previous block
-        self.orphans_by_prev.entry(header.prev_blockhash).or_default().push(orphan.clone());
+        let prev_blockhash = orphan.header.prev_blockhash;
+        self.orphans_by_prev.entry(prev_blockhash).or_default().push(orphan.clone());

         // Index by hash
         self.orphans_by_hash.insert(block_hash, orphan);
         self.eviction_queue.push_back(block_hash);

-        debug!("Added orphan block {} (prev: {})", block_hash, header.prev_blockhash);
+        debug!("Added orphan block {} (prev: {})", block_hash, prev_blockhash);

         true
     }
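
The clone removal relies on an ordering detail: once `header` is moved into the `OrphanBlock`, the previous-block hash is read back through `orphan.header` and saved in a local before the entry is handed to the indexes, so no clone of the whole header is needed. A minimal sketch of that move-then-copy-the-field pattern with placeholder types (not the pool's real definitions):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct BlockHash(u64);

struct Header {
    prev_blockhash: BlockHash,
    // ...other header fields omitted
}

struct Orphan {
    header: Header,
}

fn index_orphan(header: Header) -> (Orphan, BlockHash) {
    // Move the header into the entry instead of cloning it...
    let orphan = Orphan { header };
    // ...then copy out the small `Copy` field still needed afterwards.
    let prev = orphan.header.prev_blockhash;
    (orphan, prev)
}

fn main() {
    let header = Header { prev_blockhash: BlockHash(42) };
    let (_orphan, prev) = index_orphan(header);
    assert_eq!(prev, BlockHash(42));
}
```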

dash-spv/src/sync/filters.rs

Lines changed: 2 additions & 2 deletions
@@ -1459,7 +1459,7 @@ impl FilterSyncManager {
         &mut self,
         range: (u32, u32),
         _network: &mut dyn NetworkManager,
-        _storage: &dyn StorageManager,
+        storage: &dyn StorageManager,
     ) -> SyncResult<()> {
         let (start, end) = range;
         let retry_count = self.filter_retry_counts.get(&range).copied().unwrap_or(0);
@@ -1478,7 +1478,7 @@
         }

         // Calculate stop hash for retry
-        match _storage.get_header(end).await {
+        match storage.get_header(end).await {
             Ok(Some(header)) => {
                 let stop_hash = header.block_hash();