65 changes: 13 additions & 52 deletions dash-spv/tests/edge_case_filter_sync_test.rs
@@ -10,32 +10,14 @@ use dash_spv::{
};
use dashcore::{
block::Header as BlockHeader, hash_types::FilterHeader, network::message::NetworkMessage,
BlockHash, Network,
Network,
};
use dashcore_hashes::Hash;
use std::collections::HashSet;
use std::sync::Arc;
use tempfile::TempDir;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio::sync::Mutex;

/// Create a mock block header
fn create_mock_header(height: u32, prev_hash: BlockHash) -> BlockHeader {
BlockHeader {
version: dashcore::block::Version::ONE,
prev_blockhash: prev_hash,
merkle_root: dashcore::hash_types::TxMerkleNode::all_zeros(),
time: 1234567890 + height,
bits: dashcore::pow::CompactTarget::from_consensus(0x1d00ffff),
nonce: height,
}
}

/// Create a mock filter header
fn create_mock_filter_header(height: u32) -> FilterHeader {
FilterHeader::from_slice(&[height as u8; 32]).unwrap()
}

/// Mock network manager that captures sent messages
struct MockNetworkManager {
sent_messages: Arc<Mutex<Vec<NetworkMessage>>>,
@@ -114,26 +96,18 @@ async fn test_filter_sync_at_tip_edge_case() {
let mut network = MockNetworkManager::new();

// Set up storage with headers and filter headers at the same height (tip)
let height = 100;
let mut headers = Vec::new();
let mut filter_headers = Vec::new();
let mut prev_hash = BlockHash::all_zeros();

for i in 1..=height {
let header = create_mock_header(i, prev_hash);
prev_hash = header.block_hash();
headers.push(header);
filter_headers.push(create_mock_filter_header(i));
}
const TIP_HEIGHT: u32 = 100;
let headers = BlockHeader::dummy_batch(0..TIP_HEIGHT + 1);
let filter_headers = FilterHeader::dummy_batch(0..TIP_HEIGHT + 1);

storage.store_headers(&headers).await.unwrap();
storage.store_filter_headers(&filter_headers).await.unwrap();

// Verify initial state
let tip_height = storage.get_tip_height().await.unwrap();
let filter_tip_height = storage.get_filter_tip_height().await.unwrap().unwrap();
assert_eq!(tip_height, height - 1); // 0-indexed
assert_eq!(filter_tip_height, height - 1); // 0-indexed
assert_eq!(tip_height, TIP_HEIGHT); // heights 0..=TIP_HEIGHT are stored, so the 0-indexed tip is TIP_HEIGHT
assert_eq!(filter_tip_height, TIP_HEIGHT);

// Try to start filter sync when already at tip
let result = filter_sync.start_sync_headers(&mut network, &mut storage).await;
@@ -157,23 +131,11 @@ async fn test_no_invalid_getcfheaders_at_tip() {
.expect("Failed to create tmp storage");
let mut network = MockNetworkManager::new();

// Create a scenario where we're one block behind
let height = 100;
let mut headers = Vec::new();
let mut filter_headers = Vec::new();
let mut prev_hash = BlockHash::all_zeros();

// Store headers up to height
for i in 1..=height {
let header = create_mock_header(i, prev_hash);
prev_hash = header.block_hash();
headers.push(header);
}

// Store filter headers up to height - 1
for i in 1..=(height - 1) {
filter_headers.push(create_mock_filter_header(i));
}
// Create a scenario where we're one filter header behind
// FilterHeader at TIP_HEIGHT is the one missing
const TIP_HEIGHT: u32 = 99;
let headers = BlockHeader::dummy_batch(0..TIP_HEIGHT + 1);
let filter_headers = FilterHeader::dummy_batch(0..TIP_HEIGHT);

storage.store_headers(&headers).await.unwrap();
storage.store_filter_headers(&filter_headers).await.unwrap();
@@ -191,10 +153,9 @@ async fn test_no_invalid_getcfheaders_at_tip() {
NetworkMessage::GetCFHeaders(get_cf_headers) => {
// The critical check: start_height must be <= height of stop_hash
assert_eq!(
get_cf_headers.start_height,
height - 1,
get_cf_headers.start_height, TIP_HEIGHT,
"Start height should be {}",
height - 1
TIP_HEIGHT
);
// We can't easily verify the stop_hash height here, but the request should be valid
println!(
35 changes: 5 additions & 30 deletions dash-spv/tests/filter_header_verification_test.rs
@@ -17,7 +17,7 @@ use dash_spv::{
sync::legacy::filters::FilterSyncManager,
};
use dashcore::{
block::{Header as BlockHeader, Version},
block::Header as BlockHeader,
hash_types::{FilterHash, FilterHeader},
network::message::NetworkMessage,
network::message_filter::CFHeaders,
@@ -97,31 +97,6 @@ impl NetworkManager for MockNetworkManager {
}
}

/// Create test headers for a given range
fn create_test_headers_range(start_height: u32, count: u32) -> Vec<BlockHeader> {
let mut headers = Vec::new();

for i in 0..count {
let height = start_height + i;
let header = BlockHeader {
version: Version::from_consensus(1),
prev_blockhash: if height == 0 {
BlockHash::all_zeros()
} else {
// Create a deterministic previous hash
BlockHash::from_byte_array([((height - 1) % 256) as u8; 32])
},
merkle_root: dashcore::TxMerkleNode::from_byte_array([(height % 256) as u8; 32]),
time: 1234567890 + height,
bits: dashcore::CompactTarget::from_consensus(0x1d00ffff),
nonce: height,
};
headers.push(header);
}

headers
}

/// Create test filter headers with proper chain linkage
fn create_test_cfheaders_message(
start_height: u32,
@@ -181,7 +156,7 @@ async fn test_filter_header_verification_failure_reproduction() {

// Step 1: Store initial headers to simulate having a synced header chain
println!("Step 1: Setting up initial header chain...");
let initial_headers = create_test_headers_range(1000, 5000); // Headers 1000-4999
let initial_headers = BlockHeader::dummy_batch(1000..5000); // Headers 1000-4999
storage.store_headers(&initial_headers).await.expect("Failed to store initial headers");

let tip_height = storage.get_tip_height().await.unwrap();
@@ -345,7 +320,7 @@ async fn test_overlapping_batches_from_different_peers() {

// Step 1: Set up headers for the full range we'll need
println!("Step 1: Setting up header chain (heights 1-3000)...");
let initial_headers = create_test_headers_range(1, 3000); // Headers 1-2999
let initial_headers = BlockHeader::dummy_batch(1..3000); // Headers 1-2999
storage.store_headers(&initial_headers).await.expect("Failed to store initial headers");

let tip_height = storage.get_tip_height().await.unwrap();
@@ -520,7 +495,7 @@ async fn test_filter_header_verification_overlapping_batches() {
FilterSyncManager::new(&config, received_heights);

// Set up initial headers - start from 1 for proper sync
let initial_headers = create_test_headers_range(1, 2000);
let initial_headers = BlockHeader::dummy_batch(1..2000);
storage.store_headers(&initial_headers).await.expect("Failed to store initial headers");

// Start filter sync first (required for message processing)
@@ -618,7 +593,7 @@ async fn test_filter_header_verification_race_condition_simulation() {
FilterSyncManager::new(&config, received_heights);

// Set up headers - need enough for batch B (up to height 3000)
let initial_headers = create_test_headers_range(1, 3001);
let initial_headers = BlockHeader::dummy_batch(1..3001);
storage.store_headers(&initial_headers).await.expect("Failed to store initial headers");

// Simulate: Start sync, send request for batch A
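Note on the `dummy_batch` helpers this diff switches to: their implementation is not part of the changes shown here. The sketch below is only an illustration of what such batch constructors could look like, pieced together from the deleted `create_mock_header`, `create_mock_filter_header`, and `create_test_headers_range` helpers. The free-function names `dummy_block_headers` and `dummy_filter_headers`, and the exact field values, are assumptions for illustration, not the actual `dashcore` API.

use dashcore::{
    block::Header as BlockHeader,
    hash_types::{FilterHeader, TxMerkleNode},
    BlockHash,
};
use dashcore_hashes::Hash;
use std::ops::Range;

/// Build a chain of deterministic dummy block headers for the given height range.
/// (Hypothetical stand-in for `BlockHeader::dummy_batch`.)
fn dummy_block_headers(heights: Range<u32>) -> Vec<BlockHeader> {
    let mut headers = Vec::new();
    let mut prev_hash = BlockHash::all_zeros();
    for height in heights {
        let header = BlockHeader {
            version: dashcore::block::Version::ONE,
            prev_blockhash: prev_hash,
            merkle_root: TxMerkleNode::all_zeros(),
            time: 1234567890 + height,
            bits: dashcore::pow::CompactTarget::from_consensus(0x1d00ffff),
            nonce: height,
        };
        // Link each header to the previous one so the chain is internally consistent.
        prev_hash = header.block_hash();
        headers.push(header);
    }
    headers
}

/// Build deterministic dummy filter headers for the given height range.
/// (Hypothetical stand-in for `FilterHeader::dummy_batch`.) Heights are truncated
/// to u8, so values repeat every 256 blocks; that is fine for tests that only
/// care about counts and tip heights.
fn dummy_filter_headers(heights: Range<u32>) -> Vec<FilterHeader> {
    heights.map(|height| FilterHeader::from_slice(&[height as u8; 32]).unwrap()).collect()
}

With helpers like these, `dummy_block_headers(0..TIP_HEIGHT + 1)` yields TIP_HEIGHT + 1 linked headers starting at height 0, which is why the rewritten tests expect the stored tip height to equal TIP_HEIGHT directly instead of the old `height - 1`.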