Skip to content

Commit 3ee66af

Browse files
committed
Remove unused mutable refs
1 parent 2fb2bf1 commit 3ee66af

File tree

2 files changed

+11
-13
lines changed

2 files changed

+11
-13
lines changed

crates/subspace-service/src/sync_from_dsn.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -307,10 +307,10 @@ where
307307
// right away.
308308
// TODO: implement starting from any segment index, and handle the partial block at the end of
309309
// the last segment.
310-
let mut last_processed_segment_index = SegmentIndex::ZERO;
310+
let last_processed_segment_index = SegmentIndex::ZERO;
311311
// TODO: We'll be able to just take finalized block once we are able to decouple pruning from
312312
// finality: https://github.com/paritytech/polkadot-sdk/issues/1570
313-
let mut last_processed_block_number = info.best_number;
313+
let last_processed_block_number = info.best_number;
314314
let segment_header_downloader = SegmentHeaderDownloader::new(node);
315315

316316
if let Some(reason) = notifications.next().await {
@@ -324,8 +324,8 @@ where
324324
client,
325325
piece_getter,
326326
import_queue_service,
327-
&mut last_processed_segment_index,
328-
&mut last_processed_block_number,
327+
last_processed_segment_index,
328+
last_processed_block_number,
329329
erasure_coding,
330330
);
331331
let wait_almost_synced_fut = async {

crates/subspace-service/src/sync_from_dsn/import_blocks.rs

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -35,8 +35,8 @@ pub(super) async fn import_blocks_from_dsn<Block, AS, Client, PG, IQS>(
3535
client: &Client,
3636
piece_getter: &PG,
3737
import_queue_service: &mut IQS,
38-
last_processed_segment_index: &mut SegmentIndex,
39-
last_processed_block_number: &mut <Block::Header as Header>::Number,
38+
last_processed_segment_index: SegmentIndex,
39+
mut last_processed_block_number: <Block::Header as Header>::Number,
4040
erasure_coding: &ErasureCoding,
4141
) -> Result<u64, Error>
4242
where
@@ -70,7 +70,7 @@ where
7070
let mut imported_blocks = 0;
7171
let mut reconstructor = Arc::new(Mutex::new(Reconstructor::new(erasure_coding.clone())));
7272
// Start from the first unprocessed segment and process all segments known so far
73-
let segment_indices_iter = (*last_processed_segment_index + SegmentIndex::ONE)
73+
let segment_indices_iter = (last_processed_segment_index + SegmentIndex::ONE)
7474
..=segment_headers_store
7575
.max_segment_index()
7676
.expect("Exists, we have inserted segment headers above; qed");
@@ -104,7 +104,7 @@ where
104104
// We have already processed the last block in this segment, or one higher than it,
105105
// so it can't change. Resetting the reconstructor loses any partial blocks, so we
106106
// only reset if the (possibly partial) last block has been processed.
107-
if *last_processed_block_number >= last_archived_maybe_partial_block_number {
107+
if last_processed_block_number >= last_archived_maybe_partial_block_number {
108108
debug!(
109109
target: LOG_TARGET,
110110
%segment_index,
@@ -113,14 +113,13 @@ where
113113
%last_archived_block_partial,
114114
"Already processed last (possibly partial) block in segment, resetting reconstructor",
115115
);
116-
*last_processed_segment_index = segment_index;
117116
// Reset reconstructor instance
118117
reconstructor = Arc::new(Mutex::new(Reconstructor::new(erasure_coding.clone())));
119118
continue;
120119
}
121120
// Just one partial unprocessed block and this was the last segment available, so nothing to
122121
// import
123-
if last_archived_maybe_partial_block_number == *last_processed_block_number + One::one()
122+
if last_archived_maybe_partial_block_number == last_processed_block_number + One::one()
124123
&& last_archived_block_partial
125124
&& segment_indices_iter.peek().is_none()
126125
{
@@ -203,6 +202,7 @@ where
203202
}
204203
trace!(
205204
target: LOG_TARGET,
205+
%segment_index,
206206
%block_number,
207207
%best_block_number,
208208
%just_queued_blocks_count,
@@ -216,7 +216,7 @@ where
216216
let signed_block =
217217
decode_block::<Block>(&block_bytes).map_err(|error| error.to_string())?;
218218

219-
*last_processed_block_number = block_number;
219+
last_processed_block_number = block_number;
220220

221221
// No need to import blocks that are already present, if block is not present it might
222222
// correspond to a short fork, so we need to import it even if we already have another
@@ -256,8 +256,6 @@ where
256256
// Import queue handles verification and importing it into the client
257257
import_queue_service.import_blocks(BlockOrigin::NetworkInitialSync, blocks_to_import);
258258
}
259-
260-
*last_processed_segment_index = segment_index;
261259
}
262260

263261
Ok(imported_blocks)

0 commit comments

Comments (0)