
Commit 6972b6b

Fix errors
1 parent 41a614a commit 6972b6b

12 files changed (+128 -272 lines)


compiler/rustc_data_structures/src/lib.rs (+1)

@@ -13,6 +13,7 @@
 #![deny(unsafe_op_in_unsafe_fn)]
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
+#![feature(alloc_layout_extra)]
 #![feature(allocator_api)]
 #![feature(array_windows)]
 #![feature(ascii_char)]

compiler/rustc_data_structures/src/sync.rs (+2 -1)

@@ -45,16 +45,17 @@ pub use self::mode::{is_dyn_thread_safe, set_dyn_thread_safe_mode};
 pub use self::parallel::{
     join, par_for_each_in, par_map, parallel_guard, scope, try_par_for_each_in,
 };
+pub use self::table::*;
 pub use self::vec::{AppendOnlyIndexVec, AppendOnlyVec};
 pub use self::worker_local::{Registry, WorkerLocal};
 pub use crate::marker::*;

 mod freeze;
 mod lock;
 mod parallel;
+mod table;
 mod vec;
 mod worker_local;
-mod table;

 /// Keep the conditional imports together in a submodule, so that import-sorting
 /// doesn't split them up.

compiler/rustc_data_structures/src/sync/table/collect.rs (+25 -27)

@@ -1,18 +1,19 @@
 //! An API for quiescent state based reclamation.

-use crate::{scopeguard::guard, util::cold_path};
-use parking_lot::Mutex;
 use std::arch::asm;
-use std::{
-    cell::Cell,
-    collections::HashMap,
-    intrinsics::unlikely,
-    marker::PhantomData,
-    mem,
-    sync::LazyLock,
-    sync::atomic::{AtomicUsize, Ordering},
-    thread::{self, ThreadId},
-};
+use std::cell::Cell;
+use std::collections::HashMap;
+use std::intrinsics::unlikely;
+use std::marker::PhantomData;
+use std::mem;
+use std::sync::LazyLock;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::thread::{self, ThreadId};
+
+use parking_lot::Mutex;
+
+use super::scopeguard::guard;
+use super::util::cold_path;

 mod code;

@@ -53,12 +54,15 @@ where
     }
 }

-#[thread_local]
-static DATA: Data = Data {
-    pinned: Cell::new(false),
-    registered: Cell::new(false),
-    seen_events: Cell::new(0),
-};
+thread_local! {
+    static DATA: Data = const {
+        Data {
+            pinned: Cell::new(false),
+            registered: Cell::new(false),
+            seen_events: Cell::new(0),
+        }
+    };
+}

 struct Data {
     pinned: Cell<bool>,

@@ -110,16 +114,12 @@ cfg_if! {
     }
 }

-// Never inline due to thread_local bugs
-#[inline(never)]
 fn data() -> *const Data {
-    &DATA as *const Data
+    DATA.with(|data| data as *const Data)
 }

-// Never inline due to thread_local bugs
-#[inline(never)]
 fn data_init() -> *const Data {
-    let data = hide(&DATA as *const Data);
+    let data = hide(DATA.with(|data| data as *const Data));

     {
         let data = unsafe { &*data };

@@ -149,9 +149,7 @@ pub fn pin<R>(f: impl FnOnce(Pin<'_>) -> R) -> R {
     let old_pinned = data.pinned.get();
     data.pinned.set(true);
     guard(old_pinned, |pin| data.pinned.set(*pin));
-    f(Pin {
-        _private: PhantomData,
-    })
+    f(Pin { _private: PhantomData })
 }

 /// Removes the current thread from the threads allowed to access lock-free data structures.
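Note on the collect.rs hunks above: the unstable #[thread_local] attribute is replaced by the standard thread_local! macro with a const initializer, and data() now obtains its raw pointer through DATA.with, which is why the "never inline" workaround could be dropped. A minimal standalone sketch of that pattern follows; the Data fields mirror the diff, while the main function is purely illustrative.

use std::cell::Cell;

// Per-thread state, mirroring the `Data` struct from the diff.
struct Data {
    pinned: Cell<bool>,
    registered: Cell<bool>,
    seen_events: Cell<usize>,
}

thread_local! {
    // A `const { ... }` initializer avoids the lazy-init path that a
    // non-const `thread_local!` would otherwise need.
    static DATA: Data = const {
        Data {
            pinned: Cell::new(false),
            registered: Cell::new(false),
            seen_events: Cell::new(0),
        }
    };
}

// Raw pointer to this thread's `Data`, analogous to `data()` in the diff.
// The pointer stays valid only for the lifetime of the current thread.
fn data() -> *const Data {
    DATA.with(|data| data as *const Data)
}

fn main() {
    let d = unsafe { &*data() };
    d.pinned.set(true);
    d.registered.set(true);
    assert!(unsafe { &*data() }.pinned.get());
    assert_eq!(d.seen_events.get(), 0);
}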

compiler/rustc_data_structures/src/sync/table/mod.rs (+2 -11)

@@ -1,16 +1,7 @@
-//! This crate contains [SyncTable] and [SyncPushVec] which offers lock-free reads and uses
+//! This module contains [SyncTable] and [SyncPushVec] which offers lock-free reads and uses
 //! quiescent state based reclamation for which an API is available in the [collect] module.

-#![feature(
-    alloc_layout_extra,
-    allocator_api,
-    core_intrinsics,
-    dropck_eyepatch,
-    extend_one,
-    negative_impls,
-    thread_local
-)]
-#![allow(clippy::len_without_is_empty, clippy::type_complexity)]
+#![allow(unexpected_cfgs, clippy::len_without_is_empty, clippy::type_complexity)]

 #[macro_use]
 mod macros;
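Note on the mod.rs hunk above: #![feature(...)] is a crate-level attribute that rustc only accepts at the crate root, so the vendored gate list cannot stay in this module; alloc_layout_extra is instead added to lib.rs in the first hunk of this commit. A rough, nightly-only sketch of the resulting layout (module name and contents simplified for illustration):

// lib.rs (crate root): feature gates are crate-level inner attributes,
// so they live here rather than in a submodule such as sync/table/mod.rs.
#![feature(alloc_layout_extra)]
#![feature(allocator_api)]

mod table {
    // Lint attributes remain valid as module-level inner attributes.
    #![allow(clippy::len_without_is_empty, clippy::type_complexity)]
}

fn main() {}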

compiler/rustc_data_structures/src/sync/table/raw/bitmask.rs (+3 -6)

@@ -1,6 +1,7 @@
-use super::imp::{BITMASK_MASK, BITMASK_STRIDE, BitMaskWord};
 use core::intrinsics;

+use super::imp::{BITMASK_MASK, BITMASK_STRIDE, BitMaskWord};
+
 /// A bit mask which contains the result of a `Match` operation on a `Group` and
 /// allows iterating through them.
 ///

@@ -39,11 +40,7 @@ impl BitMask {
     /// Returns the first set bit in the `BitMask`, if there is one.
     #[inline]
     pub fn lowest_set_bit(self) -> Option<usize> {
-        if self.0 == 0 {
-            None
-        } else {
-            Some(unsafe { self.lowest_set_bit_nonzero() })
-        }
+        if self.0 == 0 { None } else { Some(unsafe { self.lowest_set_bit_nonzero() }) }
     }

     /// Returns the first set bit in the `BitMask`, if there is one. The

compiler/rustc_data_structures/src/sync/table/raw/generic.rs (+7 -16)

@@ -1,22 +1,15 @@
-use super::EMPTY;
-use super::bitmask::BitMask;
 use core::intrinsics::atomic_load_acq;
 use core::mem;

+use super::EMPTY;
+use super::bitmask::BitMask;
+
 // Use the native word size as the group size. Using a 64-bit group size on
 // a 32-bit architecture will just end up being more expensive because
 // shifts and multiplies will need to be emulated.
-#[cfg(any(
-    target_pointer_width = "64",
-    target_arch = "aarch64",
-    target_arch = "x86_64",
-))]
+#[cfg(any(target_pointer_width = "64", target_arch = "aarch64", target_arch = "x86_64",))]
 type GroupWord = u64;
-#[cfg(all(
-    target_pointer_width = "32",
-    not(target_arch = "aarch64"),
-    not(target_arch = "x86_64"),
-))]
+#[cfg(all(target_pointer_width = "32", not(target_arch = "aarch64"), not(target_arch = "x86_64"),))]
 type GroupWord = u32;

 pub type BitMaskWord = GroupWord;

@@ -57,10 +50,8 @@ impl Group {
             _align: [Group; 0],
             bytes: [u8; Group::WIDTH],
         }
-        const ALIGNED_BYTES: AlignedBytes = AlignedBytes {
-            _align: [],
-            bytes: [EMPTY; Group::WIDTH],
-        };
+        const ALIGNED_BYTES: AlignedBytes =
+            AlignedBytes { _align: [], bytes: [EMPTY; Group::WIDTH] };
         unsafe { mem::transmute(ALIGNED_BYTES) }
     };

compiler/rustc_data_structures/src/sync/table/raw/sse2.rs (+6 -8)

@@ -1,12 +1,12 @@
-use super::EMPTY;
-use super::bitmask::BitMask;
 use core::arch::asm;
-use core::mem;
-
 #[cfg(target_arch = "x86")]
 use core::arch::x86;
 #[cfg(target_arch = "x86_64")]
 use core::arch::x86_64 as x86;
+use core::mem;
+
+use super::EMPTY;
+use super::bitmask::BitMask;

 pub type BitMaskWord = u16;
 pub const BITMASK_STRIDE: usize = 1;

@@ -36,10 +36,8 @@ impl Group {
             _align: [Group; 0],
             bytes: [u8; Group::WIDTH],
         }
-        const ALIGNED_BYTES: AlignedBytes = AlignedBytes {
-            _align: [],
-            bytes: [EMPTY; Group::WIDTH],
-        };
+        const ALIGNED_BYTES: AlignedBytes =
+            AlignedBytes { _align: [], bytes: [EMPTY; Group::WIDTH] };
         unsafe { mem::transmute(ALIGNED_BYTES) }
     };

compiler/rustc_data_structures/src/sync/table/sync_push_vec.rs (+28 -64)

@@ -1,26 +1,21 @@
 //! A contiguous push-only array type with lock-free reads.

-use crate::{
-    collect::{self, Pin},
-    scopeguard::guard,
-};
 use core::ptr::NonNull;
+use std::alloc::{Allocator, Global, Layout, LayoutError, handle_alloc_error};
+use std::cell::UnsafeCell;
+use std::intrinsics::unlikely;
+use std::iter::FromIterator;
+use std::marker::PhantomData;
+use std::ops::{Deref, DerefMut};
+use std::ptr::slice_from_raw_parts;
+use std::sync::Arc;
+use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
+use std::{cmp, mem};
+
 use parking_lot::{Mutex, MutexGuard};
-use std::{
-    alloc::{Allocator, Global, Layout, LayoutError, handle_alloc_error},
-    cell::UnsafeCell,
-    intrinsics::unlikely,
-    iter::FromIterator,
-    marker::PhantomData,
-    mem,
-    ops::{Deref, DerefMut},
-    sync::atomic::{AtomicPtr, Ordering},
-};
-use std::{
-    cmp,
-    ptr::slice_from_raw_parts,
-    sync::{Arc, atomic::AtomicUsize},
-};
+
+use super::collect::{self, Pin};
+use super::scopeguard::guard;

 mod code;
 mod tests;

@@ -94,10 +89,7 @@ impl<T> Copy for TableRef<T> {}
 impl<T> Clone for TableRef<T> {
     #[inline]
     fn clone(&self) -> Self {
-        Self {
-            data: self.data,
-            marker: self.marker,
-        }
+        Self { data: self.data, marker: self.marker }
     }
 }

@@ -115,12 +107,8 @@ impl<T> TableRef<T> {
             info: TableInfo,
         }

-        static EMPTY: EmptyTable = EmptyTable {
-            info: TableInfo {
-                capacity: 0,
-                items: AtomicUsize::new(0),
-            },
-        };
+        static EMPTY: EmptyTable =
+            EmptyTable { info: TableInfo { capacity: 0, items: AtomicUsize::new(0) } };

         Self {
             data: unsafe {

@@ -149,16 +137,10 @@ impl<T> TableRef<T> {
         let info =
             unsafe { NonNull::new_unchecked(ptr.as_ptr().add(info_offset) as *mut TableInfo) };

-        let mut result = Self {
-            data: info,
-            marker: PhantomData,
-        };
+        let mut result = Self { data: info, marker: PhantomData };

         unsafe {
-            *result.info_mut() = TableInfo {
-                capacity,
-                items: AtomicUsize::new(0),
-            };
+            *result.info_mut() = TableInfo { capacity, items: AtomicUsize::new(0) };
         }

         result

@@ -342,13 +324,9 @@ impl<T> SyncPushVec<T> {
     pub fn with_capacity(capacity: usize) -> Self {
         Self {
             current: AtomicPtr::new(
-                if capacity > 0 {
-                    TableRef::<T>::allocate(capacity)
-                } else {
-                    TableRef::empty()
-                }
-                .data
-                .as_ptr(),
+                if capacity > 0 { TableRef::<T>::allocate(capacity) } else { TableRef::empty() }
+                    .data
+                    .as_ptr(),
             ),
             old: UnsafeCell::new(Vec::new()),
             marker: PhantomData,

@@ -389,25 +367,16 @@ impl<T> SyncPushVec<T> {
     /// Creates a [LockedWrite] handle by taking the underlying mutex that protects writes.
     #[inline]
     pub fn lock(&self) -> LockedWrite<'_, T> {
-        LockedWrite {
-            table: Write { table: self },
-            _guard: self.lock.lock(),
-        }
+        LockedWrite { table: Write { table: self }, _guard: self.lock.lock() }
     }

     /// Creates a [LockedWrite] handle from a guard protecting the underlying mutex that protects writes.
     #[inline]
     pub fn lock_from_guard<'a>(&'a self, guard: MutexGuard<'a, ()>) -> LockedWrite<'a, T> {
         // Verify that we are target of the guard
-        assert_eq!(
-            &self.lock as *const _,
-            MutexGuard::mutex(&guard) as *const _
-        );
-
-        LockedWrite {
-            table: Write { table: self },
-            _guard: guard,
-        }
+        assert_eq!(&self.lock as *const _, MutexGuard::mutex(&guard) as *const _);
+
+        LockedWrite { table: Write { table: self }, _guard: guard }
     }

     /// Extracts a mutable slice of the entire vector.

@@ -545,14 +514,9 @@ impl<T: Send> Write<'_, T> {
     fn replace_table(&mut self, new_table: TableRef<T>) {
         let table = self.table.current();

-        self.table
-            .current
-            .store(new_table.data.as_ptr(), Ordering::Release);
+        self.table.current.store(new_table.data.as_ptr(), Ordering::Release);

-        let destroy = Arc::new(DestroyTable {
-            table,
-            lock: Mutex::new(false),
-        });
+        let destroy = Arc::new(DestroyTable { table, lock: Mutex::new(false) });

         unsafe {
             (*self.table.old.get()).push(destroy.clone());
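Note on the sync_push_vec.rs hunks above: the reformatted lock_from_guard keeps its invariant check, comparing the address of the vector's own mutex with the mutex the guard points back to via MutexGuard::mutex. A small sketch of that pointer-identity pattern with parking_lot; the helper name guard_belongs_to is invented for illustration.

// Assumes a parking_lot dependency (e.g. parking_lot = "0.12").
use parking_lot::{Mutex, MutexGuard};

// Returns true if `guard` was produced by `lock`, checked by comparing the
// address of `lock` with the address of the mutex the guard refers to.
fn guard_belongs_to(lock: &Mutex<()>, guard: &MutexGuard<'_, ()>) -> bool {
    std::ptr::eq(lock, MutexGuard::mutex(guard))
}

fn main() {
    let a = Mutex::new(());
    let b = Mutex::new(());

    let guard = a.lock();
    assert!(guard_belongs_to(&a, &guard));
    assert!(!guard_belongs_to(&b, &guard));
}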

compiler/rustc_data_structures/src/sync/table/sync_push_vec/tests.rs (+1 -9)

@@ -8,15 +8,7 @@ fn test_iter() {
     let mut m = SyncPushVec::new();
     m.write().push(1);
     m.write().push(2);
-    assert_eq!(
-        m.write()
-            .read()
-            .as_slice()
-            .iter()
-            .copied()
-            .collect::<Vec<i32>>(),
-        vec![1, 2]
-    );
+    assert_eq!(m.write().read().as_slice().iter().copied().collect::<Vec<i32>>(), vec![1, 2]);
 }

 #[test]
