@@ -43,9 +43,6 @@ use vec::Vec;
 /// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
 const MAX_REFCOUNT: usize = (isize::MAX) as usize;

-/// A sentinel value that is used for the pointer of `Weak::new()`.
-const WEAK_EMPTY: usize = 1;
-
 /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
 /// Reference Counted'.
 ///
@@ -239,9 +236,9 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 #[stable(feature = "arc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
     // This is a `NonNull` to allow optimizing the size of this type in enums,
-    // but it is actually not truly "non-null". A `Weak::new()` will set this
-    // to a sentinel value, instead of needing to allocate some space in the
-    // heap.
+    // but it is not necessarily a valid pointer.
+    // `Weak::new` sets this to a dangling pointer so that it doesn't need
+    // to allocate space on the heap.
     ptr: NonNull<ArcInner<T>>,
 }

@@ -1034,14 +1031,18 @@ impl<T> Weak<T> {
     /// ```
     #[stable(feature = "downgraded_weak", since = "1.10.0")]
     pub fn new() -> Weak<T> {
-        unsafe {
-            Weak {
-                ptr: NonNull::new_unchecked(WEAK_EMPTY as *mut _),
-            }
+        Weak {
+            ptr: NonNull::dangling(),
         }
     }
 }

+fn is_dangling<T: ?Sized>(ptr: NonNull<T>) -> bool {
+    let address = ptr.as_ptr() as *mut () as usize;
+    let align = align_of_val(unsafe { ptr.as_ref() });
+    address == align
+}
+
 impl<T: ?Sized> Weak<T> {
     /// Attempts to upgrade the `Weak` pointer to an [`Arc`], extending
     /// the lifetime of the value if successful.
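The `is_dangling` helper works because `NonNull::dangling()` returns a well-aligned, non-null pointer whose address is exactly the alignment of the pointee, so a real `ArcInner<T>` allocation will not sit at that address in practice. A minimal standalone sketch of that invariant (not part of the patch; `is_dangling_addr` and the `u64` example are illustrative only):

use std::mem::align_of;
use std::ptr::NonNull;

// Simplified, `Sized`-only version of the check added above: the alignment is
// known statically, so no `as_ref()` is needed (the patch uses `align_of_val`
// to cover `?Sized` pointees as well).
fn is_dangling_addr<T>(ptr: NonNull<T>) -> bool {
    ptr.as_ptr() as usize == align_of::<T>()
}

fn main() {
    // `NonNull::dangling()` for `u64` has an address equal to `align_of::<u64>()`.
    let dangling: NonNull<u64> = NonNull::dangling();
    assert!(is_dangling_addr(dangling));

    // A pointer into a real heap allocation is, in practice, never at that address.
    let boxed = Box::new(0u64);
    assert!(!is_dangling_addr(NonNull::from(&*boxed)));
}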
@@ -1073,11 +1074,7 @@ impl<T: ?Sized> Weak<T> {
     pub fn upgrade(&self) -> Option<Arc<T>> {
         // We use a CAS loop to increment the strong count instead of a
         // fetch_add because once the count hits 0 it must never be above 0.
-        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
-            return None;
-        } else {
-            unsafe { self.ptr.as_ref() }
-        };
+        let inner = self.inner()?;

         // Relaxed load because any write of 0 that we can observe
         // leaves the field in a permanently zero state (so a
@@ -1108,6 +1105,17 @@ impl<T: ?Sized> Weak<T> {
             }
         }
     }
+
+    /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`,
+    /// i.e. this `Weak` was created by `Weak::new`.
+    #[inline]
+    fn inner(&self) -> Option<&ArcInner<T>> {
+        if is_dangling(self.ptr) {
+            None
+        } else {
+            Some(unsafe { self.ptr.as_ref() })
+        }
+    }
 }

 #[stable(feature = "arc_weak", since = "1.4.0")]
@@ -1125,10 +1133,10 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
-        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
-            return Weak { ptr: self.ptr };
+        let inner = if let Some(inner) = self.inner() {
+            inner
         } else {
-            unsafe { self.ptr.as_ref() }
+            return Weak { ptr: self.ptr };
         };
         // See comments in Arc::clone() for why this is relaxed. This can use a
         // fetch_add (ignoring the lock) because the weak count is only locked
@@ -1203,10 +1211,10 @@ impl<T: ?Sized> Drop for Weak<T> {
         // weak count can only be locked if there was precisely one weak ref,
         // meaning that drop could only subsequently run ON that remaining weak
         // ref, which can only happen after the lock is released.
-        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
-            return;
+        let inner = if let Some(inner) = self.inner() {
+            inner
         } else {
-            unsafe { self.ptr.as_ref() }
+            return
         };

         if inner.weak.fetch_sub(1, Release) == 1 {
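Observable behaviour does not change with the switch from the `WEAK_EMPTY` sentinel to a dangling pointer: `Weak::new()` still allocates nothing, and `upgrade`, `clone`, and `Drop` all short-circuit through the new `inner()` accessor. A small usage sketch (ordinary user code, no new API assumed):

use std::sync::{Arc, Weak};

fn main() {
    // `Weak::new()` performs no allocation: the pointer is dangling.
    let empty: Weak<String> = Weak::new();
    assert!(empty.upgrade().is_none());   // upgrade() returns early via inner()

    // clone() just copies the dangling pointer; Drop returns early too.
    let copy = empty.clone();
    assert!(copy.upgrade().is_none());
    drop(copy);

    // A Weak obtained from a live Arc still upgrades as before.
    let strong = Arc::new(String::from("hello"));
    let weak = Arc::downgrade(&strong);
    assert!(weak.upgrade().is_some());
    drop(strong);
    assert!(weak.upgrade().is_none());    // value gone, upgrade now fails
}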