@@ -109,12 +109,13 @@ impl core::fmt::Display for CollectionAllocErr {
 /// Either a stack array with `length <= N` or a heap array
 /// whose pointer and capacity are stored here.
 ///
-/// We store a `*const T` instead of a `*mut T` so that the type is covariant
+/// We store a `NonNull<T>` instead of a `*mut T`, so that
+/// niche-optimization can be performed and the type is covariant
 /// with respect to `T`.
 #[repr(C)]
 pub union RawSmallVec<T, const N: usize> {
     inline: ManuallyDrop<MaybeUninit<[T; N]>>,
-    heap: (*const T, usize), // this pointer is never null
+    heap: (NonNull<T>, usize),
 }

 #[inline]
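
Aside (illustrative, not part of the diff): the two properties the new doc comment relies on are easy to check in isolation. A minimal sketch, using only `core`:

```rust
use core::mem::size_of;
use core::ptr::NonNull;

// Covariance: `NonNull<T>` is covariant in `T`, so a pointer whose referent
// outlives `'a` coerces to one with the shorter lifetime. A `*mut T` field
// would instead make the containing type invariant over `T`.
fn shorten<'a>(p: NonNull<&'static str>) -> NonNull<&'a str> {
    p
}

fn main() {
    // Niche optimization: the null bit pattern is free to encode `None`,
    // so the `Option` stays pointer-sized, unlike `Option<*mut u32>`.
    assert_eq!(size_of::<Option<NonNull<u32>>>(), size_of::<*mut u32>());
    assert!(size_of::<Option<*mut u32>>() > size_of::<*mut u32>());

    let s: &'static str = "hi";
    let _p: NonNull<&str> = shorten(NonNull::from(&s));
}
```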
@@ -143,7 +144,7 @@ impl<T, const N: usize> RawSmallVec<T, N> {
         }
     }
     #[inline]
-    const fn new_heap(ptr: *mut T, capacity: usize) -> Self {
+    const fn new_heap(ptr: NonNull<T>, capacity: usize) -> Self {
         Self {
             heap: (ptr, capacity),
         }
@@ -168,15 +169,15 @@ impl<T, const N: usize> RawSmallVec<T, N> {
     /// The vector must be on the heap
     #[inline]
     const unsafe fn as_ptr_heap(&self) -> *const T {
-        self.heap.0
+        self.heap.0.as_ptr()
     }

     /// # Safety
     ///
     /// The vector must be on the heap
     #[inline]
     unsafe fn as_mut_ptr_heap(&mut self) -> *mut T {
-        self.heap.0 as *mut T
+        self.heap.0.as_ptr()
     }

     /// # Safety
@@ -216,7 +217,7 @@ impl<T, const N: usize> RawSmallVec<T, N> {
                 Err(CollectionAllocErr::AllocErr { layout: new_layout })
             } else {
                 copy_nonoverlapping(ptr, new_ptr, len);
-                *self = Self::new_heap(new_ptr, new_capacity);
+                *self = Self::new_heap(NonNull::new_unchecked(new_ptr), new_capacity);
                 Ok(())
             }
         } else {
@@ -236,7 +237,7 @@ impl<T, const N: usize> RawSmallVec<T, N> {
             if new_ptr.is_null() {
                 Err(CollectionAllocErr::AllocErr { layout: new_layout })
             } else {
-                *self = Self::new_heap(new_ptr, new_capacity);
+                *self = Self::new_heap(NonNull::new_unchecked(new_ptr), new_capacity);
                 Ok(())
             }
         }
@@ -548,7 +549,9 @@ impl<T, const N: usize> SmallVec<T, N> {
         let mut vec = ManuallyDrop::new(vec);
         let len = vec.len();
         let cap = vec.capacity();
-        let ptr = vec.as_mut_ptr();
+        // SAFETY: vec.capacity is not `0` (checked above), so the pointer
+        // can not dangle and thus specifically cannot be null.
+        let ptr = unsafe { NonNull::new_unchecked(vec.as_mut_ptr()) };

         Self {
             len: TaggedLen::new(len, true, Self::is_zst()),
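
Aside (illustrative, not part of the diff): the reasoning in that SAFETY comment can be exercised with the checked constructor. A `Vec` with nonzero capacity is backed by a live allocation, so `NonNull::new` would also succeed; `new_unchecked` merely skips the branch:

```rust
use std::ptr::NonNull;

fn main() {
    let mut v: Vec<u64> = Vec::with_capacity(4);
    assert!(v.capacity() >= 4);
    // Nonzero capacity means `as_mut_ptr` points into a real allocation,
    // so it cannot be null and the checked constructor returns `Some`.
    let ptr: NonNull<u64> = NonNull::new(v.as_mut_ptr()).expect("non-null for capacity > 0");
    assert_eq!(ptr.as_ptr(), v.as_mut_ptr());
}
```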
@@ -752,11 +755,10 @@ impl<T, const N: usize> SmallVec<T, N> {
         debug_assert!(self.spilled());
         let len = self.len();
         let (ptr, cap) = self.raw.heap;
-        let ptr = ptr as *mut T;
         if len == cap {
             self.reserve(1);
         }
-        ptr.add(len).write(value);
+        ptr.as_ptr().add(len).write(value);
         self.set_len(len + 1)
     }

@@ -826,9 +828,9 @@ impl<T, const N: usize> SmallVec<T, N> {

             // SAFETY: len <= new_capacity <= Self::inline_size()
             // so the copy is within bounds of the inline member
-            copy_nonoverlapping(ptr, self.raw.as_mut_ptr_inline(), len);
+            copy_nonoverlapping(ptr.as_ptr(), self.raw.as_mut_ptr_inline(), len);
             drop(DropDealloc {
-                ptr: NonNull::new_unchecked(ptr as *mut u8),
+                ptr: NonNull::new_unchecked(ptr.as_ptr() as *mut u8),
                 size_bytes: old_cap * size_of::<T>(),
                 align: align_of::<T>(),
             });
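
Aside (illustrative, not part of the diff): `copy_nonoverlapping` is sound exactly under the conditions that SAFETY comment argues for: the source is valid for `len` reads, the destination for `len` writes, and the two ranges do not overlap.

```rust
use core::ptr::copy_nonoverlapping;

fn main() {
    let src = [1u8, 2, 3, 4];
    let mut dst = [0u8; 8];
    // `src` is valid for 4 reads, `dst` for 4 (of 8) writes, and the two
    // buffers are distinct locals, so they cannot overlap.
    unsafe { copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len()) };
    assert_eq!(&dst[..4], &src);
}
```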
@@ -904,10 +906,10 @@ impl<T, const N: usize> SmallVec<T, N> {
         unsafe {
             let (ptr, capacity) = self.raw.heap;
             self.raw = RawSmallVec::new_inline(MaybeUninit::uninit());
-            copy_nonoverlapping(ptr, self.raw.as_mut_ptr_inline(), len);
+            copy_nonoverlapping(ptr.as_ptr(), self.raw.as_mut_ptr_inline(), len);
             self.set_inline();
             alloc::alloc::dealloc(
-                ptr as *mut T as *mut u8,
+                ptr.as_ptr() as *mut u8,
                 Layout::from_size_align_unchecked(capacity * size_of::<T>(), align_of::<T>()),
             );
         }
@@ -1083,10 +1085,16 @@ impl<T, const N: usize> SmallVec<T, N> {
             vec
         } else {
             let this = ManuallyDrop::new(self);
-            // SAFETY: ptr was created with the global allocator
+            // SAFETY:
+            // - `ptr` was created with the global allocator
+            // - `ptr` was created with the appropriate alignment for `T`
+            // - the allocation pointed to by ptr is exactly cap * sizeof(T)
+            // - `len` is less than or equal to `cap`
+            // - the first `len` entries are proper `T`-values
+            // - the allocation is not larger than `isize::MAX`
             unsafe {
                 let (ptr, cap) = this.raw.heap;
-                Vec::from_raw_parts(ptr as *mut T, len, cap)
+                Vec::from_raw_parts(ptr.as_ptr(), len, cap)
             }
         }
     }
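
Aside (illustrative, not part of the diff): the contract spelled out in that SAFETY list is the usual decompose/rebuild round trip through `Vec::from_raw_parts`. A minimal sketch:

```rust
use std::mem::ManuallyDrop;

fn main() {
    // Decompose a Vec without running its destructor...
    let mut v = ManuallyDrop::new(vec![1u32, 2, 3]);
    let (ptr, len, cap) = (v.as_mut_ptr(), v.len(), v.capacity());

    // ...then rebuild it. Sound because every invariant in the list holds:
    // same (global) allocator, correct alignment for u32, the allocation
    // spans exactly `cap` elements, `len <= cap`, and the first `len`
    // elements are initialized.
    let rebuilt = unsafe { Vec::from_raw_parts(ptr, len, cap) };
    assert_eq!(rebuilt, [1, 2, 3]);
}
```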
@@ -1260,6 +1268,14 @@ impl<T, const N: usize> SmallVec<T, N> {
     #[inline]
     pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> SmallVec<T, N> {
         assert!(!Self::is_zst());
+
+        // SAFETY: We require caller to provide same ptr as we alloc
+        // and we never alloc null pointer.
+        let ptr = unsafe {
+            debug_assert!(!ptr.is_null(), "Called `from_raw_parts` with null pointer.");
+            NonNull::new_unchecked(ptr)
+        };
+
         SmallVec {
             len: TaggedLen::new(length, true, Self::is_zst()),
             raw: RawSmallVec::new_heap(ptr, capacity),
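
Aside (illustrative, not part of the diff): the debug-assert-then-`new_unchecked` pattern added here can be mirrored in a standalone helper. The function below is hypothetical and only sketches the idea: a loud panic on a null argument in debug builds, while release builds rely entirely on the caller's contract.

```rust
use core::ptr::NonNull;

/// Hypothetical helper mirroring the pattern in the diff: the caller promises
/// a non-null pointer; debug builds additionally verify that promise.
unsafe fn debug_checked_non_null<T>(ptr: *mut T) -> NonNull<T> {
    debug_assert!(!ptr.is_null(), "caller passed a null pointer");
    // SAFETY: non-null is guaranteed by the caller (checked above in debug builds).
    unsafe { NonNull::new_unchecked(ptr) }
}

fn main() {
    let mut x = 42i32;
    let p = unsafe { debug_checked_non_null(&mut x as *mut i32) };
    assert_eq!(unsafe { *p.as_ptr() }, 42);
}
```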
@@ -1298,7 +1314,7 @@ impl<T, const N: usize> SmallVec<T, N> {
         }
         let len = self.len();
         let (ptr, capacity) = self.raw.heap;
-        let ptr = ptr as *mut T;
+        let ptr = ptr.as_ptr();
         // SAFETY: ptr is valid for `capacity - len` writes
         let count = extend_batch(ptr, capacity - len, len, &mut iter);
         self.set_len(len + count);