@@ -367,7 +367,7 @@ impl DroplessArena {
     /// current memory chunk. Returns `None` if there is no free space left to
     /// satisfy the request.
     #[inline]
-    fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<&mut [u8]> {
+    fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<*mut u8> {
         let ptr = self.ptr.get() as usize;
         let end = self.end.get() as usize;
         // The allocation request fits into the current chunk iff:
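The fit check that this hunk opens (and the next hunk completes) is plain integer arithmetic on the bump pointer. A minimal standalone sketch of the same align-up-and-bump logic, with hypothetical names (`fits` is not from the patch):

```rust
/// Hypothetical standalone version of the bump-pointer fit check:
/// align `ptr` up, reserve `bytes`, and return `None` instead of
/// overflowing or running past `end`.
fn fits(ptr: usize, end: usize, bytes: usize, align: usize) -> Option<usize> {
    debug_assert!(align.is_power_of_two());
    // Align `ptr` up to the next multiple of `align`; `checked_add`
    // guards against wrap-around near usize::MAX.
    let aligned = ptr.checked_add(align - 1)? & !(align - 1);
    // The request fits iff the bumped pointer stays within the chunk.
    let new_ptr = aligned.checked_add(bytes)?;
    if new_ptr <= end { Some(aligned) } else { None }
}

fn main() {
    assert_eq!(fits(0x1001, 0x2000, 16, 8), Some(0x1008));
    assert_eq!(fits(0x1ff8, 0x2000, 16, 8), None);
}
```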
@@ -383,14 +383,14 @@ impl DroplessArena {
         let new_ptr = aligned.checked_add(bytes)?;
         if new_ptr <= end {
             self.ptr.set(new_ptr as *mut u8);
-            unsafe { Some(slice::from_raw_parts_mut(aligned as *mut u8, bytes)) }
+            Some(aligned as *mut u8)
         } else {
             None
         }
     }

     #[inline]
-    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
+    pub fn alloc_raw(&self, bytes: usize, align: usize) -> *mut u8 {
         assert!(bytes != 0);
         loop {
             if let Some(a) = self.alloc_raw_without_grow(bytes, align) {
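With `alloc_raw` returning `*mut u8`, callers no longer receive a `&mut [u8]` pointing at uninitialized bytes; they cast the raw pointer to the target type and initialize through it before any reference exists. A small sketch of that caller-side pattern in isolation (the names are illustrative, not from the patch):

```rust
use std::{mem::MaybeUninit, ptr};

fn main() {
    // Stand-in for an arena chunk: uninitialized storage handed out by
    // raw pointer, never viewed through a `&mut [u8]`.
    let mut storage = MaybeUninit::<u64>::uninit();
    let raw: *mut u8 = storage.as_mut_ptr() as *mut u8; // what alloc_raw now returns

    let typed = raw as *mut u64; // single cast, no `as *mut _ as *mut T` dance
    unsafe {
        ptr::write(typed, 42); // initialize before ever creating a reference
        assert_eq!(*typed, 42);
    }
}
```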
@@ -406,7 +406,7 @@ impl DroplessArena {
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());

-        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;

         unsafe {
             // Write into uninitialized memory.
@@ -431,13 +431,11 @@ impl DroplessArena {
         assert!(mem::size_of::<T>() != 0);
         assert!(!slice.is_empty());

-        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
-            as *mut T;
+        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;

         unsafe {
-            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
+            slice::from_raw_parts_mut(mem, slice.len())
         }
     }
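The reordering in this hunk is the substantive part: the old code created a `&mut [T]` over uninitialized memory and then filled it, while the new code copies through the raw pointer first and only materializes the reference once every element is initialized. A compressed sketch of that copy-then-borrow pattern (the free function and names are illustrative, not from the patch):

```rust
use std::slice;

/// Copy-then-borrow, as in the new `alloc_slice` body: fill the
/// destination through a raw pointer, then create the `&mut [T]` over
/// memory that is now fully initialized.
unsafe fn copy_into<'a, T: Copy>(dst: *mut T, src: &[T]) -> &'a mut [T] {
    dst.copy_from_nonoverlapping(src.as_ptr(), src.len());
    slice::from_raw_parts_mut(dst, src.len())
}

fn main() {
    let src = [1u32, 2, 3];
    let mut buf = [0u32; 3]; // stand-in for freshly bumped arena memory
    let out = unsafe { copy_into(buf.as_mut_ptr(), &src) };
    assert_eq!(out, &[1, 2, 3]);
}
```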
@@ -480,7 +478,7 @@ impl DroplessArena {
                     return &mut [];
                 }
                 let size = len.checked_mul(mem::size_of::<T>()).unwrap();
-                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut T;
                 unsafe { self.write_from_iter(iter, len, mem) }
             }
             (_, _) => {
@@ -495,7 +493,7 @@ impl DroplessArena {
                         let len = vec.len();
                         let start_ptr = self
                             .alloc_raw(len * mem::size_of::<T>(), mem::align_of::<T>())
-                            as *mut _ as *mut T;
+                            as *mut T;
                         vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                         vec.set_len(0);
                         slice::from_raw_parts_mut(start_ptr, len)
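This is the slow path of `alloc_from_iter`: elements are first collected into a vector, then moved into the arena with a nonoverlapping copy, and `set_len(0)` keeps the vector from dropping values it no longer owns. A self-contained sketch of that move-out pattern under hypothetical names:

```rust
use std::{mem::MaybeUninit, slice};

/// Move a vector's elements into caller-provided storage without ever
/// dropping or cloning them, mirroring the vec branch above.
unsafe fn drain_into<'a, T>(mut vec: Vec<T>, dst: *mut T) -> &'a mut [T] {
    let len = vec.len();
    vec.as_ptr().copy_to_nonoverlapping(dst, len);
    vec.set_len(0); // the copies in `dst` now own the values; avoid double-drop
    slice::from_raw_parts_mut(dst, len)
}

fn main() {
    let v = vec![String::from("a"), String::from("b")];
    // Stand-in for arena memory: uninitialized slots of the right type.
    let mut storage = [MaybeUninit::<String>::uninit(), MaybeUninit::<String>::uninit()];
    let out = unsafe { drain_into(v, storage.as_mut_ptr() as *mut String) };
    assert_eq!(out, &["a", "b"]);
    // A DroplessArena would leak these Strings (it requires !needs_drop);
    // drop them by hand in this demo.
    unsafe { std::ptr::drop_in_place(out) };
}
```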
@@ -539,8 +537,7 @@ pub struct DropArena {
 impl DropArena {
     #[inline]
     pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
-        let mem =
-            self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
         // Write into uninitialized memory.
         ptr::write(mem, object);
         let result = &mut *mem;
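`DropArena` differs from `DroplessArena` in exactly one way: after writing the value it records a type-erased destructor so the value can be dropped when the arena is. A simplified, self-contained sketch of that bookkeeping (the types and names here are illustrative, not the arena's actual machinery):

```rust
use std::{cell::RefCell, ptr};

/// One recorded destructor: a type-erased drop function plus the object
/// it should run on.
struct Destructor {
    obj: *mut u8,
    drop_fn: unsafe fn(*mut u8),
}

unsafe fn drop_erased<T>(obj: *mut u8) {
    ptr::drop_in_place(obj as *mut T);
}

struct Destructors(RefCell<Vec<Destructor>>);

impl Destructors {
    fn record<T>(&self, obj: *mut T) {
        self.0.borrow_mut().push(Destructor { obj: obj as *mut u8, drop_fn: drop_erased::<T> });
    }
}

impl Drop for Destructors {
    fn drop(&mut self) {
        // Run every recorded destructor when the arena itself is dropped.
        for d in self.0.borrow_mut().drain(..) {
            unsafe { (d.drop_fn)(d.obj) };
        }
    }
}

fn main() {
    let list = Destructors(RefCell::new(Vec::new()));
    let obj = Box::into_raw(Box::new(String::from("hello")));
    list.record(obj);
    drop(list); // drops the String; the Box's heap cell is leaked in this
                // demo, much as an arena frees chunks wholesale, not per object
}
```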
@@ -563,7 +560,7 @@ impl DropArena {
         let start_ptr = self
             .arena
             .alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
-            as *mut _ as *mut T;
+            as *mut T;

         let mut destructors = self.destructors.borrow_mut();
         // Reserve space for the destructors so we can't panic while adding them
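The hunk cuts off at the reserve-before-write comment; the ordering it describes is worth spelling out. A hypothetical sketch of why reserving first matters (none of these names are from the patch):

```rust
unsafe fn noop(_: *mut u8) {}

/// Reserve-before-write: `reserve` is the only call here that can panic,
/// and it runs before any element is initialized, so a panic can never
/// strand values in the arena that have no destructor recorded for them.
fn push_all(destructors: &mut Vec<unsafe fn(*mut u8)>, count: usize) {
    destructors.reserve(count); // sole panic point, before any writes
    for _ in 0..count {
        // ... initialize one element in arena memory here, then:
        destructors.push(noop); // capacity already reserved: no reallocation
    }
}

fn main() {
    let mut d = Vec::new();
    push_all(&mut d, 3);
    assert_eq!(d.len(), 3);
}
```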
0 commit comments