@@ -36,12 +36,14 @@ use ty::util::{IntTypeExt, Discr};
 use ty::walk::TypeWalker;
 use util::captures::Captures;
 use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
+use arena::SyncDroplessArena;
 
 use serialize::{self, Encodable, Encoder};
 use std::cell::RefCell;
 use std::cmp::{self, Ordering};
 use std::fmt;
 use std::hash::{Hash, Hasher};
+use std::marker::PhantomData;
 use std::ops::Deref;
 use rustc_data_structures::sync::Lrc;
 use std::slice;
@@ -582,54 +584,120 @@ impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for Ty<'tcx> {
     }
 }
 
+extern {
+    /// A dummy type used to force Slice to be unsized without requiring fat pointers
+    type OpaqueSliceContents;
+}
+
 /// A wrapper for slices with the additional invariant
 /// that the slice is interned and no other slice with
 /// the same contents can exist in the same context.
 /// This means we can use pointer + length for both
 /// equality comparisons and hashing.
-#[derive(Debug, RustcEncodable)]
-pub struct Slice<T>([T]);
+pub struct Slice<T>(PhantomData<T>, OpaqueSliceContents);
+
+impl<T> Slice<T> {
+    /// Returns the offset of the array
+    #[inline(always)]
+    fn offset() -> usize {
+        // Align up the size of the len (usize) field
+        let align = mem::align_of::<T>();
+        let align_mask = align - 1;
+        let offset = mem::size_of::<usize>();
+        (offset + align_mask) & !align_mask
+    }
+}
+
+impl<T: Copy> Slice<T> {
+    #[inline]
+    fn from_arena<'tcx>(arena: &'tcx SyncDroplessArena, slice: &[T]) -> &'tcx Slice<T> {
+        assert!(!mem::needs_drop::<T>());
+        assert!(mem::size_of::<T>() != 0);
+        assert!(slice.len() != 0);
+
+        let offset = Slice::<T>::offset();
+        let size = offset + slice.len() * mem::size_of::<T>();
+
+        let mem: *mut u8 = arena.alloc_raw(
+            size,
+            cmp::max(mem::align_of::<T>(), mem::align_of::<usize>())).as_mut_ptr();
+
+        unsafe {
+            // Write the length
+            *(mem as *mut usize) = slice.len();
+
+            // Write the elements
+            let arena_slice = slice::from_raw_parts_mut(
+                mem.offset(offset as isize) as *mut T,
+                slice.len());
+            arena_slice.copy_from_slice(slice);
+
+            &*(mem as *const Slice<T>)
+        }
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for Slice<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        (**self).fmt(f)
+    }
+}
+
+impl<T: Encodable> Encodable for Slice<T> {
+    #[inline]
+    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+        (**self).encode(s)
+    }
+}
 
 impl<T> Ord for Slice<T> where T: Ord {
     fn cmp(&self, other: &Slice<T>) -> Ordering {
         if self == other { Ordering::Equal } else {
-            <[T] as Ord>::cmp(&self.0, &other.0)
+            <[T] as Ord>::cmp(&**self, &**other)
         }
     }
 }
 
 impl<T> PartialOrd for Slice<T> where T: PartialOrd {
     fn partial_cmp(&self, other: &Slice<T>) -> Option<Ordering> {
         if self == other { Some(Ordering::Equal) } else {
-            <[T] as PartialOrd>::partial_cmp(&self.0, &other.0)
+            <[T] as PartialOrd>::partial_cmp(&**self, &**other)
         }
     }
 }
 
-impl<T> PartialEq for Slice<T> {
+impl<T: PartialEq> PartialEq for Slice<T> {
     #[inline]
     fn eq(&self, other: &Slice<T>) -> bool {
-        (&self.0 as *const [T]) == (&other.0 as *const [T])
+        (self as *const _) == (other as *const _)
     }
 }
-impl<T> Eq for Slice<T> {}
+impl<T: Eq> Eq for Slice<T> {}
 
 impl<T> Hash for Slice<T> {
+    #[inline]
     fn hash<H: Hasher>(&self, s: &mut H) {
-        (self.as_ptr(), self.len()).hash(s)
+        (self as *const Slice<T>).hash(s)
     }
 }
 
 impl<T> Deref for Slice<T> {
     type Target = [T];
+    #[inline(always)]
     fn deref(&self) -> &[T] {
-        &self.0
+        unsafe {
+            let raw = self as *const _ as *const u8;
+            let len = *(raw as *const usize);
+            let slice = raw.offset(Slice::<T>::offset() as isize);
+            slice::from_raw_parts(slice as *const T, len)
+        }
     }
 }
 
 impl<'a, T> IntoIterator for &'a Slice<T> {
     type Item = &'a T;
     type IntoIter = <&'a [T] as IntoIterator>::IntoIter;
+    #[inline(always)]
     fn into_iter(self) -> Self::IntoIter {
         self[..].iter()
     }
@@ -638,9 +706,14 @@ impl<'a, T> IntoIterator for &'a Slice<T> {
 impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Slice<Ty<'tcx>> {}
 
 impl<T> Slice<T> {
+    #[inline(always)]
     pub fn empty<'a>() -> &'a Slice<T> {
+        #[repr(align(64), C)]
+        struct EmptySlice([u8; 64]);
+        static EMPTY_SLICE: EmptySlice = EmptySlice([0; 64]);
+        assert!(mem::align_of::<T>() <= 64);
         unsafe {
-            mem::transmute(slice::from_raw_parts(0x1 as *const T, 0))
+            &*(&EMPTY_SLICE as *const _ as *const Slice<T>)
         }
     }
 }
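
Note on empty(): the old body conjured a dangling (0x1, 0) fat pointer, which no longer works now that &Slice<T> is a thin pointer whose deref must read a real length through the address. The shared zeroed, 64-byte-aligned static gives every element type up to 64-byte alignment (hence the assert) a valid address whose leading usize is 0. A small standalone check of that reasoning, not the patch's code:

// The shared empty Slice is a zeroed, 64-byte-aligned block: deref reads
// the leading usize (0) and builds a zero-length slice, so no element
// bytes are ever touched regardless of T.
#[repr(C, align(64))]
struct EmptySlice([u8; 64]);

static EMPTY_SLICE: EmptySlice = EmptySlice([0; 64]);

fn main() {
    let raw = &EMPTY_SLICE as *const EmptySlice as *const u8;
    let len = unsafe { *(raw as *const usize) }; // the length header
    assert_eq!(len, 0);
}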