@@ -921,14 +921,14 @@ impl<T, A: Allocator + Clone> RawTable<T, A> {
     /// Searches for an element in the table.
     #[inline]
     pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option<Bucket<T>> {
-        unsafe {
-            for bucket in self.iter_hash(hash) {
-                let elm = bucket.as_ref();
-                if likely(eq(elm)) {
-                    return Some(bucket);
-                }
-            }
-            None
+        let result = self.table.find_inner(hash, &mut |index| unsafe {
+            eq(self.bucket(index).as_ref())
+        });
+
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match result {
+            Some(index) => Some(unsafe { self.bucket(index) }),
+            None => None,
         }
     }
 
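The rewritten `find` above relies on two codegen tricks called out in the diff: the hand-rolled `match` sidesteps the extra LLVM IR that `Option::map` expands to, and the closure is passed to `find_inner` as `&mut dyn FnMut(usize) -> bool` so the search loop is compiled once instead of once per closure type. A minimal sketch of that dynamic-dispatch pattern, with hypothetical names (`search`, `caller`) standing in for `find_inner` and its call site:

```rust
// Sketch only: `search` plays the role of `find_inner`. Because the
// predicate is taken as `&mut dyn FnMut`, this loop is monomorphized
// once, no matter how many distinct closure types callers construct.
fn search(len: usize, pred: &mut dyn FnMut(usize) -> bool) -> Option<usize> {
    (0..len).find(|&i| pred(i))
}

fn caller(data: &[u64], needle: u64) -> Option<usize> {
    // The unsizing coercion to `&mut dyn FnMut` happens at this call.
    // After inlining, LLVM typically devirtualizes it, which is what the
    // doc comment on `find_inner` means by "eliminated by LLVM".
    search(data.len(), &mut |i| data[i] == needle)
}
```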
@@ -1054,6 +1054,7 @@ impl<T, A: Allocator + Clone> RawTable<T, A> {
     /// `RawIterHash`. Because we cannot make the `next` method unsafe on the
     /// `RawIterHash` struct, we have to make the `iter_hash` method unsafe.
     #[cfg_attr(feature = "inline-more", inline)]
+    #[cfg(feature = "raw")]
     pub unsafe fn iter_hash(&self, hash: u64) -> RawIterHash<'_, T, A> {
         RawIterHash::new(self, hash)
     }
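With `find` no longer iterating via `iter_hash`, the method appears to have no remaining callers inside the crate, so it is gated behind the existing `raw` Cargo feature instead of being compiled unconditionally. A minimal sketch of that gating pattern, assuming a feature declared as `raw = []` in `Cargo.toml` (the item name below is made up):

```rust
// Compiled only when the crate is built with `--features raw`; without
// the feature, the item and any code it would pull in simply do not
// exist in the crate.
#[cfg(feature = "raw")]
pub fn raw_only_api(hash: u64) -> u64 {
    // Placeholder body standing in for a real raw-table entry point.
    hash.rotate_left(7)
}
```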
@@ -1255,6 +1256,32 @@ impl<A: Allocator + Clone> RawTableInner<A> {
         }
     }
 
+    /// Searches for an element in the table. This uses dynamic dispatch to reduce the amount of
+    /// code generated, but it is eliminated by LLVM optimizations.
+    #[inline]
+    fn find_inner(&self, hash: u64, eq: &mut dyn FnMut(usize) -> bool) -> Option<usize> {
+        let h2_hash = h2(hash);
+        let mut probe_seq = self.probe_seq(hash);
+
+        loop {
+            let group = unsafe { Group::load(self.ctrl(probe_seq.pos)) };
+
+            for bit in group.match_byte(h2_hash) {
+                let index = (probe_seq.pos + bit) & self.bucket_mask;
+
+                if likely(eq(index)) {
+                    return Some(index);
+                }
+            }
+
+            if likely(group.match_empty().any_bit_set()) {
+                return None;
+            }
+
+            probe_seq.move_next(self.bucket_mask);
+        }
+    }
+
     #[allow(clippy::mut_mut)]
     #[inline]
     unsafe fn prepare_rehash_in_place(&mut self) {
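`find_inner` above is the SwissTable probe loop: starting from a position derived from the hash, it loads one `Group` of control bytes at a time, tests every slot whose control byte matches the 7-bit `h2` fragment of the hash, and gives up at the first group that still contains an `EMPTY` slot. A scalar sketch of the same logic, replacing the SIMD `Group` with a plain byte scan; every name here is illustrative rather than hashbrown's actual API:

```rust
const GROUP_WIDTH: usize = 8; // stand-in for Group::WIDTH (SIMD register width)
const EMPTY: u8 = 0xFF;       // hashbrown's control byte for an empty slot

/// Scalar stand-in for `find_inner`. `ctrl` has power-of-two length
/// (`bucket_mask + 1`), `start` is derived from the hash, and `h2_hash`
/// is the 7-bit fragment stored in the control bytes.
fn find_slot(
    ctrl: &[u8],
    start: usize,
    h2_hash: u8,
    eq: &mut dyn FnMut(usize) -> bool,
) -> Option<usize> {
    let bucket_mask = ctrl.len() - 1;
    let mut pos = start & bucket_mask;
    let mut stride = 0;
    loop {
        // Equivalent of `group.match_byte(h2_hash)`: probe each slot in
        // the group whose control byte matches the hash fragment.
        for bit in 0..GROUP_WIDTH {
            let index = (pos + bit) & bucket_mask;
            if ctrl[index] == h2_hash && eq(index) {
                return Some(index);
            }
        }
        // Equivalent of `group.match_empty().any_bit_set()`: an EMPTY
        // slot means the probe chain for this hash ends here.
        if (0..GROUP_WIDTH).any(|bit| ctrl[(pos + bit) & bucket_mask] == EMPTY) {
            return None;
        }
        // `probe_seq.move_next`: triangular probing steps by one more
        // group each time (offsets 0, 1, 3, 6, ... groups), which visits
        // every group exactly once when the table size is a power of two.
        stride += GROUP_WIDTH;
        pos = (pos + stride) & bucket_mask;
    }
}
```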
@@ -2187,6 +2214,7 @@ struct RawIterHashInner<'a, A: Allocator + Clone> {
 
 impl<'a, T, A: Allocator + Clone> RawIterHash<'a, T, A> {
     #[cfg_attr(feature = "inline-more", inline)]
+    #[cfg(feature = "raw")]
     fn new(table: &'a RawTable<T, A>, hash: u64) -> Self {
         RawIterHash {
             inner: RawIterHashInner::new(&table.table, hash),
@@ -2196,6 +2224,7 @@ impl<'a, T, A: Allocator + Clone> RawIterHash<'a, T, A> {
 }
 impl<'a, A: Allocator + Clone> RawIterHashInner<'a, A> {
     #[cfg_attr(feature = "inline-more", inline)]
+    #[cfg(feature = "raw")]
     fn new(table: &'a RawTableInner<A>, hash: u64) -> Self {
         unsafe {
             let h2_hash = h2(hash);