@@ -118,7 +118,7 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
             llargs: &[ValueRef],
             destination: Option<(ReturnDest<'tcx>, mir::BasicBlock)>,
             cleanup: Option<mir::BasicBlock>
-        | {
+        | -> Option<ValueRef> {
             if let Some(cleanup) = cleanup {
                 let ret_bcx = if let Some((_, target)) = destination {
                     this.blocks[target]
@@ -137,6 +137,8 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
                     this.set_debug_loc(&ret_bcx, terminator.source_info);
                     this.store_return(&ret_bcx, ret_dest, &fn_ty.ret, invokeret);
                 }
+
+                None
             } else {
                 let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
                 fn_ty.apply_attrs_callsite(llret);
@@ -154,6 +156,8 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
                 } else {
                     bcx.unreachable();
                 }
+
+                Some(llret)
             }
         };
 
@@ -209,9 +213,9 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
             mir::TerminatorKind::Return => {
                 let llval = match self.fn_ty.ret.mode {
                     PassMode::Ignore | PassMode::Indirect(_) => {
-                        bcx.ret_void();
-                        return;
-                    }
+                        bcx.ret_void();
+                        return;
+                    }
 
                     PassMode::Direct(_) | PassMode::Pair(..) => {
                         let op = self.trans_consume(&bcx, &mir::Place::Local(mir::RETURN_PLACE));
@@ -224,27 +228,27 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
 
                     PassMode::Cast(cast_ty) => {
                         let op = match self.locals[mir::RETURN_PLACE] {
-                            LocalRef::Operand(Some(op)) => op,
-                            LocalRef::Operand(None) => bug!("use of return before def"),
+                            LocalRef::Operand(Some(op)) => op,
+                            LocalRef::Operand(None) => bug!("use of return before def"),
                             LocalRef::Place(tr_place) => {
-                                OperandRef {
+                                OperandRef {
                                     val: Ref(tr_place.llval, tr_place.alignment),
                                     layout: tr_place.layout
-                                }
                             }
-                        };
-                        let llslot = match op.val {
-                            Immediate(_) | Pair(..) => {
+                            }
+                        };
+                        let llslot = match op.val {
+                            Immediate(_) | Pair(..) => {
                                 let scratch = PlaceRef::alloca(&bcx, self.fn_ty.ret.layout, "ret");
                                 op.val.store(&bcx, scratch);
                                 scratch.llval
-                            }
-                            Ref(llval, align) => {
-                                assert_eq!(align, Alignment::AbiAligned,
+                            }
+                            Ref(llval, align) => {
+                                assert_eq!(align, Alignment::AbiAligned,
                                            "return place is unaligned!");
-                                llval
-                            }
-                        };
+                                llval
+                            }
+                        };
                         bcx.load(
                             bcx.pointercast(llslot, cast_ty.llvm_type(bcx.ccx).ptr_to()),
                             Some(self.fn_ty.ret.layout.align))
@@ -448,6 +452,11 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
                 let sig = callee.layout.ty.fn_sig(bcx.tcx());
                 let sig = bcx.tcx().erase_late_bound_regions_and_normalize(&sig);
                 let abi = sig.abi;
+                let notail = def.map(|def| {
+                    def.attrs(bcx.ccx.tcx())
+                        .iter()
+                        .any(|attr| attr.check_name("notail_when_called"))
+                }).unwrap_or(false);
 
                 // Handle intrinsics old trans wants Expr's for, ourselves.
                 let intrinsic = match def {
@@ -508,28 +517,28 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
                     ReturnDest::Store(dst) => dst.llval,
                     ReturnDest::DirectOperand(_) =>
                         bug!("Cannot use direct operand with an intrinsic call")
-                };
+                };
 
                 let args: Vec<_> = args.iter().enumerate().map(|(i, arg)| {
-                    // The indices passed to simd_shuffle* in the
-                    // third argument must be constant. This is
-                    // checked by const-qualification, which also
-                    // promotes any complex rvalues to constants.
+                    // The indices passed to simd_shuffle* in the
+                    // third argument must be constant. This is
+                    // checked by const-qualification, which also
+                    // promotes any complex rvalues to constants.
                     if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
-                        match *arg {
+                        match *arg {
                             mir::Operand::Copy(_) |
                             mir::Operand::Move(_) => {
-                                span_bug!(span, "shuffle indices must be constant");
-                            }
-                            mir::Operand::Constant(ref constant) => {
-                                let val = self.trans_constant(&bcx, constant);
+                                span_bug!(span, "shuffle indices must be constant");
+                            }
+                            mir::Operand::Constant(ref constant) => {
+                                let val = self.trans_constant(&bcx, constant);
                                 return OperandRef {
                                     val: Immediate(val.llval),
                                     layout: bcx.ccx.layout_of(val.ty)
                                 };
-                            }
                         }
                     }
+                    }
 
                     self.trans_operand(&bcx, arg)
                 }).collect();
@@ -597,9 +606,14 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
                     _ => span_bug!(span, "no llfn for call"),
                 };
 
-                do_call(self, bcx, fn_ty, fn_ptr, &llargs,
+                let llret = do_call(self, bcx, fn_ty, fn_ptr, &llargs,
                         destination.as_ref().map(|&(_, target)| (ret_dest, target)),
                         cleanup);
+                if notail {
+                    unsafe {
+                        llret.map(|llret| llvm::LLVMRustSetCallNoTail(llret));
+                    }
+                }
             }
             mir::TerminatorKind::GeneratorDrop |
             mir::TerminatorKind::Yield { .. } |
@@ -615,21 +629,21 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
         // Fill padding with undef value, where applicable.
         if let Some(ty) = arg.pad {
             llargs.push(C_undef(ty.llvm_type(bcx.ccx)));
-        }
+        }
 
         if arg.is_ignore() {
             return;
-        }
+        }
 
         if let PassMode::Pair(..) = arg.mode {
             match op.val {
                 Pair(a, b) => {
                     llargs.push(a);
                     llargs.push(b);
-                    return;
-                }
-                _ => bug!("trans_argument: {:?} invalid for pair arugment", op)
+                    return;
             }
+                _ => bug!("trans_argument: {:?} invalid for pair arugment", op)
+            }
         }
 
         // Force by-ref if we have to load through a cast pointer.
@@ -640,10 +654,10 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
                         let scratch = PlaceRef::alloca(bcx, arg.layout, "arg");
                         op.val.store(bcx, scratch);
                         (scratch.llval, Alignment::AbiAligned, true)
-                    }
+                    }
                     _ => {
                         (op.immediate_or_packed_pair(bcx), Alignment::AbiAligned, false)
-                    }
+                    }
                 }
             }
             Ref(llval, align @ Alignment::Packed(_)) if arg.is_indirect() => {
@@ -674,8 +688,8 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
                 if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {
                     if scalar.is_bool() {
                         bcx.range_metadata(llval, 0..2);
-                    }
-                }
+                    }
+                }
                 // We store bools as i8 so we need to truncate to i1.
                 llval = base::to_immediate(bcx, llval, arg.layout);
             }
@@ -697,15 +711,15 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
             for i in 0..tuple.layout.fields.count() {
                 let field_ptr = tuple_ptr.project_field(bcx, i);
                 self.trans_argument(bcx, field_ptr.load(bcx), llargs, &args[i]);
-            }
-        } else {
+            }
+        } else {
             // If the tuple is immediate, the elements are as well.
             for i in 0..tuple.layout.fields.count() {
                 let op = tuple.extract_field(bcx, i);
                 self.trans_argument(bcx, op, llargs, &args[i]);
+            }
+        }
     }
-        }
-    }
 
     fn get_personality_slot(&mut self, bcx: &Builder<'a, 'tcx>) -> PlaceRef<'tcx> {
         let ccx = bcx.ccx;
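
Summary of the substantive changes: the `do_call` closure now returns `Option<ValueRef>`, yielding `Some(llret)` for a plain `call` and `None` on the `invoke` (cleanup) path, where the result is already consumed by `store_return` inside the closure. At the `Call` terminator, `notail` is computed by scanning the callee's attributes for `notail_when_called`; when it is set, the returned call instruction is marked non-tail via `llvm::LLVMRustSetCallNoTail`, so LLVM will not turn that call into a tail call. Most of the remaining hunks are whitespace-only re-indentation, which is why several `-`/`+` pairs show identical text.

For illustration, a sketch of what the attribute might look like on the source side, assuming it is exposed to user code at all; the diff only shows the backend half, so the placement below and any feature gating are assumptions:

    // Hypothetical usage: only the attribute name "notail_when_called" is
    // confirmed, by the check_name() call in the diff above.
    #[notail_when_called]
    fn observe_caller() {
        // Calls to this function get LLVM's `notail` marker, so the
        // caller's frame is kept live rather than being tail-called away,
        // e.g. for reliable backtraces or stack introspection.
    }

    fn caller() {
        observe_caller() // without the marker, a tail-call candidate
    }

`LLVMRustSetCallNoTail` does not appear in upstream rustc's LLVM bindings, so this commit presumably adds it on the C++ side, likely as a thin wrapper around LLVM's `CallInst::setTailCallKind(CallInst::TCK_NoTail)`. Only the plain-`call` path is marked, since `do_call` returns `None` for invokes.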