1 | 1 | // Not in interpret to make sure we do not use private implementation details
2 | 2 |
3 | | -use std::borrow::{Borrow, Cow};
4 | | -use std::collections::hash_map::Entry;
5 | 3 | use std::convert::TryInto;
6 | 4 | use std::error::Error;
7 | 5 | use std::fmt;
8 | 6 | use std::hash::Hash;
9 | 7 |
10 | 9 | use crate::interpret::eval_nullary_intrinsic;
11 | 10 | use rustc::hir::def::DefKind;
12 | | -use rustc::hir::def_id::DefId;
13 | 11 | use rustc::mir;
14 | 12 | use rustc::mir::interpret::{ConstEvalErr, ErrorHandled, ScalarMaybeUndef};
15 | 13 | use rustc::traits::Reveal;
16 | | -use rustc::ty::layout::{self, HasTyCtxt, LayoutOf, VariantIdx};
17 | | -use rustc::ty::{self, subst::Subst, Ty, TyCtxt};
18 | | -use rustc_data_structures::fx::FxHashMap;
| 14 | +use rustc::ty::layout::{self, LayoutOf, VariantIdx};
| 15 | +use rustc::ty::{self, subst::Subst, TyCtxt};
19 | 16 |
20 | 17 | use syntax::{
21 | 18 |     source_map::{Span, DUMMY_SP},
22 | 19 |     symbol::Symbol,
23 | 20 | };
24 | 21 |
25 | 22 | use crate::interpret::{
26 | | -    self, intern_const_alloc_recursive, snapshot, AllocId, Allocation, AssertMessage, ConstValue,
27 | | -    GlobalId, ImmTy, Immediate, InterpCx, InterpErrorInfo, InterpResult, MPlaceTy, Machine, Memory,
28 | | -    MemoryKind, OpTy, PlaceTy, Pointer, RawConst, RefTracking, Scalar, StackPopCleanup,
| 23 | +    intern_const_alloc_recursive, Allocation, ConstValue, GlobalId, ImmTy, Immediate, InterpCx,
| 24 | +    InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemoryKind, OpTy, RawConst, RefTracking,
| 25 | +    Scalar, StackPopCleanup,
29 | 26 | };
30 | 27 |
31 | | -/// Number of steps until the detector even starts doing anything.
32 | | -/// Also, a warning is shown to the user when this number is reached.
33 | | -const STEPS_UNTIL_DETECTOR_ENABLED: isize = 1_000_000;
34 | | -/// The number of steps between loop detector snapshots.
35 | | -/// Should be a power of two for performance reasons.
36 | | -const DETECTOR_SNAPSHOT_PERIOD: isize = 256;
| 28 | +mod error;
| 29 | +mod machine;
| 30 | +
| 31 | +pub use error::*;
| 32 | +pub use machine::*;
37 | 33 |
38 | 34 | /// The `InterpCx` is only meant to be used to do field and index projections into constants for
39 | 35 | /// `simd_shuffle` and const patterns in match arms.
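The first hunk trims imports and replaces the loop-detector constants with a module split: `ConstEvalError` and the CTFE `Machine` implementation (deleted in the hunk below) move into new `error` and `machine` submodules, while glob re-exports keep the existing `crate::const_eval::*` paths compiling. A minimal, self-contained sketch of that re-export pattern, with the moved items reduced to stubs (illustration only, not the actual contents of the new files):

```rust
// Stub modules standing in for the real `error.rs` and `machine.rs`, which
// contain the items removed from const_eval.rs below.
mod error {
    #[derive(Clone, Debug)]
    pub enum ConstEvalError {
        NeedsRfc(String),
        ConstAccessesStatic,
    }
}

mod machine {
    // Stand-in for the moved CompileTimeInterpreter machine state.
    pub struct CompileTimeInterpreter;
}

// Glob re-exports: both items remain importable from this module, so callers
// of `crate::const_eval::ConstEvalError` etc. do not notice the split.
pub use error::*;
pub use machine::*;

fn main() {
    let _err: ConstEvalError = ConstEvalError::ConstAccessesStatic;
    let _machine = CompileTimeInterpreter;
}
```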
@@ -173,335 +169,6 @@ fn eval_body_using_ecx<'mir, 'tcx>(
173 | 169 |     Ok(ret)
174 | 170 | }
175 | 171 |
176 | | -#[derive(Clone, Debug)]
177 | | -pub enum ConstEvalError {
178 | | -    NeedsRfc(String),
179 | | -    ConstAccessesStatic,
180 | | -}
181 | | -
182 | | -impl<'tcx> Into<InterpErrorInfo<'tcx>> for ConstEvalError {
183 | | -    fn into(self) -> InterpErrorInfo<'tcx> {
184 | | -        err_unsup!(Unsupported(self.to_string())).into()
185 | | -    }
186 | | -}
187 | | -
188 | | -impl fmt::Display for ConstEvalError {
189 | | -    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
190 | | -        use self::ConstEvalError::*;
191 | | -        match *self {
192 | | -            NeedsRfc(ref msg) => {
193 | | -                write!(f, "\"{}\" needs an rfc before being allowed inside constants", msg)
194 | | -            }
195 | | -            ConstAccessesStatic => write!(f, "constant accesses static"),
196 | | -        }
197 | | -    }
198 | | -}
199 | | -
200 | | -impl Error for ConstEvalError {}
201 | | -
202 | | -// Extra machine state for CTFE, and the Machine instance
203 | | -pub struct CompileTimeInterpreter<'mir, 'tcx> {
204 | | -    /// When this value is negative, it indicates the number of interpreter
205 | | -    /// steps *until* the loop detector is enabled. When it is positive, it is
206 | | -    /// the number of steps after the detector has been enabled modulo the loop
207 | | -    /// detector period.
208 | | -    pub(super) steps_since_detector_enabled: isize,
209 | | -
210 | | -    /// Extra state to detect loops.
211 | | -    pub(super) loop_detector: snapshot::InfiniteLoopDetector<'mir, 'tcx>,
212 | | -}
213 | | -
214 | | -#[derive(Copy, Clone, Debug)]
215 | | -pub struct MemoryExtra {
216 | | -    /// Whether this machine may read from statics
217 | | -    can_access_statics: bool,
218 | | -}
219 | | -
220 | | -impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
221 | | -    fn new() -> Self {
222 | | -        CompileTimeInterpreter {
223 | | -            loop_detector: Default::default(),
224 | | -            steps_since_detector_enabled: -STEPS_UNTIL_DETECTOR_ENABLED,
225 | | -        }
226 | | -    }
227 | | -}
228 | | -
229 | | -impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
230 | | -    #[inline(always)]
231 | | -    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
232 | | -    where
233 | | -        K: Borrow<Q>,
234 | | -    {
235 | | -        FxHashMap::contains_key(self, k)
236 | | -    }
237 | | -
238 | | -    #[inline(always)]
239 | | -    fn insert(&mut self, k: K, v: V) -> Option<V> {
240 | | -        FxHashMap::insert(self, k, v)
241 | | -    }
242 | | -
243 | | -    #[inline(always)]
244 | | -    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
245 | | -    where
246 | | -        K: Borrow<Q>,
247 | | -    {
248 | | -        FxHashMap::remove(self, k)
249 | | -    }
250 | | -
251 | | -    #[inline(always)]
252 | | -    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
253 | | -        self.iter().filter_map(move |(k, v)| f(k, &*v)).collect()
254 | | -    }
255 | | -
256 | | -    #[inline(always)]
257 | | -    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
258 | | -        match self.get(&k) {
259 | | -            Some(v) => Ok(v),
260 | | -            None => {
261 | | -                vacant()?;
262 | | -                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
263 | | -            }
264 | | -        }
265 | | -    }
266 | | -
267 | | -    #[inline(always)]
268 | | -    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
269 | | -        match self.entry(k) {
270 | | -            Entry::Occupied(e) => Ok(e.into_mut()),
271 | | -            Entry::Vacant(e) => {
272 | | -                let v = vacant()?;
273 | | -                Ok(e.insert(v))
274 | | -            }
275 | | -        }
276 | | -    }
277 | | -}
278 | | -
279 | | -crate type CompileTimeEvalContext<'mir, 'tcx> =
280 | | -    InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;
281 | | -
282 | | -impl interpret::MayLeak for ! {
283 | | -    #[inline(always)]
284 | | -    fn may_leak(self) -> bool {
285 | | -        // `self` is uninhabited
286 | | -        self
287 | | -    }
288 | | -}
289 | | -
290 | | -impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
291 | | -    type MemoryKinds = !;
292 | | -    type PointerTag = ();
293 | | -    type ExtraFnVal = !;
294 | | -
295 | | -    type FrameExtra = ();
296 | | -    type MemoryExtra = MemoryExtra;
297 | | -    type AllocExtra = ();
298 | | -
299 | | -    type MemoryMap = FxHashMap<AllocId, (MemoryKind<!>, Allocation)>;
300 | | -
301 | | -    const STATIC_KIND: Option<!> = None; // no copying of statics allowed
302 | | -
303 | | -    // We do not check for alignment to avoid having to carry an `Align`
304 | | -    // in `ConstValue::ByRef`.
305 | | -    const CHECK_ALIGN: bool = false;
306 | | -
307 | | -    #[inline(always)]
308 | | -    fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
309 | | -        false // for now, we don't enforce validity
310 | | -    }
311 | | -
312 | | -    fn find_mir_or_eval_fn(
313 | | -        ecx: &mut InterpCx<'mir, 'tcx, Self>,
314 | | -        instance: ty::Instance<'tcx>,
315 | | -        args: &[OpTy<'tcx>],
316 | | -        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
317 | | -        _unwind: Option<mir::BasicBlock>, // unwinding is not supported in consts
318 | | -    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
319 | | -        debug!("find_mir_or_eval_fn: {:?}", instance);
320 | | -
321 | | -        // Only check non-glue functions
322 | | -        if let ty::InstanceDef::Item(def_id) = instance.def {
323 | | -            // Execution might have wandered off into other crates, so we cannot do a stability-
324 | | -            // sensitive check here. But we can at least rule out functions that are not const
325 | | -            // at all.
326 | | -            if ecx.tcx.is_const_fn_raw(def_id) {
327 | | -                // If this function is a `const fn` then as an optimization we can query this
328 | | -                // evaluation immediately.
329 | | -                //
330 | | -                // For the moment we only do this for functions which take no arguments
331 | | -                // (or all arguments are ZSTs) so that we don't memoize too much.
332 | | -                //
333 | | -                // Because `#[track_caller]` adds an implicit non-ZST argument, we also cannot
334 | | -                // perform this optimization on items tagged with it.
335 | | -                let no_implicit_args = !instance.def.requires_caller_location(ecx.tcx());
336 | | -                if args.iter().all(|a| a.layout.is_zst()) && no_implicit_args {
337 | | -                    let gid = GlobalId { instance, promoted: None };
338 | | -                    ecx.eval_const_fn_call(gid, ret)?;
339 | | -                    return Ok(None);
340 | | -                }
341 | | -            } else {
342 | | -                // Some functions we support even if they are non-const -- but avoid testing
343 | | -                // that for const fn! We certainly do *not* want to actually call the fn
344 | | -                // though, so be sure we return here.
345 | | -                return if ecx.hook_panic_fn(instance, args, ret)? {
346 | | -                    Ok(None)
347 | | -                } else {
348 | | -                    throw_unsup_format!("calling non-const function `{}`", instance)
349 | | -                };
350 | | -            }
351 | | -        }
352 | | -        // This is a const fn. Call it.
353 | | -        Ok(Some(match ecx.load_mir(instance.def, None) {
354 | | -            Ok(body) => *body,
355 | | -            Err(err) => {
356 | | -                if let err_unsup!(NoMirFor(ref path)) = err.kind {
357 | | -                    return Err(ConstEvalError::NeedsRfc(format!(
358 | | -                        "calling extern function `{}`",
359 | | -                        path
360 | | -                    ))
361 | | -                    .into());
362 | | -                }
363 | | -                return Err(err);
364 | | -            }
365 | | -        }))
366 | | -    }
367 | | -
368 | | -    fn call_extra_fn(
369 | | -        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
370 | | -        fn_val: !,
371 | | -        _args: &[OpTy<'tcx>],
372 | | -        _ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
373 | | -        _unwind: Option<mir::BasicBlock>,
374 | | -    ) -> InterpResult<'tcx> {
375 | | -        match fn_val {}
376 | | -    }
377 | | -
378 | | -    fn call_intrinsic(
379 | | -        ecx: &mut InterpCx<'mir, 'tcx, Self>,
380 | | -        span: Span,
381 | | -        instance: ty::Instance<'tcx>,
382 | | -        args: &[OpTy<'tcx>],
383 | | -        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
384 | | -        _unwind: Option<mir::BasicBlock>,
385 | | -    ) -> InterpResult<'tcx> {
386 | | -        if ecx.emulate_intrinsic(span, instance, args, ret)? {
387 | | -            return Ok(());
388 | | -        }
389 | | -        // An intrinsic that we do not support
390 | | -        let intrinsic_name = ecx.tcx.item_name(instance.def_id());
391 | | -        Err(ConstEvalError::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into())
392 | | -    }
393 | | -
394 | | -    fn assert_panic(
395 | | -        ecx: &mut InterpCx<'mir, 'tcx, Self>,
396 | | -        _span: Span,
397 | | -        msg: &AssertMessage<'tcx>,
398 | | -        _unwind: Option<mir::BasicBlock>,
399 | | -    ) -> InterpResult<'tcx> {
400 | | -        use rustc::mir::interpret::PanicInfo::*;
401 | | -        Err(match msg {
402 | | -            BoundsCheck { ref len, ref index } => {
403 | | -                let len = ecx
404 | | -                    .read_immediate(ecx.eval_operand(len, None)?)
405 | | -                    .expect("can't eval len")
406 | | -                    .to_scalar()?
407 | | -                    .to_machine_usize(&*ecx)?;
408 | | -                let index = ecx
409 | | -                    .read_immediate(ecx.eval_operand(index, None)?)
410 | | -                    .expect("can't eval index")
411 | | -                    .to_scalar()?
412 | | -                    .to_machine_usize(&*ecx)?;
413 | | -                err_panic!(BoundsCheck { len, index })
414 | | -            }
415 | | -            Overflow(op) => err_panic!(Overflow(*op)),
416 | | -            OverflowNeg => err_panic!(OverflowNeg),
417 | | -            DivisionByZero => err_panic!(DivisionByZero),
418 | | -            RemainderByZero => err_panic!(RemainderByZero),
419 | | -            ResumedAfterReturn(generator_kind) => err_panic!(ResumedAfterReturn(*generator_kind)),
420 | | -            ResumedAfterPanic(generator_kind) => err_panic!(ResumedAfterPanic(*generator_kind)),
421 | | -            Panic { .. } => bug!("`Panic` variant cannot occur in MIR"),
422 | | -        }
423 | | -        .into())
424 | | -    }
425 | | -
426 | | -    fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> {
427 | | -        Err(ConstEvalError::NeedsRfc("pointer-to-integer cast".to_string()).into())
428 | | -    }
429 | | -
430 | | -    fn binary_ptr_op(
431 | | -        _ecx: &InterpCx<'mir, 'tcx, Self>,
432 | | -        _bin_op: mir::BinOp,
433 | | -        _left: ImmTy<'tcx>,
434 | | -        _right: ImmTy<'tcx>,
435 | | -    ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
436 | | -        Err(ConstEvalError::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
437 | | -    }
438 | | -
439 | | -    fn find_foreign_static(
440 | | -        _tcx: TyCtxt<'tcx>,
441 | | -        _def_id: DefId,
442 | | -    ) -> InterpResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag>>> {
443 | | -        throw_unsup!(ReadForeignStatic)
444 | | -    }
445 | | -
446 | | -    #[inline(always)]
447 | | -    fn init_allocation_extra<'b>(
448 | | -        _memory_extra: &MemoryExtra,
449 | | -        _id: AllocId,
450 | | -        alloc: Cow<'b, Allocation>,
451 | | -        _kind: Option<MemoryKind<!>>,
452 | | -    ) -> (Cow<'b, Allocation<Self::PointerTag>>, Self::PointerTag) {
453 | | -        // We do not use a tag so we can just cheaply forward the allocation
454 | | -        (alloc, ())
455 | | -    }
456 | | -
457 | | -    #[inline(always)]
458 | | -    fn tag_static_base_pointer(_memory_extra: &MemoryExtra, _id: AllocId) -> Self::PointerTag {
459 | | -        ()
460 | | -    }
461 | | -
462 | | -    fn box_alloc(
463 | | -        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
464 | | -        _dest: PlaceTy<'tcx>,
465 | | -    ) -> InterpResult<'tcx> {
466 | | -        Err(ConstEvalError::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
467 | | -    }
468 | | -
469 | | -    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
470 | | -        {
471 | | -            let steps = &mut ecx.machine.steps_since_detector_enabled;
472 | | -
473 | | -            *steps += 1;
474 | | -            if *steps < 0 {
475 | | -                return Ok(());
476 | | -            }
477 | | -
478 | | -            *steps %= DETECTOR_SNAPSHOT_PERIOD;
479 | | -            if *steps != 0 {
480 | | -                return Ok(());
481 | | -            }
482 | | -        }
483 | | -
484 | | -        let span = ecx.frame().span;
485 | | -        ecx.machine.loop_detector.observe_and_analyze(*ecx.tcx, span, &ecx.memory, &ecx.stack[..])
486 | | -    }
487 | | -
488 | | -    #[inline(always)]
489 | | -    fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
490 | | -        Ok(())
491 | | -    }
492 | | -
493 | | -    fn before_access_static(
494 | | -        memory_extra: &MemoryExtra,
495 | | -        _allocation: &Allocation,
496 | | -    ) -> InterpResult<'tcx> {
497 | | -        if memory_extra.can_access_statics {
498 | | -            Ok(())
499 | | -        } else {
500 | | -            Err(ConstEvalError::ConstAccessesStatic.into())
501 | | -        }
502 | | -    }
503 | | -}
504 | | -
505 | 172 | /// Extracts a field of a (variant of a) const.
506 | 173 | // this function uses `unwrap` copiously, because an already validated constant must have valid
507 | 174 | // fields and can thus never fail outside of compiler bugs
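One subtlety worth keeping in mind when reviewing the moved code: `CompileTimeInterpreter::new` initializes `steps_since_detector_enabled` to `-STEPS_UNTIL_DETECTOR_ENABLED`, and the removed `before_terminator` increments it on every terminator, does nothing while it is still negative, then folds it modulo `DETECTOR_SNAPSHOT_PERIOD` so a snapshot is taken exactly when it wraps to zero. A standalone sketch of that counter arithmetic (constants copied from the removed lines; `take_snapshot` is a hypothetical stand-in for `loop_detector.observe_and_analyze`):

```rust
/// Number of steps until the detector even starts doing anything (from the removed code).
const STEPS_UNTIL_DETECTOR_ENABLED: isize = 1_000_000;
/// The number of steps between loop detector snapshots (from the removed code).
const DETECTOR_SNAPSHOT_PERIOD: isize = 256;

struct Detector {
    /// Negative: steps remaining until the detector is enabled.
    /// Non-negative: steps since enablement, kept modulo the snapshot period.
    steps_since_detector_enabled: isize,
    snapshots_taken: usize,
}

impl Detector {
    fn new() -> Self {
        Detector {
            steps_since_detector_enabled: -STEPS_UNTIL_DETECTOR_ENABLED,
            snapshots_taken: 0,
        }
    }

    /// Mirrors the arithmetic of the removed `before_terminator`.
    fn before_terminator(&mut self) {
        {
            let steps = &mut self.steps_since_detector_enabled;
            *steps += 1;
            if *steps < 0 {
                return; // detector not enabled yet
            }
            *steps %= DETECTOR_SNAPSHOT_PERIOD;
            if *steps != 0 {
                return; // between snapshot points
            }
        }
        self.take_snapshot(); // stand-in for `loop_detector.observe_and_analyze(...)`
    }

    fn take_snapshot(&mut self) {
        self.snapshots_taken += 1;
    }
}

fn main() {
    let mut d = Detector::new();
    // Run for one million steps plus two full periods: the first snapshot fires
    // when the counter reaches zero, then one more fires per period.
    for _ in 0..(STEPS_UNTIL_DETECTOR_ENABLED + 2 * DETECTOR_SNAPSHOT_PERIOD) {
        d.before_terminator();
    }
    assert_eq!(d.snapshots_taken, 3);
}
```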