1use rustc_abi::{ExternAbi, FIRST_VARIANT, Size};
4use rustc_data_structures::fx::{FxHashMap, FxHashSet};
5use rustc_hir::LangItem;
6use rustc_hir::attrs::InlineAttr;
7use rustc_index::IndexVec;
8use rustc_index::bit_set::DenseBitSet;
9use rustc_infer::infer::TyCtxtInferExt;
10use rustc_infer::traits::{Obligation, ObligationCause};
11use rustc_middle::mir::coverage::CoverageKind;
12use rustc_middle::mir::visit::{MutatingUseContext, NonUseContext, PlaceContext, Visitor};
13use rustc_middle::mir::*;
14use rustc_middle::ty::adjustment::PointerCoercion;
15use rustc_middle::ty::print::with_no_trimmed_paths;
16use rustc_middle::ty::{
17 self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt, TypeVisitableExt, Unnormalized,
18 Upcast, Variance,
19};
20use rustc_middle::{bug, span_bug};
21use rustc_mir_dataflow::debuginfo::debuginfo_locals;
22use rustc_trait_selection::traits::ObligationCtxt;
23
24use crate::util::{self, most_packed_projection};
25
/// Classifies a CFG edge for the purpose of checking unwind invariants.
///
/// Cleanup blocks may only be entered from non-cleanup blocks via an
/// `Unwind` edge; see `CfgChecker::check_edge` for the full edge matrix.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    /// Edge taken when unwinding out of the source terminator.
    Unwind,
    /// Ordinary (non-unwinding) control-flow edge.
    Normal,
}
31
/// The MIR validation pass: checks control-flow and (via `validate_types`)
/// type-level invariants of a body after other passes have run.
pub(super) struct Validator {
    /// Describes at which point in the pipeline this validation is happening,
    /// so failures can say which pass broke the MIR.
    pub when: String,
}
36
impl<'tcx> crate::MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // Intrinsic/virtual shims have no real MIR worth validating.
        if matches!(body.source.instance, InstanceKind::Intrinsic(..) | InstanceKind::Virtual(..)) {
            return;
        }
        let def_id = body.source.def_id();
        let typing_env = body.typing_env(tcx);
        // Decide whether this body is allowed to unwind. Before the runtime
        // phase (and for non-fn-like items) we conservatively assume it can;
        // otherwise we consult the ABI-based `fn_can_unwind` query.
        let can_unwind = if body.phase <= MirPhase::Runtime(RuntimePhase::Initial) {
            true
        } else if !tcx.def_kind(def_id).is_fn_like() {
            true
        } else {
            let body_ty = tcx.type_of(def_id).skip_binder();
            let body_abi = match body_ty.kind() {
                ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
                ty::Closure(..) => ExternAbi::RustCall,
                ty::CoroutineClosure(..) => ExternAbi::RustCall,
                ty::Coroutine(..) => ExternAbi::Rust,
                // Typeck errors mean there is nothing meaningful to validate.
                ty::Error(_) => return,
                _ => span_bug!(body.span, "unexpected body ty: {body_ty}"),
            };

            ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
        };

        // CFG-level checks (edges, cleanup structure, unwind actions).
        let mut cfg_checker = CfgChecker {
            when: &self.when,
            body,
            tcx,
            unwind_edge_count: 0,
            reachable_blocks: traversal::reachable_as_bitset(body),
            value_cache: FxHashSet::default(),
            can_unwind,
        };
        cfg_checker.visit_body(body);
        cfg_checker.check_cleanup_control_flow();

        // Type-level checks are collected separately and reported through the
        // same ICE machinery as the CFG checks.
        for (location, msg) in validate_types(tcx, typing_env, body, body) {
            cfg_checker.fail(location, msg);
        }

        for (location, msg) in validate_debuginfos(body) {
            cfg_checker.fail(location, msg);
        }

        // Optimized runtime MIR of ordinary items must have had all free
        // regions erased by this point.
        if let MirPhase::Runtime(_) = body.phase
            && let ty::InstanceKind::Item(_) = body.source.instance
            && body.has_free_regions()
        {
            cfg_checker.fail(
                Location::START,
                format!("Free regions in optimized {} MIR", body.phase.name()),
            );
        }
    }

    fn is_required(&self) -> bool {
        true
    }
}
105
/// Walks a body checking control-flow-graph invariants (valid edges, cleanup
/// structure, unwind actions). Failures are reported immediately via `fail`.
struct CfgChecker<'a, 'tcx> {
    /// Pipeline point description, used in failure messages.
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    /// Number of non-cleanup -> cleanup (unwind) edges seen so far; used to
    /// skip the cleanup control-flow check when there is at most one.
    unwind_edge_count: usize,
    /// Blocks reachable from the start block; unreachable cleanup blocks are
    /// exempt from the cleanup control-flow check.
    reachable_blocks: DenseBitSet<BasicBlock>,
    /// Scratch set reused to detect duplicate `SwitchInt` values.
    value_cache: FxHashSet<u128>,
    /// Whether this body may unwind at all (see `Validator::run_pass`).
    can_unwind: bool,
}
123
impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
    /// ICEs with a "broken MIR" message, unless compilation has already
    /// errored (broken MIR is expected after reported errors).
    #[track_caller]
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
        if self.tcx.dcx().has_errors().is_none() {
            span_bug!(
                self.body.source_info(location).span,
                "broken MIR in {:?} ({}) at {:?}:\n{}",
                self.body.source.instance,
                self.when,
                location,
                msg.as_ref(),
            );
        }
    }

    /// Checks that the edge `location.block -> bb` of kind `edge_kind` is
    /// structurally valid: the target exists, the start block has no
    /// predecessors, and cleanup-ness of source/target matches the edge kind.
    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks.get(bb) {
            let src = self.body.basic_blocks.get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                // Normal edges must stay within the cleanup or non-cleanup
                // subgraph; only `Unwind` edges may cross into cleanup.
                (false, false, EdgeKind::Normal)
                | (true, true, EdgeKind::Normal) => {}
                (false, true, EdgeKind::Unwind) => {
                    self.unwind_edge_count += 1;
                }
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind,
                            bb,
                            src.is_cleanup,
                            bb.is_cleanup,
                        )
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {bb:?}"))
        }
    }

    /// Checks that cleanup control flow forms a forest after contracting each
    /// dominator-connected cleanup region to a single node: every contracted
    /// node has at most one successor region and there are no cycles.
    fn check_cleanup_control_flow(&self) {
        // With at most one unwind edge there cannot be a violation.
        if self.unwind_edge_count <= 1 {
            return;
        }
        let doms = self.body.basic_blocks.dominators();
        let mut post_contract_node = FxHashMap::default();
        // Scratch buffer reused across queries (cleared by `drain`).
        let mut dom_path = vec![];
        // Maps a cleanup block to the root of its contracted region: walk up
        // immediate dominators while they are cleanup blocks, memoizing the
        // result for every block on the walked path.
        let mut get_post_contract_node = |mut bb| {
            let root = loop {
                if let Some(root) = post_contract_node.get(&bb) {
                    break *root;
                }
                let parent = doms.immediate_dominator(bb).unwrap();
                dom_path.push(bb);
                if !self.body.basic_blocks[parent].is_cleanup {
                    break bb;
                }
                bb = parent;
            };
            for bb in dom_path.drain(..) {
                post_contract_node.insert(bb, root);
            }
            root
        };

        // Record, for each contracted cleanup region, its unique successor
        // region (if any); two distinct successors is a violation.
        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
                continue;
            }
            let bb = get_post_contract_node(bb);
            for s in bb_data.terminator().successors() {
                let s = get_post_contract_node(s);
                if s == bb {
                    continue;
                }
                let parent = &mut parent[bb];
                match parent {
                    None => {
                        *parent = Some(s);
                    }
                    Some(e) if *e == s => (),
                    Some(e) => self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
                            bb,
                            s,
                            *e
                        )
                    ),
                }
            }
        }

        // Check for cycles among the contracted regions by following the
        // `parent` chain from every node, `take`-ing links so each edge is
        // traversed at most once overall.
        let mut stack = FxHashSet::default();
        for (mut bb, parent) in parent.iter_enumerated_mut() {
            stack.clear();
            stack.insert(bb);
            loop {
                let Some(parent) = parent.take() else { break };
                let no_cycle = stack.insert(parent);
                if !no_cycle {
                    self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: Cycle involving edge {bb:?} -> {parent:?}",
                        ),
                    );
                    break;
                }
                bb = parent;
            }
        }
    }

    /// Checks an `UnwindAction` against the cleanup-ness of the current block
    /// and the body's ability to unwind.
    fn check_unwind_edge(&mut self, location: Location, unwind: UnwindAction) {
        let is_cleanup = self.body.basic_blocks[location.block].is_cleanup;
        match unwind {
            UnwindAction::Cleanup(unwind) => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Cleanup` in cleanup block");
                }
                self.check_edge(location, unwind, EdgeKind::Unwind);
            }
            UnwindAction::Continue => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Continue` in cleanup block");
                }

                if !self.can_unwind {
                    self.fail(location, "`UnwindAction::Continue` in no-unwind function");
                }
            }
            UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
                if !is_cleanup {
                    self.fail(
                        location,
                        "`UnwindAction::Terminate(InCleanup)` in a non-cleanup block",
                    );
                }
            }
            UnwindAction::Unreachable | UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
        }
    }

    /// A call edge is critical if it both has a successor (`target`) and may
    /// unwind, while the target has multiple predecessors — such edges must
    /// have been split by the time MIR is fully optimized.
    fn is_critical_call_edge(&self, target: Option<BasicBlock>, unwind: UnwindAction) -> bool {
        let Some(target) = target else { return false };
        matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate(_))
            && self.body.basic_blocks.predecessors()[target].len() > 1
    }
}
289
290impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
291 fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
292 if self.body.local_decls.get(local).is_none() {
293 self.fail(
294 location,
295 format!("local {local:?} has no corresponding declaration in `body.local_decls`"),
296 );
297 }
298 }
299
300 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
301 match &statement.kind {
302 StatementKind::AscribeUserType(..) => {
303 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
304 self.fail(
305 location,
306 "`AscribeUserType` should have been removed after drop lowering phase",
307 );
308 }
309 }
310 StatementKind::FakeRead(..) => {
311 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
312 self.fail(
313 location,
314 "`FakeRead` should have been removed after drop lowering phase",
315 );
316 }
317 }
318 StatementKind::SetDiscriminant { .. } => {
319 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
320 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
321 }
322 }
323 StatementKind::Coverage(kind) => {
324 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup)
325 && let CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. } = kind
326 {
327 self.fail(
328 location,
329 format!("{kind:?} should have been removed after analysis"),
330 );
331 }
332 }
333 StatementKind::Assign(..)
334 | StatementKind::StorageLive(_)
335 | StatementKind::StorageDead(_)
336 | StatementKind::Intrinsic(_)
337 | StatementKind::ConstEvalCounter
338 | StatementKind::PlaceMention(..)
339 | StatementKind::BackwardIncompatibleDropHint { .. }
340 | StatementKind::Nop => {}
341 }
342
343 self.super_statement(statement, location);
344 }
345
346 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
347 match &terminator.kind {
348 TerminatorKind::Goto { target } => {
349 self.check_edge(location, *target, EdgeKind::Normal);
350 }
351 TerminatorKind::SwitchInt { targets, discr: _ } => {
352 for (_, target) in targets.iter() {
353 self.check_edge(location, target, EdgeKind::Normal);
354 }
355 self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
356
357 self.value_cache.clear();
358 self.value_cache.extend(targets.iter().map(|(value, _)| value));
359 let has_duplicates = targets.iter().len() != self.value_cache.len();
360 if has_duplicates {
361 self.fail(
362 location,
363 format!(
364 "duplicated values in `SwitchInt` terminator: {:?}",
365 terminator.kind,
366 ),
367 );
368 }
369 }
370 TerminatorKind::Drop { target, unwind, drop, .. } => {
371 self.check_edge(location, *target, EdgeKind::Normal);
372 self.check_unwind_edge(location, *unwind);
373 if let Some(drop) = drop {
374 self.check_edge(location, *drop, EdgeKind::Normal);
375 }
376 }
377 TerminatorKind::Call { func, args, .. }
378 | TerminatorKind::TailCall { func, args, .. } => {
379 if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind {
381 if let Some(target) = target {
382 self.check_edge(location, target, EdgeKind::Normal);
383 }
384 self.check_unwind_edge(location, unwind);
385
386 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
392 && self.is_critical_call_edge(target, unwind)
393 {
394 self.fail(
395 location,
396 format!(
397 "encountered critical edge in `Call` terminator {:?}",
398 terminator.kind,
399 ),
400 );
401 }
402
403 if most_packed_projection(self.tcx, &self.body.local_decls, destination)
406 .is_some()
407 {
408 self.fail(
410 location,
411 format!(
412 "encountered packed place in `Call` terminator destination: {:?}",
413 terminator.kind,
414 ),
415 );
416 }
417 }
418
419 for arg in args {
420 if let Operand::Move(place) = &arg.node {
421 if most_packed_projection(self.tcx, &self.body.local_decls, *place)
422 .is_some()
423 {
424 self.fail(
426 location,
427 format!(
428 "encountered `Move` of a packed place in `Call` terminator: {:?}",
429 terminator.kind,
430 ),
431 );
432 }
433 }
434 }
435
436 if let ty::FnDef(did, ..) = *func.ty(&self.body.local_decls, self.tcx).kind()
437 && self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
438 && matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. })
439 {
440 self.fail(location, "`#[rustc_force_inline]`-annotated function not inlined");
441 }
442 }
443 TerminatorKind::Assert { target, unwind, .. } => {
444 self.check_edge(location, *target, EdgeKind::Normal);
445 self.check_unwind_edge(location, *unwind);
446 }
447 TerminatorKind::Yield { resume, drop, .. } => {
448 if self.body.coroutine.is_none() {
449 self.fail(location, "`Yield` cannot appear outside coroutine bodies");
450 }
451 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
452 self.fail(location, "`Yield` should have been replaced by coroutine lowering");
453 }
454 self.check_edge(location, *resume, EdgeKind::Normal);
455 if let Some(drop) = drop {
456 self.check_edge(location, *drop, EdgeKind::Normal);
457 }
458 }
459 TerminatorKind::FalseEdge { real_target, imaginary_target } => {
460 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
461 self.fail(
462 location,
463 "`FalseEdge` should have been removed after drop elaboration",
464 );
465 }
466 self.check_edge(location, *real_target, EdgeKind::Normal);
467 self.check_edge(location, *imaginary_target, EdgeKind::Normal);
468 }
469 TerminatorKind::FalseUnwind { real_target, unwind } => {
470 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
471 self.fail(
472 location,
473 "`FalseUnwind` should have been removed after drop elaboration",
474 );
475 }
476 self.check_edge(location, *real_target, EdgeKind::Normal);
477 self.check_unwind_edge(location, *unwind);
478 }
479 TerminatorKind::InlineAsm { targets, unwind, .. } => {
480 for &target in targets {
481 self.check_edge(location, target, EdgeKind::Normal);
482 }
483 self.check_unwind_edge(location, *unwind);
484 }
485 TerminatorKind::CoroutineDrop => {
486 if self.body.coroutine.is_none() {
487 self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies");
488 }
489 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
490 self.fail(
491 location,
492 "`CoroutineDrop` should have been replaced by coroutine lowering",
493 );
494 }
495 }
496 TerminatorKind::UnwindResume => {
497 let bb = location.block;
498 if !self.body.basic_blocks[bb].is_cleanup {
499 self.fail(location, "Cannot `UnwindResume` from non-cleanup basic block")
500 }
501 if !self.can_unwind {
502 self.fail(location, "Cannot `UnwindResume` in a function that cannot unwind")
503 }
504 }
505 TerminatorKind::UnwindTerminate(_) => {
506 let bb = location.block;
507 if !self.body.basic_blocks[bb].is_cleanup {
508 self.fail(location, "Cannot `UnwindTerminate` from non-cleanup basic block")
509 }
510 }
511 TerminatorKind::Return => {
512 let bb = location.block;
513 if self.body.basic_blocks[bb].is_cleanup {
514 self.fail(location, "Cannot `Return` from cleanup basic block")
515 }
516 }
517 TerminatorKind::Unreachable => {}
518 }
519
520 self.super_terminator(terminator, location);
521 }
522
523 fn visit_source_scope(&mut self, scope: SourceScope) {
524 if self.body.source_scopes.get(scope).is_none() {
525 self.tcx.dcx().span_bug(
526 self.body.span,
527 format!(
528 "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
529 self.body.source.instance, self.when, scope,
530 ),
531 );
532 }
533 }
534}
535
536pub(super) fn validate_types<'tcx>(
542 tcx: TyCtxt<'tcx>,
543 typing_env: ty::TypingEnv<'tcx>,
544 body: &Body<'tcx>,
545 caller_body: &Body<'tcx>,
546) -> Vec<(Location, String)> {
547 let mut type_checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() };
548 with_no_trimmed_paths!({
553 type_checker.visit_body(body);
554 });
555 type_checker.failures
556}
557
/// Walks a body checking type-level invariants (projection types, rvalue
/// operand types, debuginfo places, ...). Unlike `CfgChecker`, failures are
/// accumulated in `failures` instead of ICEing immediately.
struct TypeChecker<'a, 'tcx> {
    body: &'a Body<'tcx>,
    /// Body used to look up coroutine layouts for field projections; equals
    /// `body` except when validating certain coroutine shims.
    caller_body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    /// Collected `(location, message)` pairs, drained by `validate_types`.
    failures: Vec<(Location, String)>,
}
565
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    /// Records a validation failure; reported later by the caller of
    /// `validate_types`.
    fn fail(&mut self, location: Location, msg: impl Into<String>) {
        self.failures.push((location, msg.into()));
    }

    /// Returns whether a value of type `src` may be assigned to a place of
    /// type `dest`. Equal types are always fine; opaque types are skipped
    /// (their hidden type may not be revealed here). Otherwise the types are
    /// related with a variance that depends on the phase: runtime MIR
    /// requires invariance, analysis MIR allows covariance (subtyping).
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        if src == dest {
            return true;
        }

        if (src, dest).has_opaque_types() {
            return true;
        }

        let variance = if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
            Variance::Invariant
        } else {
            Variance::Covariant
        };

        crate::util::relate_types(self.tcx, self.typing_env, variance, src, dest)
    }

    /// Returns whether `pred` holds (ignoring region constraints), by running
    /// the trait solver in a fresh inference context. Predicates mentioning
    /// opaque types are optimistically treated as holding.
    fn predicate_must_hold_modulo_regions(
        &self,
        pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
    ) -> bool {
        let pred: ty::Predicate<'tcx> = pred.upcast(self.tcx);

        if pred.has_opaque_types() {
            return true;
        }

        let (infcx, param_env) = self.tcx.infer_ctxt().build_with_typing_env(self.typing_env);
        let ocx = ObligationCtxt::new(&infcx);
        ocx.register_obligation(Obligation::new(
            self.tcx,
            ObligationCause::dummy(),
            param_env,
            pred,
        ));
        ocx.evaluate_obligations_error_on_ambiguity().is_empty()
    }
}
627
628impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
629 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
630 if self.tcx.sess.opts.unstable_opts.validate_mir
632 && self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
633 {
634 if let Operand::Copy(place) = operand {
636 let ty = place.ty(&self.body.local_decls, self.tcx).ty;
637
638 if !self.tcx.type_is_copy_modulo_regions(self.typing_env, ty) {
639 self.fail(location, format!("`Operand::Copy` with non-`Copy` type {ty}"));
640 }
641 }
642 }
643
644 self.super_operand(operand, location);
645 }
646
647 fn visit_projection_elem(
648 &mut self,
649 place_ref: PlaceRef<'tcx>,
650 elem: PlaceElem<'tcx>,
651 context: PlaceContext,
652 location: Location,
653 ) {
654 match elem {
655 ProjectionElem::Deref
656 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
657 {
658 let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
659
660 if base_ty.is_box() {
661 self.fail(location, format!("{base_ty} dereferenced after ElaborateBoxDerefs"))
662 }
663 }
664 ProjectionElem::Field(f, ty) => {
665 let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
666 let fail_out_of_bounds = |this: &mut Self, location| {
667 this.fail(location, format!("Out of bounds field {f:?} for {parent_ty:?}"));
668 };
669 let check_equal = |this: &mut Self, location, f_ty| {
670 if !this.mir_assign_valid_types(ty, f_ty) {
671 this.fail(
672 location,
673 format!(
674 "Field projection `{place_ref:?}.{f:?}` specified type `{ty}`, but actual type is `{f_ty}`"
675 )
676 )
677 }
678 };
679
680 let kind = match parent_ty.ty.kind() {
681 &ty::Alias(ty::AliasTy { kind: ty::Opaque { def_id }, args, .. }) => {
682 self.tcx.type_of(def_id).instantiate(self.tcx, args).skip_norm_wip().kind()
683 }
684 kind => kind,
685 };
686
687 match kind {
688 ty::Tuple(fields) => {
689 let Some(f_ty) = fields.get(f.as_usize()) else {
690 fail_out_of_bounds(self, location);
691 return;
692 };
693 check_equal(self, location, *f_ty);
694 }
695 ty::Pat(base, _) => check_equal(self, location, *base),
697 ty::Adt(adt_def, args) => {
698 if self.tcx.is_lang_item(adt_def.did(), LangItem::DynMetadata) {
700 self.fail(
701 location,
702 format!(
703 "You can't project to field {f:?} of `DynMetadata` because \
704 layout is weird and thinks it doesn't have fields."
705 ),
706 );
707 }
708
709 if adt_def.repr().simd() {
710 self.fail(
711 location,
712 format!(
713 "Projecting into SIMD type {adt_def:?} is banned by MCP#838"
714 ),
715 );
716 }
717
718 let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
719 let Some(field) = adt_def.variant(var).fields.get(f) else {
720 fail_out_of_bounds(self, location);
721 return;
722 };
723 check_equal(self, location, field.ty(self.tcx, args));
724 }
725 ty::Closure(_, args) => {
726 let args = args.as_closure();
727 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
728 fail_out_of_bounds(self, location);
729 return;
730 };
731 check_equal(self, location, f_ty);
732 }
733 ty::CoroutineClosure(_, args) => {
734 let args = args.as_coroutine_closure();
735 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
736 fail_out_of_bounds(self, location);
737 return;
738 };
739 check_equal(self, location, f_ty);
740 }
741 &ty::Coroutine(def_id, args) => {
742 let f_ty = if let Some(var) = parent_ty.variant_index {
743 let layout = if def_id == self.caller_body.source.def_id() {
749 self.caller_body
750 .coroutine_layout_raw()
751 .or_else(|| self.tcx.coroutine_layout(def_id, args).ok())
752 } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
753 && let ty::ClosureKind::FnOnce =
754 args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
755 && self.caller_body.source.def_id()
756 == self.tcx.coroutine_by_move_body_def_id(def_id)
757 {
758 self.caller_body.coroutine_layout_raw()
760 } else {
761 self.tcx.coroutine_layout(def_id, args).ok()
762 };
763
764 let Some(layout) = layout else {
765 self.fail(
766 location,
767 format!("No coroutine layout for {parent_ty:?}"),
768 );
769 return;
770 };
771
772 let Some(&local) = layout.variant_fields[var].get(f) else {
773 fail_out_of_bounds(self, location);
774 return;
775 };
776
777 let Some(f_ty) = layout.field_tys.get(local) else {
778 self.fail(
779 location,
780 format!("Out of bounds local {local:?} for {parent_ty:?}"),
781 );
782 return;
783 };
784
785 ty::EarlyBinder::bind(f_ty.ty)
786 .instantiate(self.tcx, args)
787 .skip_norm_wip()
788 } else {
789 let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index())
790 else {
791 fail_out_of_bounds(self, location);
792 return;
793 };
794
795 f_ty
796 };
797
798 check_equal(self, location, f_ty);
799 }
800 _ => {
801 self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
802 }
803 }
804 }
805 ProjectionElem::Index(index) => {
806 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
807 match indexed_ty.kind() {
808 ty::Array(_, _) | ty::Slice(_) => {}
809 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
810 }
811
812 let index_ty = self.body.local_decls[index].ty;
813 if index_ty != self.tcx.types.usize {
814 self.fail(location, format!("bad index ({index_ty} != usize)"))
815 }
816 }
817 ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
818 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
819 match indexed_ty.kind() {
820 ty::Array(_, _) => {
821 if from_end {
822 self.fail(location, "arrays should not be indexed from end");
823 }
824 }
825 ty::Slice(_) => {}
826 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
827 }
828
829 if from_end {
830 if offset > min_length {
831 self.fail(
832 location,
833 format!(
834 "constant index with offset -{offset} out of bounds of min length {min_length}"
835 ),
836 );
837 }
838 } else {
839 if offset >= min_length {
840 self.fail(
841 location,
842 format!(
843 "constant index with offset {offset} out of bounds of min length {min_length}"
844 ),
845 );
846 }
847 }
848 }
849 ProjectionElem::Subslice { from, to, from_end } => {
850 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
851 match indexed_ty.kind() {
852 ty::Array(_, _) => {
853 if from_end {
854 self.fail(location, "arrays should not be subsliced from end");
855 }
856 }
857 ty::Slice(_) => {
858 if !from_end {
859 self.fail(location, "slices should be subsliced from end");
860 }
861 }
862 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
863 }
864
865 if !from_end && from > to {
866 self.fail(location, "backwards subslice {from}..{to}");
867 }
868 }
869 ProjectionElem::OpaqueCast(ty)
870 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
871 {
872 self.fail(
873 location,
874 format!("explicit opaque type cast to `{ty}` after `PostAnalysisNormalize`"),
875 )
876 }
877 ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => {
878 let binder_ty = place_ref.ty(&self.body.local_decls, self.tcx);
879 let ty::UnsafeBinder(binder_ty) = *binder_ty.ty.kind() else {
880 self.fail(
881 location,
882 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
883 );
884 return;
885 };
886 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
887 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
888 self.fail(
889 location,
890 format!(
891 "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty}"
892 ),
893 );
894 }
895 }
896 _ => {}
897 }
898 self.super_projection_elem(place_ref, elem, context, location);
899 }
900
    /// Checks debuginfo invariants: composite fragments must be non-empty,
    /// consist only of `Field` projections, and not target unions or enums;
    /// place-based debuginfo may only use projections that are representable
    /// in debuginfo.
    fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
        if let Some(box VarDebugInfoFragment { ty, ref projection }) = debuginfo.composite {
            if ty.is_union() || ty.is_enum() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid type {ty} in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.is_empty() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid empty projection in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.iter().any(|p| !matches!(p, PlaceElem::Field(..))) {
                self.fail(
                    START_BLOCK.start_location(),
                    format!(
                        "illegal projection {:?} in debuginfo for {:?}",
                        projection, debuginfo.name
                    ),
                );
            }
        }
        match debuginfo.value {
            VarDebugInfoContents::Const(_) => {}
            VarDebugInfoContents::Place(place) => {
                if place.projection.iter().any(|p| !p.can_use_in_debuginfo()) {
                    self.fail(
                        START_BLOCK.start_location(),
                        format!("illegal place {:?} in debuginfo for {:?}", place, debuginfo.name),
                    );
                }
            }
        }
        self.super_var_debug_info(debuginfo);
    }
938
    /// Checks whole-place invariants: `Deref` only as the first projection in
    /// runtime MIR, `Downcast` always followed by `Field`, `DerefTemp` locals
    /// used only via deref or direct assignment, and (in analysis MIR) no
    /// `ConstantIndex`/`Subslice` after a `Subslice`.
    fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) {
        // Computing the type walks all projections; any ill-formed projection
        // panics here with a decent message rather than failing obscurely later.
        let _ = place.ty(&self.body.local_decls, self.tcx);

        // In runtime MIR, `Deref` is only permitted as the very first
        // projection (debuginfo places are exempt).
        if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
            && place.projection.len() > 1
            && cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
            && place.projection[1..].contains(&ProjectionElem::Deref)
        {
            self.fail(
                location,
                format!("place {place:?} has deref as a later projection (it is only permitted as the first projection)"),
            );
        }

        // `Downcast` must be immediately followed by `Field`. Note: the inner
        // `next()` deliberately consumes the element after the `Downcast`.
        let mut projections_iter = place.projection.iter();
        while let Some(proj) = projections_iter.next() {
            if matches!(proj, ProjectionElem::Downcast(..)) {
                if !matches!(projections_iter.next(), Some(ProjectionElem::Field(..))) {
                    self.fail(
                        location,
                        format!(
                            "place {place:?} has `Downcast` projection not followed by `Field`"
                        ),
                    );
                }
            }
        }

        // `DerefTemp` locals may only be dereferenced, or assigned to as a
        // whole local (no projections) in a `Store` context.
        if let ClearCrossCrate::Set(box LocalInfo::DerefTemp) =
            self.body.local_decls[place.local].local_info
            && !place.is_indirect_first_projection()
        {
            if cntxt != PlaceContext::MutatingUse(MutatingUseContext::Store)
                || place.as_local().is_none()
            {
                self.fail(
                    location,
                    format!("`DerefTemp` locals must only be dereferenced or directly assigned to"),
                );
            }
        }

        // In analysis MIR, nothing index-like may follow a `Subslice`.
        if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
            && let Some(i) = place
                .projection
                .iter()
                .position(|elem| matches!(elem, ProjectionElem::Subslice { .. }))
            && let Some(tail) = place.projection.get(i + 1..)
            && tail.iter().any(|elem| {
                matches!(
                    elem,
                    ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. }
                )
            })
        {
            self.fail(
                location,
                format!("place {place:?} has `ConstantIndex` or `Subslice` after `Subslice`"),
            );
        }

        self.super_place(place, cntxt, location);
    }
1004
1005 fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
1006 macro_rules! check_kinds {
1007 ($t:expr, $text:literal, $typat:pat) => {
1008 if !matches!(($t).kind(), $typat) {
1009 self.fail(location, format!($text, $t));
1010 }
1011 };
1012 }
1013 match rvalue {
1014 Rvalue::Use(_, _) => {}
1015 Rvalue::CopyForDeref(_) => {
1016 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1017 self.fail(location, "`CopyForDeref` should have been removed in runtime MIR");
1018 }
1019 }
1020 Rvalue::Aggregate(kind, fields) => match **kind {
1021 AggregateKind::Tuple => {}
1022 AggregateKind::Array(dest) => {
1023 for src in fields {
1024 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1025 self.fail(location, "array field has the wrong type");
1026 }
1027 }
1028 }
1029 AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
1030 let adt_def = self.tcx.adt_def(def_id);
1031 assert!(adt_def.is_union());
1032 assert_eq!(idx, FIRST_VARIANT);
1033 let dest_ty = self.tcx.normalize_erasing_regions(
1034 self.typing_env,
1035 Unnormalized::new_wip(
1036 adt_def.non_enum_variant().fields[field].ty(self.tcx, args),
1037 ),
1038 );
1039 if let [field] = fields.raw.as_slice() {
1040 let src_ty = field.ty(self.body, self.tcx);
1041 if !self.mir_assign_valid_types(src_ty, dest_ty) {
1042 self.fail(location, "union field has the wrong type");
1043 }
1044 } else {
1045 self.fail(location, "unions should have one initialized field");
1046 }
1047 }
1048 AggregateKind::Adt(def_id, idx, args, _, None) => {
1049 let adt_def = self.tcx.adt_def(def_id);
1050 assert!(!adt_def.is_union());
1051 let variant = &adt_def.variants()[idx];
1052 if variant.fields.len() != fields.len() {
1053 self.fail(location, format!(
1054 "adt {def_id:?} has the wrong number of initialized fields, expected {}, found {}",
1055 fields.len(),
1056 variant.fields.len(),
1057 ));
1058 }
1059 for (src, dest) in std::iter::zip(fields, &variant.fields) {
1060 let dest_ty = self.tcx.normalize_erasing_regions(
1061 self.typing_env,
1062 Unnormalized::new_wip(dest.ty(self.tcx, args)),
1063 );
1064 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest_ty) {
1065 self.fail(location, "adt field has the wrong type");
1066 }
1067 }
1068 }
1069 AggregateKind::Closure(_, args) => {
1070 let upvars = args.as_closure().upvar_tys();
1071 if upvars.len() != fields.len() {
1072 self.fail(location, "closure has the wrong number of initialized fields");
1073 }
1074 for (src, dest) in std::iter::zip(fields, upvars) {
1075 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1076 self.fail(location, "closure field has the wrong type");
1077 }
1078 }
1079 }
1080 AggregateKind::Coroutine(_, args) => {
1081 let upvars = args.as_coroutine().upvar_tys();
1082 if upvars.len() != fields.len() {
1083 self.fail(location, "coroutine has the wrong number of initialized fields");
1084 }
1085 for (src, dest) in std::iter::zip(fields, upvars) {
1086 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1087 self.fail(location, "coroutine field has the wrong type");
1088 }
1089 }
1090 }
1091 AggregateKind::CoroutineClosure(_, args) => {
1092 let upvars = args.as_coroutine_closure().upvar_tys();
1093 if upvars.len() != fields.len() {
1094 self.fail(
1095 location,
1096 "coroutine-closure has the wrong number of initialized fields",
1097 );
1098 }
1099 for (src, dest) in std::iter::zip(fields, upvars) {
1100 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1101 self.fail(location, "coroutine-closure field has the wrong type");
1102 }
1103 }
1104 }
1105 AggregateKind::RawPtr(pointee_ty, mutability) => {
1106 if !matches!(self.body.phase, MirPhase::Runtime(_)) {
1107 self.fail(location, "RawPtr should be in runtime MIR only");
1111 }
1112
1113 if let [data_ptr, metadata] = fields.raw.as_slice() {
1114 let data_ptr_ty = data_ptr.ty(self.body, self.tcx);
1115 let metadata_ty = metadata.ty(self.body, self.tcx);
1116 if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() {
1117 if *in_mut != mutability {
1118 self.fail(location, "input and output mutability must match");
1119 }
1120
1121 if !in_pointee.is_sized(self.tcx, self.typing_env) {
1123 self.fail(location, "input pointer must be thin");
1124 }
1125 } else {
1126 self.fail(
1127 location,
1128 "first operand to raw pointer aggregate must be a raw pointer",
1129 );
1130 }
1131
1132 if pointee_ty.is_slice() {
1134 if !self.mir_assign_valid_types(metadata_ty, self.tcx.types.usize) {
1135 self.fail(location, "slice metadata must be usize");
1136 }
1137 } else if pointee_ty.is_sized(self.tcx, self.typing_env) {
1138 if metadata_ty != self.tcx.types.unit {
1139 self.fail(location, "metadata for pointer-to-thin must be unit");
1140 }
1141 }
1142 } else {
1143 self.fail(location, "raw pointer aggregate must have 2 fields");
1144 }
1145 }
1146 },
1147 Rvalue::Ref(_, BorrowKind::Fake(_), _) => {
1148 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1149 self.fail(
1150 location,
1151 "`Assign` statement with a `Fake` borrow should have been removed in runtime MIR",
1152 );
1153 }
1154 }
1155 Rvalue::Ref(..) => {}
1156 Rvalue::BinaryOp(op, vals) => {
1157 use BinOp::*;
1158 let a = vals.0.ty(&self.body.local_decls, self.tcx);
1159 let b = vals.1.ty(&self.body.local_decls, self.tcx);
1160 if crate::util::binop_right_homogeneous(*op) {
1161 if let Eq | Lt | Le | Ne | Ge | Gt = op {
1162 if !self.mir_assign_valid_types(a, b) {
1164 self.fail(
1165 location,
1166 format!("Cannot {op:?} compare incompatible types {a} and {b}"),
1167 );
1168 }
1169 } else if a != b {
1170 self.fail(
1171 location,
1172 format!("Cannot perform binary op {op:?} on unequal types {a} and {b}"),
1173 );
1174 }
1175 }
1176
1177 match op {
1178 Offset => {
1179 check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..));
1180 if b != self.tcx.types.isize && b != self.tcx.types.usize {
1181 self.fail(location, format!("Cannot offset by non-isize type {b}"));
1182 }
1183 }
1184 Eq | Lt | Le | Ne | Ge | Gt => {
1185 for x in [a, b] {
1186 check_kinds!(
1187 x,
1188 "Cannot {op:?} compare type {:?}",
1189 ty::Bool
1190 | ty::Char
1191 | ty::Int(..)
1192 | ty::Uint(..)
1193 | ty::Float(..)
1194 | ty::RawPtr(..)
1195 | ty::FnPtr(..)
1196 )
1197 }
1198 }
1199 Cmp => {
1200 for x in [a, b] {
1201 check_kinds!(
1202 x,
1203 "Cannot three-way compare non-integer type {:?}",
1204 ty::Char | ty::Uint(..) | ty::Int(..)
1205 )
1206 }
1207 }
1208 AddUnchecked | AddWithOverflow | SubUnchecked | SubWithOverflow
1209 | MulUnchecked | MulWithOverflow | Shl | ShlUnchecked | Shr | ShrUnchecked => {
1210 for x in [a, b] {
1211 check_kinds!(
1212 x,
1213 "Cannot {op:?} non-integer type {:?}",
1214 ty::Uint(..) | ty::Int(..)
1215 )
1216 }
1217 }
1218 BitAnd | BitOr | BitXor => {
1219 for x in [a, b] {
1220 check_kinds!(
1221 x,
1222 "Cannot perform bitwise op {op:?} on type {:?}",
1223 ty::Uint(..) | ty::Int(..) | ty::Bool
1224 )
1225 }
1226 }
1227 Add | Sub | Mul | Div | Rem => {
1228 for x in [a, b] {
1229 check_kinds!(
1230 x,
1231 "Cannot perform arithmetic {op:?} on type {:?}",
1232 ty::Uint(..) | ty::Int(..) | ty::Float(..)
1233 )
1234 }
1235 }
1236 }
1237 }
1238 Rvalue::UnaryOp(op, operand) => {
1239 let a = operand.ty(&self.body.local_decls, self.tcx);
1240 match op {
1241 UnOp::Neg => {
1242 check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..))
1243 }
1244 UnOp::Not => {
1245 check_kinds!(
1246 a,
1247 "Cannot binary not type {:?}",
1248 ty::Int(..) | ty::Uint(..) | ty::Bool
1249 );
1250 }
1251 UnOp::PtrMetadata => {
1252 check_kinds!(
1253 a,
1254 "Cannot PtrMetadata non-pointer non-reference type {:?}",
1255 ty::RawPtr(..) | ty::Ref(..)
1256 );
1257 }
1258 }
1259 }
1260 Rvalue::Cast(kind, operand, target_type) => {
1261 let op_ty = operand.ty(self.body, self.tcx);
1262 match kind {
1263 CastKind::PointerWithExposedProvenance | CastKind::PointerExposeProvenance => {}
1265 CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer(_), _) => {
1266 check_kinds!(
1268 op_ty,
1269 "CastKind::{kind:?} input must be a fn item, not {:?}",
1270 ty::FnDef(..)
1271 );
1272 check_kinds!(
1273 target_type,
1274 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1275 ty::FnPtr(..)
1276 );
1277 }
1278 CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
1279 check_kinds!(
1281 op_ty,
1282 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1283 ty::FnPtr(..)
1284 );
1285 check_kinds!(
1286 target_type,
1287 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1288 ty::FnPtr(..)
1289 );
1290 }
1291 CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(..), _) => {
1292 check_kinds!(
1294 op_ty,
1295 "CastKind::{kind:?} input must be a closure, not {:?}",
1296 ty::Closure(..)
1297 );
1298 check_kinds!(
1299 target_type,
1300 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1301 ty::FnPtr(..)
1302 );
1303 }
1304 CastKind::PointerCoercion(PointerCoercion::MutToConstPointer, _) => {
1305 check_kinds!(
1307 op_ty,
1308 "CastKind::{kind:?} input must be a raw mut pointer, not {:?}",
1309 ty::RawPtr(_, Mutability::Mut)
1310 );
1311 check_kinds!(
1312 target_type,
1313 "CastKind::{kind:?} output must be a raw const pointer, not {:?}",
1314 ty::RawPtr(_, Mutability::Not)
1315 );
1316 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1317 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1318 }
1319 }
1320 CastKind::PointerCoercion(PointerCoercion::ArrayToPointer, _) => {
1321 check_kinds!(
1323 op_ty,
1324 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1325 ty::RawPtr(..)
1326 );
1327 check_kinds!(
1328 target_type,
1329 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1330 ty::RawPtr(..)
1331 );
1332 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1333 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1334 }
1335 }
1336 CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
1337 if !self.predicate_must_hold_modulo_regions(ty::TraitRef::new(
1340 self.tcx,
1341 self.tcx.require_lang_item(
1342 LangItem::CoerceUnsized,
1343 self.body.source_info(location).span,
1344 ),
1345 [op_ty, *target_type],
1346 )) {
1347 self.fail(location, format!("Unsize coercion, but `{op_ty}` isn't coercible to `{target_type}`"));
1348 }
1349 }
1350 CastKind::IntToInt | CastKind::IntToFloat => {
1351 let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
1352 let target_valid = target_type.is_numeric() || target_type.is_char();
1353 if !input_valid || !target_valid {
1354 self.fail(
1355 location,
1356 format!("Wrong cast kind {kind:?} for the type {op_ty}"),
1357 );
1358 }
1359 }
1360 CastKind::FnPtrToPtr => {
1361 check_kinds!(
1362 op_ty,
1363 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1364 ty::FnPtr(..)
1365 );
1366 check_kinds!(
1367 target_type,
1368 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1369 ty::RawPtr(..)
1370 );
1371 }
1372 CastKind::PtrToPtr => {
1373 check_kinds!(
1374 op_ty,
1375 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1376 ty::RawPtr(..)
1377 );
1378 check_kinds!(
1379 target_type,
1380 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1381 ty::RawPtr(..)
1382 );
1383 }
1384 CastKind::FloatToFloat | CastKind::FloatToInt => {
1385 if !op_ty.is_floating_point() || !target_type.is_numeric() {
1386 self.fail(
1387 location,
1388 format!(
1389 "Trying to cast non 'Float' as {kind:?} into {target_type:?}"
1390 ),
1391 );
1392 }
1393 }
1394 CastKind::Transmute => {
1395 if !self
1399 .tcx
1400 .normalize_erasing_regions(
1401 self.typing_env,
1402 Unnormalized::new_wip(op_ty),
1403 )
1404 .is_sized(self.tcx, self.typing_env)
1405 {
1406 self.fail(
1407 location,
1408 format!("Cannot transmute from non-`Sized` type {op_ty}"),
1409 );
1410 }
1411 if !self
1412 .tcx
1413 .normalize_erasing_regions(
1414 self.typing_env,
1415 Unnormalized::new_wip(*target_type),
1416 )
1417 .is_sized(self.tcx, self.typing_env)
1418 {
1419 self.fail(
1420 location,
1421 format!("Cannot transmute to non-`Sized` type {target_type:?}"),
1422 );
1423 }
1424 }
1425 CastKind::Subtype => {
1426 if !util::sub_types(self.tcx, self.typing_env, op_ty, *target_type) {
1427 self.fail(
1428 location,
1429 format!("Failed subtyping {op_ty} and {target_type}"),
1430 )
1431 }
1432 }
1433 }
1434 }
1435 Rvalue::Repeat(_, _)
1436 | Rvalue::ThreadLocalRef(_)
1437 | Rvalue::RawPtr(_, _)
1438 | Rvalue::Discriminant(_) => {}
1439
1440 Rvalue::WrapUnsafeBinder(op, ty) => {
1441 let unwrapped_ty = op.ty(self.body, self.tcx);
1442 let ty::UnsafeBinder(binder_ty) = *ty.kind() else {
1443 self.fail(
1444 location,
1445 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
1446 );
1447 return;
1448 };
1449 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
1450 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
1451 self.fail(
1452 location,
1453 format!("Cannot wrap {unwrapped_ty} into unsafe binder {binder_ty:?}"),
1454 );
1455 }
1456 }
1457 }
1458 self.super_rvalue(rvalue, location);
1459 }
1460
1461 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1462 match &statement.kind {
1463 StatementKind::Assign(box (dest, rvalue)) => {
1464 let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
1466 let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
1467
1468 if !self.mir_assign_valid_types(right_ty, left_ty) {
1469 self.fail(
1470 location,
1471 format!(
1472 "encountered `{:?}` with incompatible types:\n\
1473 left-hand side has type: {}\n\
1474 right-hand side has type: {}",
1475 statement.kind, left_ty, right_ty,
1476 ),
1477 );
1478 }
1479
1480 if let Some(local) = dest.as_local()
1481 && let ClearCrossCrate::Set(box LocalInfo::DerefTemp) =
1482 self.body.local_decls[local].local_info
1483 && !matches!(rvalue, Rvalue::CopyForDeref(_))
1484 {
1485 self.fail(location, "assignment to a `DerefTemp` must use `CopyForDeref`")
1486 }
1487 }
1488 StatementKind::AscribeUserType(..) => {
1489 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1490 self.fail(
1491 location,
1492 "`AscribeUserType` should have been removed after drop lowering phase",
1493 );
1494 }
1495 }
1496 StatementKind::FakeRead(..) => {
1497 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1498 self.fail(
1499 location,
1500 "`FakeRead` should have been removed after drop lowering phase",
1501 );
1502 }
1503 }
1504 StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op)) => {
1505 let ty = op.ty(&self.body.local_decls, self.tcx);
1506 if !ty.is_bool() {
1507 self.fail(
1508 location,
1509 format!("`assume` argument must be `bool`, but got: `{ty}`"),
1510 );
1511 }
1512 }
1513 StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
1514 CopyNonOverlapping { src, dst, count },
1515 )) => {
1516 let src_ty = src.ty(&self.body.local_decls, self.tcx);
1517 let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
1518 src_deref
1519 } else {
1520 self.fail(
1521 location,
1522 format!("Expected src to be ptr in copy_nonoverlapping, got: {src_ty}"),
1523 );
1524 return;
1525 };
1526 let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
1527 let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
1528 dst_deref
1529 } else {
1530 self.fail(
1531 location,
1532 format!("Expected dst to be ptr in copy_nonoverlapping, got: {dst_ty}"),
1533 );
1534 return;
1535 };
1536 if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
1539 self.fail(location, format!("bad arg ({op_src_ty} != {op_dst_ty})"));
1540 }
1541
1542 let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
1543 if op_cnt_ty != self.tcx.types.usize {
1544 self.fail(location, format!("bad arg ({op_cnt_ty} != usize)"))
1545 }
1546 }
1547 StatementKind::SetDiscriminant { place, .. } => {
1548 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
1549 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
1550 }
1551 let pty = place.ty(&self.body.local_decls, self.tcx).ty;
1552 if !matches!(
1553 pty.kind(),
1554 ty::Adt(..)
1555 | ty::Coroutine(..)
1556 | ty::Alias(ty::AliasTy { kind: ty::Opaque { .. }, .. })
1557 ) {
1558 self.fail(
1559 location,
1560 format!(
1561 "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty}"
1562 ),
1563 );
1564 }
1565 }
1566 StatementKind::StorageLive(_)
1567 | StatementKind::StorageDead(_)
1568 | StatementKind::Coverage(_)
1569 | StatementKind::ConstEvalCounter
1570 | StatementKind::PlaceMention(..)
1571 | StatementKind::BackwardIncompatibleDropHint { .. }
1572 | StatementKind::Nop => {}
1573 }
1574
1575 self.super_statement(statement, location);
1576 }
1577
1578 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
1579 match &terminator.kind {
1580 TerminatorKind::SwitchInt { targets, discr } => {
1581 let switch_ty = discr.ty(&self.body.local_decls, self.tcx);
1582
1583 let target_width = self.tcx.sess.target.pointer_width;
1584
1585 let size = Size::from_bits(match switch_ty.kind() {
1586 ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
1587 ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
1588 ty::Char => 32,
1589 ty::Bool => 1,
1590 other => bug!("unhandled type: {:?}", other),
1591 });
1592
1593 for (value, _) in targets.iter() {
1594 if ScalarInt::try_from_uint(value, size).is_none() {
1595 self.fail(
1596 location,
1597 format!("the value {value:#x} is not a proper {switch_ty}"),
1598 )
1599 }
1600 }
1601 }
1602 TerminatorKind::Call { func, .. } | TerminatorKind::TailCall { func, .. } => {
1603 let func_ty = func.ty(&self.body.local_decls, self.tcx);
1604 match func_ty.kind() {
1605 ty::FnPtr(..) | ty::FnDef(..) => {}
1606 _ => self.fail(
1607 location,
1608 format!(
1609 "encountered non-callable type {func_ty} in `{}` terminator",
1610 terminator.kind.name()
1611 ),
1612 ),
1613 }
1614
1615 if let TerminatorKind::TailCall { .. } = terminator.kind {
1616 }
1619 }
1620 TerminatorKind::Assert { cond, .. } => {
1621 let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
1622 if cond_ty != self.tcx.types.bool {
1623 self.fail(
1624 location,
1625 format!(
1626 "encountered non-boolean condition of type {cond_ty} in `Assert` terminator"
1627 ),
1628 );
1629 }
1630 }
1631 TerminatorKind::Goto { .. }
1632 | TerminatorKind::Drop { .. }
1633 | TerminatorKind::Yield { .. }
1634 | TerminatorKind::FalseEdge { .. }
1635 | TerminatorKind::FalseUnwind { .. }
1636 | TerminatorKind::InlineAsm { .. }
1637 | TerminatorKind::CoroutineDrop
1638 | TerminatorKind::UnwindResume
1639 | TerminatorKind::UnwindTerminate(_)
1640 | TerminatorKind::Return
1641 | TerminatorKind::Unreachable => {}
1642 }
1643
1644 self.super_terminator(terminator, location);
1645 }
1646
1647 fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) {
1648 if let ClearCrossCrate::Set(box LocalInfo::DerefTemp) = local_decl.local_info {
1649 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1650 self.fail(
1651 START_BLOCK.start_location(),
1652 "`DerefTemp` should have been removed in runtime MIR",
1653 );
1654 } else if local_decl.ty.builtin_deref(true).is_none() {
1655 self.fail(
1656 START_BLOCK.start_location(),
1657 "`DerefTemp` should only be used for dereferenceable types",
1658 )
1659 }
1660 }
1661
1662 self.super_local_decl(local, local_decl);
1663 }
1664}
1665
1666pub(super) fn validate_debuginfos<'tcx>(body: &Body<'tcx>) -> Vec<(Location, String)> {
1667 let mut debuginfo_checker =
1668 DebuginfoChecker { debuginfo_locals: debuginfo_locals(body), failures: Vec::new() };
1669 debuginfo_checker.visit_body(body);
1670 debuginfo_checker.failures
1671}
1672
/// MIR visitor that verifies statement-level debuginfo only refers to locals
/// that are actually tracked in the body's debuginfo.
struct DebuginfoChecker {
    // Set of locals that appear in the body's debuginfo, as computed by
    // `debuginfo_locals`.
    debuginfo_locals: DenseBitSet<Local>,
    // Accumulated validation failures, each paired with its location.
    failures: Vec<(Location, String)>,
}
1677
1678impl<'tcx> Visitor<'tcx> for DebuginfoChecker {
1679 fn visit_statement_debuginfo(
1680 &mut self,
1681 stmt_debuginfo: &StmtDebugInfo<'tcx>,
1682 location: Location,
1683 ) {
1684 let local = match stmt_debuginfo {
1685 StmtDebugInfo::AssignRef(local, _) | StmtDebugInfo::InvalidAssign(local) => *local,
1686 };
1687 if !self.debuginfo_locals.contains(local) {
1688 self.failures.push((location, format!("{local:?} is not in debuginfo")));
1689 }
1690 }
1691}