use std::assert_matches::assert_matches;
use std::cell::RefCell;
use std::fmt::Formatter;

use rustc_abi::{BackendRepr, FIRST_VARIANT, FieldIdx, Size, VariantIdx};
use rustc_const_eval::const_eval::{DummyMachine, throw_machine_stop_str};
use rustc_const_eval::interpret::{
    ImmTy, Immediate, InterpCx, OpTy, PlaceTy, Projectable, interp_ok,
};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def::DefKind;
use rustc_middle::bug;
use rustc_middle::mir::interpret::{InterpResult, Scalar};
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_mir_dataflow::fmt::DebugWithContext;
use rustc_mir_dataflow::lattice::{FlatSet, HasBottom};
use rustc_mir_dataflow::value_analysis::{
    Map, PlaceCollectionMode, PlaceIndex, State, TrackElem, ValueOrPlace, debug_with_context,
};
use rustc_mir_dataflow::{Analysis, ResultsVisitor, visit_reachable_results};
use rustc_span::DUMMY_SP;
use tracing::{debug, debug_span, instrument};
29
// Skip the pass (below `mir_opt_level >= 4`, see `run_pass`) for bodies with
// more basic blocks than this, to bound compile time on huge functions.
const BLOCK_LIMIT: usize = 100;
// Upper bound on the number of tracked places handed to `Map::new` as
// `value_limit` (again only below `mir_opt_level >= 4`).
const PLACE_LIMIT: usize = 100;
34
/// A constant-propagation pass built on the MIR dataflow framework: it tracks
/// scalar values of places with a `FlatSet<Scalar>` lattice, then rewrites
/// operands and assignments whose value is statically known.
pub(super) struct DataflowConstProp;
36
37impl<'tcx> crate::MirPass<'tcx> for DataflowConstProp {
38 fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
39 sess.mir_opt_level() >= 3
40 }
41
42 #[instrument(skip_all level = "debug")]
43 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
44 if body.coroutine.is_some() {
46 return;
47 }
48
49 debug!(def_id = ?body.source.def_id());
50 if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT {
51 debug!("aborted dataflow const prop due too many basic blocks");
52 return;
53 }
54
55 let value_limit = if tcx.sess.mir_opt_level() < 4 { Some(PLACE_LIMIT) } else { None };
64
65 let map = Map::new(tcx, body, PlaceCollectionMode::Full { value_limit });
67
68 let const_ = debug_span!("analyze")
70 .in_scope(|| ConstAnalysis::new(tcx, body, map).iterate_to_fixpoint(tcx, body, None));
71
72 let mut visitor = Collector::new(tcx, &body.local_decls);
74 debug_span!("collect").in_scope(|| visit_reachable_results(body, &const_, &mut visitor));
75 let mut patch = visitor.patch;
76 debug_span!("patch").in_scope(|| patch.visit_body_preserves_cfg(body));
77 }
78
79 fn is_required(&self) -> bool {
80 false
81 }
82}
83
/// The transfer functions of the constant-propagation dataflow analysis.
struct ConstAnalysis<'a, 'tcx> {
    // Which places are tracked, and how they map to `PlaceIndex`es.
    map: Map<'tcx>,
    tcx: TyCtxt<'tcx>,
    local_decls: &'a LocalDecls<'tcx>,
    // Interpreter used for constant evaluation of casts, binary ops, etc.
    // `RefCell` because the `Analysis` trait only hands out `&self`.
    ecx: RefCell<InterpCx<'tcx, DummyMachine>>,
    typing_env: ty::TypingEnv<'tcx>,
}
95
impl<'tcx> Analysis<'tcx> for ConstAnalysis<'_, 'tcx> {
    // Per-program-point state: either unreachable, or a `FlatSet<Scalar>`
    // value for every tracked place.
    type Domain = State<FlatSet<Scalar>>;

    const NAME: &'static str = "ConstAnalysis";

    fn bottom_value(&self, _body: &Body<'tcx>) -> Self::Domain {
        // Bottom = "not yet known to be reachable".
        State::Unreachable
    }

    fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) {
        // The entry block starts from bottom and is made reachable here.
        assert_matches!(state, State::Unreachable);
        *state = State::new_reachable();
        for arg in body.args_iter() {
            // Argument values are unknown on entry: flood them to Top.
            state.flood(PlaceRef { local: arg, projection: &[] }, &self.map);
        }
    }

    fn apply_primary_statement_effect(
        &self,
        state: &mut Self::Domain,
        statement: &Statement<'tcx>,
        _location: Location,
    ) {
        // Unreachable states stay unreachable; only transfer reachable ones.
        if state.is_reachable() {
            self.handle_statement(statement, state);
        }
    }

    fn apply_primary_terminator_effect<'mir>(
        &self,
        state: &mut Self::Domain,
        terminator: &'mir Terminator<'tcx>,
        _location: Location,
    ) -> TerminatorEdges<'mir, 'tcx> {
        if state.is_reachable() {
            self.handle_terminator(terminator, state)
        } else {
            // Propagate along no edges from an unreachable point.
            TerminatorEdges::None
        }
    }

    fn apply_call_return_effect(
        &self,
        state: &mut Self::Domain,
        _block: BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        if state.is_reachable() {
            self.handle_call_return(return_places, state)
        }
    }
}
152
impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
    fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, map: Map<'tcx>) -> Self {
        let typing_env = body.typing_env(tcx);
        Self {
            map,
            tcx,
            local_decls: &body.local_decls,
            // A machine-less interpreter, used purely for const-evaluating
            // individual operations (casts, binops, discriminants, ...).
            ecx: RefCell::new(InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine)),
            typing_env,
        }
    }

    /// Transfer function for a single statement.
    fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<FlatSet<Scalar>>) {
        match &statement.kind {
            StatementKind::Assign(box (place, rvalue)) => {
                self.handle_assign(*place, rvalue, state);
            }
            StatementKind::SetDiscriminant { box place, variant_index } => {
                self.handle_set_discriminant(*place, *variant_index, state);
            }
            StatementKind::Intrinsic(box intrinsic) => {
                self.handle_intrinsic(intrinsic);
            }
            StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
                // A dead/uninitialized local holds no value: reset it to
                // bottom rather than Top.
                state.flood_with(
                    Place::from(*local).as_ref(),
                    &self.map,
                    FlatSet::<Scalar>::BOTTOM,
                );
            }
            // These statements write no tracked place.
            StatementKind::ConstEvalCounter
            | StatementKind::Nop
            | StatementKind::FakeRead(..)
            | StatementKind::PlaceMention(..)
            | StatementKind::Coverage(..)
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::AscribeUserType(..) => {}
        }
    }

    /// Transfer function for non-diverging intrinsic statements.
    fn handle_intrinsic(&self, intrinsic: &NonDivergingIntrinsic<'tcx>) {
        match intrinsic {
            NonDivergingIntrinsic::Assume(..) => {
                // `assume` only reads its operand; no tracked place changes.
            }
            NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping {
                dst: _,
                src: _,
                count: _,
            }) => {
                // NOTE(review): nothing is invalidated for the bytes written
                // through `dst`; presumably places reachable only through raw
                // pointers are never tracked by the map — confirm.
            }
        }
    }

    /// Resolves an operand to either an abstract value or a tracked place.
    fn handle_operand(
        &self,
        operand: &Operand<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> ValueOrPlace<FlatSet<Scalar>> {
        match operand {
            // The outcome of a runtime check is not statically known.
            Operand::RuntimeChecks(_) => ValueOrPlace::TOP,
            Operand::Constant(box constant) => {
                ValueOrPlace::Value(self.handle_constant(constant, state))
            }
            Operand::Copy(place) | Operand::Move(place) => {
                // Refer to the tracked place if the map knows it; an
                // untracked place is simply unknown (Top).
                self.map.find(place.as_ref()).map(ValueOrPlace::Place).unwrap_or(ValueOrPlace::TOP)
            }
        }
    }

    /// Transfer function for a terminator; returns the edges along which the
    /// dataflow engine should propagate the state.
    fn handle_terminator<'mir>(
        &self,
        terminator: &'mir Terminator<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> TerminatorEdges<'mir, 'tcx> {
        match &terminator.kind {
            TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
                // The effect on the destination place is applied separately,
                // in `apply_call_return_effect` via `handle_call_return`.
            }
            TerminatorKind::Drop { place, .. } => {
                // The dropped place is dead afterwards: reset to bottom.
                state.flood_with(place.as_ref(), &self.map, FlatSet::<Scalar>::BOTTOM);
            }
            TerminatorKind::Yield { .. } => {
                // Coroutine bodies were rejected in `run_pass`.
                bug!("encountered disallowed terminator");
            }
            TerminatorKind::SwitchInt { discr, targets } => {
                // May narrow the outgoing edges when the discriminant value
                // is statically known.
                return self.handle_switch_int(discr, targets, state);
            }
            TerminatorKind::TailCall { .. } => {
                // No state change modeled here.
            }
            // None of these writes a tracked place.
            TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::Assert { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. } => {
            }
        }
        terminator.edges()
    }

    /// The callee may have written anything into the return place(s):
    /// flood them to Top.
    fn handle_call_return(
        &self,
        return_places: CallReturnPlaces<'_, 'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) {
        return_places.for_each(|place| {
            state.flood(place.as_ref(), &self.map);
        })
    }

    /// Transfer function for `SetDiscriminant`: invalidate the old
    /// discriminant, then record the new one if it can be evaluated.
    fn handle_set_discriminant(
        &self,
        place: Place<'tcx>,
        variant_index: VariantIdx,
        state: &mut State<FlatSet<Scalar>>,
    ) {
        state.flood_discr(place.as_ref(), &self.map);
        if self.map.find_discr(place.as_ref()).is_some() {
            let enum_ty = place.ty(self.local_decls, self.tcx).ty;
            if let Some(discr) = self.eval_discriminant(enum_ty, variant_index) {
                state.assign_discr(
                    place.as_ref(),
                    ValueOrPlace::Value(FlatSet::Elem(discr)),
                    &self.map,
                );
            }
        }
    }

    /// Transfer function for an assignment. Rvalue kinds that write several
    /// tracked sub-places (aggregates, overflowing binops, unsizing casts)
    /// are handled specially; everything else goes through `handle_rvalue`.
    fn handle_assign(
        &self,
        target: Place<'tcx>,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) {
        match rvalue {
            Rvalue::Use(operand, _) => {
                // Invalidate the whole target first; `assign_operand` then
                // refines whatever sub-places it can.
                state.flood(target.as_ref(), &self.map);
                if let Some(target) = self.map.find(target.as_ref()) {
                    self.assign_operand(state, target, operand);
                }
            }
            Rvalue::CopyForDeref(_) => bug!("`CopyForDeref` in runtime MIR"),
            Rvalue::Aggregate(kind, operands) => {
                state.flood(target.as_ref(), &self.map);

                let Some(target_idx) = self.map.find(target.as_ref()) else { return };

                // Find where the fields live: directly under the target for
                // tuples/closures/structs, under the variant for enums.
                let (variant_target, variant_index) = match **kind {
                    AggregateKind::Tuple | AggregateKind::Closure(..) => (Some(target_idx), None),
                    AggregateKind::Adt(def_id, variant_index, ..) => {
                        match self.tcx.def_kind(def_id) {
                            DefKind::Struct => (Some(target_idx), None),
                            DefKind::Enum => (
                                self.map.apply(target_idx, TrackElem::Variant(variant_index)),
                                Some(variant_index),
                            ),
                            _ => return,
                        }
                    }
                    _ => return,
                };
                // Assign each field operand into its tracked sub-place.
                if let Some(variant_target_idx) = variant_target {
                    for (field_index, operand) in operands.iter_enumerated() {
                        if let Some(field) =
                            self.map.apply(variant_target_idx, TrackElem::Field(field_index))
                        {
                            self.assign_operand(state, field, operand);
                        }
                    }
                }
                // Enum aggregates also set the discriminant.
                if let Some(variant_index) = variant_index
                    && let Some(discr_idx) = self.map.apply(target_idx, TrackElem::Discriminant)
                {
                    let enum_ty = target.ty(self.local_decls, self.tcx).ty;
                    if let Some(discr_val) = self.eval_discriminant(enum_ty, variant_index) {
                        state.insert_value_idx(discr_idx, FlatSet::Elem(discr_val), &self.map);
                    }
                }
            }
            Rvalue::BinaryOp(op, box (left, right)) if op.is_overflowing() => {
                // Overflowing ops produce a `(value, overflow-flag)` pair;
                // record each component into its tracked field.
                state.flood(target.as_ref(), &self.map);

                let Some(target) = self.map.find(target.as_ref()) else { return };

                let value_target = self.map.apply(target, TrackElem::Field(0_u32.into()));
                let overflow_target = self.map.apply(target, TrackElem::Field(1_u32.into()));

                if value_target.is_some() || overflow_target.is_some() {
                    let (val, overflow) = self.binary_op(state, *op, left, right);

                    if let Some(value_target) = value_target {
                        state.insert_value_idx(value_target, val, &self.map);
                    }
                    if let Some(overflow_target) = overflow_target {
                        state.insert_value_idx(overflow_target, overflow, &self.map);
                    }
                }
            }
            Rvalue::Cast(
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                operand,
                _,
            ) => {
                // Unsizing a `&[T; N]` to `&[T]`: propagate the pointer, and
                // additionally record the statically-known length `N` into
                // the target's tracked length element.
                let pointer = self.handle_operand(operand, state);
                state.assign(target.as_ref(), pointer, &self.map);

                if let Some(target_len) = self.map.find_len(target.as_ref())
                    && let operand_ty = operand.ty(self.local_decls, self.tcx)
                    && let Some(operand_ty) = operand_ty.builtin_deref(true)
                    && let ty::Array(_, len) = operand_ty.kind()
                    && let Some(len) = Const::Ty(self.tcx.types.usize, *len)
                        .try_eval_scalar_int(self.tcx, self.typing_env)
                {
                    state.insert_value_idx(target_len, FlatSet::Elem(len.into()), &self.map);
                }
            }
            _ => {
                // Single-value rvalues: evaluate and assign.
                let result = self.handle_rvalue(rvalue, state);
                state.assign(target.as_ref(), result, &self.map);
            }
        }
    }

    /// Evaluates an rvalue to an abstract value (or a tracked place).
    fn handle_rvalue(
        &self,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> ValueOrPlace<FlatSet<Scalar>> {
        let val = match rvalue {
            Rvalue::Cast(CastKind::IntToInt | CastKind::IntToFloat, operand, ty) => {
                let Ok(layout) = self.tcx.layout_of(self.typing_env.as_query_input(*ty)) else {
                    return ValueOrPlace::Value(FlatSet::Top);
                };
                // Known operand: let the interpreter perform the cast.
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(op) => self
                        .ecx
                        .borrow()
                        .int_to_int_or_float(&op, layout)
                        .discard_err()
                        .map_or(FlatSet::Top, |result| self.wrap_immediate(*result)),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            Rvalue::Cast(CastKind::FloatToInt | CastKind::FloatToFloat, operand, ty) => {
                let Ok(layout) = self.tcx.layout_of(self.typing_env.as_query_input(*ty)) else {
                    return ValueOrPlace::Value(FlatSet::Top);
                };
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(op) => self
                        .ecx
                        .borrow()
                        .float_to_float_or_int(&op, layout)
                        .discard_err()
                        .map_or(FlatSet::Top, |result| self.wrap_immediate(*result)),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            Rvalue::Cast(CastKind::Transmute | CastKind::Subtype, operand, _) => {
                // Bit-identical reinterpretation: the scalar carries over.
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(op) => self.wrap_immediate(*op),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            Rvalue::BinaryOp(op, box (left, right)) if !op.is_overflowing() => {
                // Non-overflowing binop: only the value component matters.
                let (val, _overflow) = self.binary_op(state, *op, left, right);
                val
            }
            Rvalue::UnaryOp(op, operand) => {
                // `PtrMetadata` of a place with a tracked length is exactly
                // that length element.
                if let UnOp::PtrMetadata = op
                    && let Some(place) = operand.place()
                    && let Some(len) = self.map.find_len(place.as_ref())
                {
                    return ValueOrPlace::Place(len);
                }
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(value) => self
                        .ecx
                        .borrow()
                        .unary_op(*op, &value)
                        .discard_err()
                        .map_or(FlatSet::Top, |val| self.wrap_immediate(*val)),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            // The discriminant is read straight from the tracked state.
            Rvalue::Discriminant(place) => state.get_discr(place.as_ref(), &self.map),
            Rvalue::Use(operand, _) => return self.handle_operand(operand, state),
            Rvalue::CopyForDeref(_) => bug!("`CopyForDeref` in runtime MIR"),
            Rvalue::Ref(..) | Rvalue::RawPtr(..) => {
                // Pointer values are not tracked.
                return ValueOrPlace::TOP;
            }
            // Remaining cast/binop/aggregate forms were handled above or are
            // not evaluated by this analysis.
            Rvalue::Repeat(..)
            | Rvalue::ThreadLocalRef(..)
            | Rvalue::Cast(..)
            | Rvalue::BinaryOp(..)
            | Rvalue::Aggregate(..)
            | Rvalue::WrapUnsafeBinder(..) => {
                return ValueOrPlace::TOP;
            }
        };
        ValueOrPlace::Value(val)
    }

    /// Evaluates a MIR constant operand to a scalar, if possible.
    fn handle_constant(
        &self,
        constant: &ConstOperand<'tcx>,
        _state: &mut State<FlatSet<Scalar>>,
    ) -> FlatSet<Scalar> {
        constant
            .const_
            .try_eval_scalar(self.tcx, self.typing_env)
            .map_or(FlatSet::Top, FlatSet::Elem)
    }

    /// If the switch discriminant is known, returns only the taken edge;
    /// otherwise falls back to all switch edges.
    fn handle_switch_int<'mir>(
        &self,
        discr: &'mir Operand<'tcx>,
        targets: &'mir SwitchTargets,
        state: &mut State<FlatSet<Scalar>>,
    ) -> TerminatorEdges<'mir, 'tcx> {
        let value = match self.handle_operand(discr, state) {
            ValueOrPlace::Value(value) => value,
            ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
        };
        match value {
            // Bottom discriminant: this point is effectively unreachable.
            FlatSet::Bottom => TerminatorEdges::None,
            FlatSet::Elem(scalar) => {
                // A provenance-free scalar pins down the single taken target.
                if let Ok(scalar_int) = scalar.try_to_scalar_int() {
                    TerminatorEdges::Single(
                        targets.target_for_value(scalar_int.to_bits_unchecked()),
                    )
                } else {
                    TerminatorEdges::SwitchInt { discr, targets }
                }
            }
            FlatSet::Top => TerminatorEdges::SwitchInt { discr, targets },
        }
    }

    /// Assigns an operand into the tracked place `place`, refining as many
    /// of its sub-places as possible.
    fn assign_operand(
        &self,
        state: &mut State<FlatSet<Scalar>>,
        place: PlaceIndex,
        operand: &Operand<'tcx>,
    ) {
        match operand {
            Operand::RuntimeChecks(_) => {}
            Operand::Copy(rhs) | Operand::Move(rhs) => {
                if let Some(rhs) = self.map.find(rhs.as_ref()) {
                    // Tracked-to-tracked copy: transfer the whole subtree.
                    state.insert_place_idx(place, rhs, &self.map);
                } else if rhs.projection.first() == Some(&PlaceElem::Deref)
                    && let FlatSet::Elem(pointer) = state.get(rhs.local.into(), &self.map)
                    && let rhs_ty = self.local_decls[rhs.local].ty
                    && let Ok(rhs_layout) =
                        self.tcx.layout_of(self.typing_env.as_query_input(rhs_ty))
                {
                    // Reading through a known pointer: materialize it as an
                    // interpreter operand and project through it.
                    let op = ImmTy::from_scalar(pointer, rhs_layout).into();
                    self.assign_constant(state, place, op, rhs.projection);
                }
            }
            Operand::Constant(box constant) => {
                if let Some(constant) = self
                    .ecx
                    .borrow()
                    .eval_mir_constant(&constant.const_, constant.span, None)
                    .discard_err()
                {
                    self.assign_constant(state, place, constant, &[]);
                }
            }
        }
    }

    /// Projects `operand` through `projection`, then copies every scalar leaf
    /// into the corresponding tracked sub-place of `place`. Aborts (leaving
    /// the flooded state as-is) on any projection it cannot evaluate.
    #[instrument(level = "trace", skip(self, state))]
    fn assign_constant(
        &self,
        state: &mut State<FlatSet<Scalar>>,
        place: PlaceIndex,
        mut operand: OpTy<'tcx>,
        projection: &[PlaceElem<'tcx>],
    ) {
        for &(mut proj_elem) in projection {
            if let PlaceElem::Index(index) = proj_elem {
                // A runtime index can only be applied if its value is known;
                // turn it into the equivalent constant index.
                if let FlatSet::Elem(index) = state.get(index.into(), &self.map)
                    && let Some(offset) = index.to_target_usize(&self.tcx).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    proj_elem = PlaceElem::ConstantIndex { offset, min_length, from_end: false };
                } else {
                    return;
                }
            }
            operand = if let Some(operand) =
                self.ecx.borrow().project(&operand, proj_elem).discard_err()
            {
                operand
            } else {
                return;
            }
        }

        // Walk the tracked sub-places under `place`, mirroring each tracked
        // element as an interpreter projection, and record the scalar leaves.
        self.map.for_each_projection_value(
            place,
            operand,
            &mut |elem, op| match elem {
                TrackElem::Field(idx) => self.ecx.borrow().project_field(op, idx).discard_err(),
                TrackElem::Variant(idx) => {
                    self.ecx.borrow().project_downcast(op, idx).discard_err()
                }
                TrackElem::Discriminant => {
                    // Read the discriminant and re-encode it as a value.
                    let variant = self.ecx.borrow().read_discriminant(op).discard_err()?;
                    let discr_value = self
                        .ecx
                        .borrow()
                        .discriminant_for_variant(op.layout.ty, variant)
                        .discard_err()?;
                    Some(discr_value.into())
                }
                TrackElem::DerefLen => {
                    // Length of the pointee, as a `usize` immediate.
                    let op: OpTy<'_> = self.ecx.borrow().deref_pointer(op).discard_err()?.into();
                    let len_usize = op.len(&self.ecx.borrow()).discard_err()?;
                    let layout = self
                        .tcx
                        .layout_of(self.typing_env.as_query_input(self.tcx.types.usize))
                        .unwrap();
                    Some(ImmTy::from_uint(len_usize, layout).into())
                }
            },
            &mut |place, op| {
                if let Some(imm) = self.ecx.borrow().read_immediate_raw(op).discard_err()
                    && let Some(imm) = imm.right()
                {
                    let elem = self.wrap_immediate(*imm);
                    state.insert_value_idx(place, elem, &self.map);
                }
            },
        );
    }

    /// Evaluates a binary operation, returning `(value, overflow-flag)`.
    /// The overflow component is only meaningful for overflowing ops.
    fn binary_op(
        &self,
        state: &mut State<FlatSet<Scalar>>,
        op: BinOp,
        left: &Operand<'tcx>,
        right: &Operand<'tcx>,
    ) -> (FlatSet<Scalar>, FlatSet<Scalar>) {
        let left = self.eval_operand(left, state);
        let right = self.eval_operand(right, state);

        match (left, right) {
            (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
            (FlatSet::Elem(left), FlatSet::Elem(right)) => {
                // Both operands known: fully evaluate.
                match self.ecx.borrow().binary_op(op, &left, &right).discard_err() {
                    Some(val) => {
                        // Overflowing ops yield a (value, flag) scalar pair.
                        if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
                            let (val, overflow) = val.to_scalar_pair();
                            (FlatSet::Elem(val), FlatSet::Elem(overflow))
                        } else {
                            (FlatSet::Elem(val.to_scalar()), FlatSet::Bottom)
                        }
                    }
                    _ => (FlatSet::Top, FlatSet::Top),
                }
            }
            (FlatSet::Elem(const_arg), _) | (_, FlatSet::Elem(const_arg)) => {
                // One known operand: a few ops have absorbing elements that
                // determine the result regardless of the other side.
                let layout = const_arg.layout;
                if !matches!(layout.backend_repr, rustc_abi::BackendRepr::Scalar(..)) {
                    return (FlatSet::Top, FlatSet::Top);
                }

                let arg_scalar = const_arg.to_scalar();
                let Some(arg_value) = arg_scalar.to_bits(layout.size).discard_err() else {
                    return (FlatSet::Top, FlatSet::Top);
                };

                match op {
                    // `x & 0 == 0`.
                    BinOp::BitAnd if arg_value == 0 => (FlatSet::Elem(arg_scalar), FlatSet::Bottom),
                    // `x | !0 == !0` (and `x | true == true` for bools).
                    BinOp::BitOr
                        if arg_value == layout.size.truncate(u128::MAX)
                            || (layout.ty.is_bool() && arg_value == 1) =>
                    {
                        (FlatSet::Elem(arg_scalar), FlatSet::Bottom)
                    }
                    // `x * 0 == 0`, and multiplying by zero cannot overflow.
                    BinOp::Mul if layout.ty.is_integral() && arg_value == 0 => {
                        (FlatSet::Elem(arg_scalar), FlatSet::Elem(Scalar::from_bool(false)))
                    }
                    _ => (FlatSet::Top, FlatSet::Top),
                }
            }
            (FlatSet::Top, FlatSet::Top) => (FlatSet::Top, FlatSet::Top),
        }
    }

    /// Evaluates an operand to a typed immediate (`ImmTy`), attaching the
    /// operand's layout to the tracked scalar.
    fn eval_operand(
        &self,
        op: &Operand<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> FlatSet<ImmTy<'tcx>> {
        let value = match self.handle_operand(op, state) {
            ValueOrPlace::Value(value) => value,
            ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
        };
        match value {
            FlatSet::Top => FlatSet::Top,
            FlatSet::Elem(scalar) => {
                let ty = op.ty(self.local_decls, self.tcx);
                // Without a layout we cannot build an `ImmTy`: degrade to Top.
                self.tcx
                    .layout_of(self.typing_env.as_query_input(ty))
                    .map_or(FlatSet::Top, |layout| {
                        FlatSet::Elem(ImmTy::from_scalar(scalar, layout))
                    })
            }
            FlatSet::Bottom => FlatSet::Bottom,
        }
    }

    /// Computes the discriminant scalar of `variant_index` for `enum_ty`.
    /// Returns `None` for non-enums or when layout/evaluation fails.
    fn eval_discriminant(&self, enum_ty: Ty<'tcx>, variant_index: VariantIdx) -> Option<Scalar> {
        if !enum_ty.is_enum() {
            return None;
        }
        let enum_ty_layout = self.tcx.layout_of(self.typing_env.as_query_input(enum_ty)).ok()?;
        let discr_value = self
            .ecx
            .borrow()
            .discriminant_for_variant(enum_ty_layout.ty, variant_index)
            .discard_err()?;
        Some(discr_value.to_scalar())
    }

    /// Lowers an interpreter immediate into the flat lattice: scalars become
    /// elements, uninit becomes bottom, everything else (pairs) becomes Top.
    fn wrap_immediate(&self, imm: Immediate) -> FlatSet<Scalar> {
        match imm {
            Immediate::Scalar(scalar) => FlatSet::Elem(scalar),
            Immediate::Uninit => FlatSet::Bottom,
            _ => FlatSet::Top,
        }
    }
}
734
735impl<'tcx> DebugWithContext<ConstAnalysis<'_, 'tcx>> for State<FlatSet<Scalar>> {
737 fn fmt_with(&self, ctxt: &ConstAnalysis<'_, 'tcx>, f: &mut Formatter<'_>) -> std::fmt::Result {
738 match self {
739 State::Reachable(values) => debug_with_context(values, None, &ctxt.map, f),
740 State::Unreachable => write!(f, "unreachable"),
741 }
742 }
743
744 fn fmt_diff_with(
745 &self,
746 old: &Self,
747 ctxt: &ConstAnalysis<'_, 'tcx>,
748 f: &mut Formatter<'_>,
749 ) -> std::fmt::Result {
750 match (self, old) {
751 (State::Reachable(this), State::Reachable(old)) => {
752 debug_with_context(this, Some(old), &ctxt.map, f)
753 }
754 _ => Ok(()), }
756 }
757}
758
/// The replacements collected during analysis, applied by the `MutVisitor`
/// impl below.
struct Patch<'tcx> {
    tcx: TyCtxt<'tcx>,

    // Operands whose value (keyed by location and place) was known *before*
    // the statement/terminator at that location executes.
    before_effect: FxHashMap<(Location, Place<'tcx>), Const<'tcx>>,

    // Assignments whose entire RHS can be replaced by a constant, keyed by
    // the statement's location.
    assignments: FxHashMap<Location, Const<'tcx>>,
}
770
771impl<'tcx> Patch<'tcx> {
772 pub(crate) fn new(tcx: TyCtxt<'tcx>) -> Self {
773 Self { tcx, before_effect: FxHashMap::default(), assignments: FxHashMap::default() }
774 }
775
776 fn make_operand(&self, const_: Const<'tcx>) -> Operand<'tcx> {
777 Operand::Constant(Box::new(ConstOperand { span: DUMMY_SP, user_ty: None, const_ }))
778 }
779}
780
/// Walks the fixpoint results and accumulates propagatable constants into
/// its `patch`.
struct Collector<'a, 'tcx> {
    patch: Patch<'tcx>,
    local_decls: &'a LocalDecls<'tcx>,
}
785
impl<'a, 'tcx> Collector<'a, 'tcx> {
    pub(crate) fn new(tcx: TyCtxt<'tcx>, local_decls: &'a LocalDecls<'tcx>) -> Self {
        Self { patch: Patch::new(tcx), local_decls }
    }

    /// Attempts to turn the analysis state of `place` into a MIR constant.
    /// Returns `None` when the value is not fully known, is unsized, or has
    /// a layout this code does not materialize.
    #[instrument(level = "trace", skip(self, ecx, map), ret)]
    fn try_make_constant(
        &self,
        ecx: &mut InterpCx<'tcx, DummyMachine>,
        place: Place<'tcx>,
        state: &State<FlatSet<Scalar>>,
        map: &Map<'tcx>,
    ) -> Option<Const<'tcx>> {
        let ty = place.ty(self.local_decls, self.patch.tcx).ty;
        let layout = ecx.layout_of(ty).ok()?;

        // A ZST carries no data: always propagatable.
        if layout.is_zst() {
            return Some(Const::zero_sized(ty));
        }

        if layout.is_unsized() {
            return None;
        }

        let place = map.find(place.as_ref())?;
        // Fast path: a single known provenance-free scalar.
        if layout.backend_repr.is_scalar()
            && let Some(value) = propagatable_scalar(place, state, map)
        {
            return Some(Const::Val(ConstValue::Scalar(value), ty));
        }

        // Otherwise materialize the value into a temporary allocation by
        // writing each tracked component (see `try_write_constant`).
        if matches!(layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
            let alloc_id = ecx
                .intern_with_temp_alloc(layout, |ecx, dest| {
                    try_write_constant(ecx, dest, place, ty, state, map)
                })
                .discard_err()?;
            return Some(Const::Val(ConstValue::Indirect { alloc_id, offset: Size::ZERO }, ty));
        }

        None
    }
}
829
830#[instrument(level = "trace", skip(map), ret)]
831fn propagatable_scalar(
832 place: PlaceIndex,
833 state: &State<FlatSet<Scalar>>,
834 map: &Map<'_>,
835) -> Option<Scalar> {
836 if let FlatSet::Elem(value) = state.get_idx(place, map)
837 && value.try_to_scalar_int().is_ok()
838 {
839 Some(value)
841 } else {
842 None
843 }
844}
845
/// Recursively writes the analysis state of `place` into the interpreter
/// destination `dest`. "Machine-stops" (aborting the surrounding
/// `intern_with_temp_alloc`) whenever any component is not fully known.
#[instrument(level = "trace", skip(ecx, state, map), ret)]
fn try_write_constant<'tcx>(
    ecx: &mut InterpCx<'tcx, DummyMachine>,
    dest: &PlaceTy<'tcx>,
    place: PlaceIndex,
    ty: Ty<'tcx>,
    state: &State<FlatSet<Scalar>>,
    map: &Map<'tcx>,
) -> InterpResult<'tcx> {
    let layout = ecx.layout_of(ty)?;

    // ZSTs have no bytes to write.
    if layout.is_zst() {
        return interp_ok(());
    }

    // Fast path: a single known provenance-free scalar.
    if layout.backend_repr.is_scalar()
        && let Some(value) = propagatable_scalar(place, state, map)
    {
        return ecx.write_immediate(Immediate::Scalar(value), dest);
    }

    match ty.kind() {
        // Function items carry no data.
        ty::FnDef(..) => {}

        // The scalar fast path above failed, so the value is unknown or has
        // provenance — give up.
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char =>
            throw_machine_stop_str!("primitive type with provenance"),

        ty::Tuple(elem_tys) => {
            // Every element must be tracked and writable.
            for (i, elem) in elem_tys.iter().enumerate() {
                let i = FieldIdx::from_usize(i);
                let Some(field) = map.apply(place, TrackElem::Field(i)) else {
                    throw_machine_stop_str!("missing field in tuple")
                };
                let field_dest = ecx.project_field(dest, i)?;
                try_write_constant(ecx, &field_dest, field, elem, state, map)?;
            }
        }

        ty::Adt(def, args) => {
            if def.is_union() {
                throw_machine_stop_str!("cannot propagate unions")
            }

            // For enums, resolve the known discriminant back to a variant;
            // for structs, use the sole variant directly.
            let (variant_idx, variant_def, variant_place, variant_dest) = if def.is_enum() {
                let Some(discr) = map.apply(place, TrackElem::Discriminant) else {
                    throw_machine_stop_str!("missing discriminant for enum")
                };
                let FlatSet::Elem(Scalar::Int(discr)) = state.get_idx(discr, map) else {
                    throw_machine_stop_str!("discriminant with provenance")
                };
                let discr_bits = discr.to_bits(discr.size());
                let Some((variant, _)) = def.discriminants(*ecx.tcx).find(|(_, var)| discr_bits == var.val) else {
                    throw_machine_stop_str!("illegal discriminant for enum")
                };
                let Some(variant_place) = map.apply(place, TrackElem::Variant(variant)) else {
                    throw_machine_stop_str!("missing variant for enum")
                };
                let variant_dest = ecx.project_downcast(dest, variant)?;
                (variant, def.variant(variant), variant_place, variant_dest)
            } else {
                (FIRST_VARIANT, def.non_enum_variant(), place, dest.clone())
            };

            // Write the fields first, then the discriminant.
            for (i, field) in variant_def.fields.iter_enumerated() {
                let ty = field.ty(*ecx.tcx, args);
                let Some(field) = map.apply(variant_place, TrackElem::Field(i)) else {
                    throw_machine_stop_str!("missing field in ADT")
                };
                let field_dest = ecx.project_field(&variant_dest, i)?;
                try_write_constant(ecx, &field_dest, field, ty, state, map)?;
            }
            ecx.write_discriminant(variant_idx, dest)?;
        }

        // Everything else is not materialized by this function.
        ty::Array(_, _)
        | ty::Pat(_, _)

        | ty::Ref(..) | ty::RawPtr(..) | ty::FnPtr(..) | ty::Str | ty::Slice(_)

        | ty::Never
        | ty::Foreign(..)
        | ty::Alias(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::Dynamic(..)
        | ty::UnsafeBinder(_) => throw_machine_stop_str!("unsupported type"),

        ty::Error(_) | ty::Infer(..) | ty::CoroutineWitness(..) => bug!(),
    }

    interp_ok(())
}
948
impl<'tcx> ResultsVisitor<'tcx, ConstAnalysis<'_, 'tcx>> for Collector<'_, 'tcx> {
    /// Before a statement executes: record constants for the operands it
    /// *reads* (they still hold their pre-statement values here).
    #[instrument(level = "trace", skip(self, analysis, statement))]
    fn visit_after_early_statement_effect(
        &mut self,
        analysis: &ConstAnalysis<'_, 'tcx>,
        state: &State<FlatSet<Scalar>>,
        statement: &Statement<'tcx>,
        location: Location,
    ) {
        match &statement.kind {
            StatementKind::Assign(box (_, rvalue)) => {
                OperandCollector {
                    state,
                    visitor: self,
                    // Short-lived mutable borrow of the shared interpreter.
                    ecx: &mut analysis.ecx.borrow_mut(),
                    map: &analysis.map,
                }
                .visit_rvalue(rvalue, location);
            }
            _ => (),
        }
    }

    /// After a statement executes: if the assigned place now holds a known
    /// constant, record a replacement for the whole assignment.
    #[instrument(level = "trace", skip(self, analysis, statement))]
    fn visit_after_primary_statement_effect(
        &mut self,
        analysis: &ConstAnalysis<'_, 'tcx>,
        state: &State<FlatSet<Scalar>>,
        statement: &Statement<'tcx>,
        location: Location,
    ) {
        match statement.kind {
            StatementKind::Assign(box (_, Rvalue::Use(Operand::Constant(_), _))) => {
                // Already a constant assignment; nothing to improve.
            }
            StatementKind::Assign(box (place, _)) => {
                if let Some(value) = self.try_make_constant(
                    &mut analysis.ecx.borrow_mut(),
                    place,
                    state,
                    &analysis.map,
                ) {
                    self.patch.assignments.insert(location, value);
                }
            }
            _ => (),
        }
    }

    /// Before a terminator executes: record constants for its operands.
    fn visit_after_early_terminator_effect(
        &mut self,
        analysis: &ConstAnalysis<'_, 'tcx>,
        state: &State<FlatSet<Scalar>>,
        terminator: &Terminator<'tcx>,
        location: Location,
    ) {
        OperandCollector {
            state,
            visitor: self,
            ecx: &mut analysis.ecx.borrow_mut(),
            map: &analysis.map,
        }
        .visit_terminator(terminator, location);
    }
}
1014
impl<'tcx> MutVisitor<'tcx> for Patch<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    /// Replaces an entire assignment RHS when its constant value was
    /// recorded; otherwise recurses to rewrite individual operands.
    fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
        if let Some(value) = self.assignments.get(&location) {
            match &mut statement.kind {
                StatementKind::Assign(box (_, rvalue)) => {
                    // Preserve the retag flag of an existing `Use`; other
                    // rvalue kinds default to `WithRetag::Yes`.
                    let old_retag = match rvalue {
                        Rvalue::Use(_, retag) => *retag,
                        _ => WithRetag::Yes,
                    };
                    *rvalue = Rvalue::Use(self.make_operand(*value), old_retag);
                }
                _ => bug!("found assignment info for non-assign statement"),
            }
        } else {
            self.super_statement(statement, location);
        }
    }

    /// Replaces a place-reading operand by its recorded constant, if any.
    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        match operand {
            Operand::Copy(place) | Operand::Move(place) => {
                if let Some(value) = self.before_effect.get(&(location, *place)) {
                    *operand = self.make_operand(*value);
                } else if !place.projection.is_empty() {
                    // No whole-place constant: still try the projections
                    // (e.g. a known index local).
                    self.super_operand(operand, location)
                }
            }
            Operand::Constant(_) | Operand::RuntimeChecks(_) => {}
        }
    }

    /// Rewrites a runtime `Index` projection into a `ConstantIndex` when the
    /// index local's value was recorded.
    fn process_projection_elem(
        &mut self,
        elem: PlaceElem<'tcx>,
        location: Location,
    ) -> Option<PlaceElem<'tcx>> {
        if let PlaceElem::Index(local) = elem {
            let offset = self.before_effect.get(&(location, local.into()))?;
            let offset = offset.try_to_scalar()?;
            let offset = offset.to_target_usize(&self.tcx).discard_err()?;
            // `min_length = offset + 1` is the weakest bound that makes the
            // constant index in-bounds; bail on overflow.
            let min_length = offset.checked_add(1)?;
            Some(PlaceElem::ConstantIndex { offset, min_length, from_end: false })
        } else {
            None
        }
    }
}
1066
/// Visits the operands of a single statement/terminator, recording into the
/// collector's patch every operand whose pre-statement value is a constant.
struct OperandCollector<'a, 'b, 'tcx> {
    state: &'a State<FlatSet<Scalar>>,
    visitor: &'a mut Collector<'b, 'tcx>,
    ecx: &'a mut InterpCx<'tcx, DummyMachine>,
    map: &'a Map<'tcx>,
}
1073
impl<'tcx> Visitor<'tcx> for OperandCollector<'_, '_, 'tcx> {
    /// Records a constant for the local used in an `Index` projection, so
    /// `Patch::process_projection_elem` can turn it into a `ConstantIndex`.
    fn visit_projection_elem(
        &mut self,
        _: PlaceRef<'tcx>,
        elem: PlaceElem<'tcx>,
        _: PlaceContext,
        location: Location,
    ) {
        if let PlaceElem::Index(local) = elem
            && let Some(value) =
                self.visitor.try_make_constant(self.ecx, local.into(), self.state, self.map)
        {
            self.visitor.patch.before_effect.insert((location, local.into()), value);
        }
    }

    /// Records a constant for a place-reading operand; if the whole place is
    /// not constant, still descends into its projections.
    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        if let Some(place) = operand.place() {
            if let Some(value) =
                self.visitor.try_make_constant(self.ecx, place, self.state, self.map)
            {
                self.visitor.patch.before_effect.insert((location, place), value);
            } else if !place.projection.is_empty() {
                self.super_operand(operand, location)
            }
        }
    }
}