use std::assert_matches::{self, assert_matches};
use std::cell::RefCell;
use std::fmt::Formatter;

use rustc_abi::{BackendRepr, FIRST_VARIANT, FieldIdx, Size, VariantIdx};
use rustc_const_eval::const_eval::{DummyMachine, throw_machine_stop_str};
use rustc_const_eval::interpret::{
    ImmTy, Immediate, InterpCx, OpTy, PlaceTy, Projectable, interp_ok,
};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def::DefKind;
use rustc_middle::bug;
use rustc_middle::mir::interpret::{InterpResult, Scalar};
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_mir_dataflow::fmt::DebugWithContext;
use rustc_mir_dataflow::lattice::{FlatSet, HasBottom};
use rustc_mir_dataflow::value_analysis::{
    Map, PlaceCollectionMode, PlaceIndex, State, TrackElem, ValueOrPlace, debug_with_context,
};
use rustc_mir_dataflow::{Analysis, ResultsVisitor, visit_reachable_results};
use rustc_span::DUMMY_SP;
use tracing::{debug, debug_span, instrument};
29
// Limits that keep the analysis cheap at the default `mir-opt-level`: bodies with
// more than `BLOCK_LIMIT` basic blocks are skipped entirely, and at most
// `PLACE_LIMIT` places are tracked. Both limits are ignored at opt-level >= 4
// (see `run_pass`).
const BLOCK_LIMIT: usize = 100;
const PLACE_LIMIT: usize = 100;
34
35pub(super) struct DataflowConstProp;
36
impl<'tcx> crate::MirPass<'tcx> for DataflowConstProp {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        // Only run at the higher optimization levels.
        sess.mir_opt_level() >= 3
    }

    #[instrument(skip_all level = "debug")]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // Coroutine bodies are not handled by this pass; bail out early
        // (`handle_terminator` relies on never seeing `Yield`).
        if body.coroutine.is_some() {
            return;
        }

        debug!(def_id = ?body.source.def_id());
        // Cap the size of analyzed bodies unless the user opted into `mir-opt-level >= 4`.
        if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT {
            debug!("aborted dataflow const prop due too many basic blocks");
            return;
        }

        // Likewise, bound the number of tracked places at the default level.
        let value_limit = if tcx.sess.mir_opt_level() < 4 { Some(PLACE_LIMIT) } else { None };

        // Decide which places to track during the analysis.
        let map = Map::new(tcx, body, PlaceCollectionMode::Full { value_limit });

        // Run the dataflow analysis to a fixpoint.
        let const_ = debug_span!("analyze")
            .in_scope(|| ConstAnalysis::new(tcx, body, map).iterate_to_fixpoint(tcx, body, None));

        // Collect the propagatable constants, then patch them back into the MIR.
        let mut visitor = Collector::new(tcx, &body.local_decls);
        debug_span!("collect").in_scope(|| visit_reachable_results(body, &const_, &mut visitor));
        let mut patch = visitor.patch;
        debug_span!("patch").in_scope(|| patch.visit_body_preserves_cfg(body));
    }

    fn is_required(&self) -> bool {
        // Purely an optimization; sound to skip.
        false
    }
}
83
/// The dataflow analysis that tracks a flat scalar lattice for a selected set of places.
struct ConstAnalysis<'a, 'tcx> {
    /// The set of tracked places and their projection tree.
    map: Map<'tcx>,
    tcx: TyCtxt<'tcx>,
    local_decls: &'a LocalDecls<'tcx>,
    /// Interpreter used to evaluate pure operations on constants. Wrapped in a
    /// `RefCell` because the `Analysis` trait methods only receive `&self`.
    ecx: RefCell<InterpCx<'tcx, DummyMachine>>,
    typing_env: ty::TypingEnv<'tcx>,
}
95
impl<'tcx> Analysis<'tcx> for ConstAnalysis<'_, 'tcx> {
    // Every tracked place maps to a flat lattice of scalar values.
    type Domain = State<FlatSet<Scalar>>;

    const NAME: &'static str = "ConstAnalysis";

    fn bottom_value(&self, _body: &Body<'tcx>) -> Self::Domain {
        // Blocks that have not been visited yet start out unreachable.
        State::Unreachable
    }

    fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) {
        // The start block is reachable; flood all argument places to ⊤, everything
        // else stays at ⊥ (uninitialized).
        assert_matches!(state, State::Unreachable);
        *state = State::new_reachable();
        for arg in body.args_iter() {
            state.flood(PlaceRef { local: arg, projection: &[] }, &self.map);
        }
    }

    fn apply_primary_statement_effect(
        &self,
        state: &mut Self::Domain,
        statement: &Statement<'tcx>,
        _location: Location,
    ) {
        // An unreachable state carries no information to update.
        if state.is_reachable() {
            self.handle_statement(statement, state);
        }
    }

    fn apply_primary_terminator_effect<'mir>(
        &self,
        state: &mut Self::Domain,
        terminator: &'mir Terminator<'tcx>,
        _location: Location,
    ) -> TerminatorEdges<'mir, 'tcx> {
        if state.is_reachable() {
            self.handle_terminator(terminator, state)
        } else {
            // Do not propagate anything out of an unreachable block.
            TerminatorEdges::None
        }
    }

    fn apply_call_return_effect(
        &self,
        state: &mut Self::Domain,
        _block: BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        if state.is_reachable() {
            self.handle_call_return(return_places, state)
        }
    }
}
152
153impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
    /// Creates the analysis for `body`, tracking the places described by `map`.
    fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, map: Map<'tcx>) -> Self {
        let typing_env = body.typing_env(tcx);
        Self {
            map,
            tcx,
            local_decls: &body.local_decls,
            // A dummy-machine interpreter, used only to evaluate pure operations.
            ecx: RefCell::new(InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine)),
            typing_env,
        }
    }
164
    /// Transfer function for a single statement.
    fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<FlatSet<Scalar>>) {
        match &statement.kind {
            StatementKind::Assign(box (place, rvalue)) => {
                self.handle_assign(*place, rvalue, state);
            }
            StatementKind::SetDiscriminant { box place, variant_index } => {
                self.handle_set_discriminant(*place, *variant_index, state);
            }
            StatementKind::Intrinsic(box intrinsic) => {
                self.handle_intrinsic(intrinsic);
            }
            StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
                // StorageLive leaves the local uninitialized, and StorageDead makes
                // later accesses UB — either way the tracked value is gone (⊥).
                state.flood_with(
                    Place::from(*local).as_ref(),
                    &self.map,
                    FlatSet::<Scalar>::BOTTOM,
                );
            }
            StatementKind::Retag(..) => {
                // We don't track references, so retagging changes nothing we model.
            }
            StatementKind::ConstEvalCounter
            | StatementKind::Nop
            | StatementKind::FakeRead(..)
            | StatementKind::PlaceMention(..)
            | StatementKind::Coverage(..)
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::AscribeUserType(..) => {}
        }
    }
197
    /// Transfer function for non-diverging intrinsics. Neither case updates the state.
    fn handle_intrinsic(&self, intrinsic: &NonDivergingIntrinsic<'tcx>) {
        match intrinsic {
            NonDivergingIntrinsic::Assume(..) => {
                // We could refine the state from the assumed condition, but ignoring
                // it is sound.
            }
            NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping {
                dst: _,
                src: _,
                count: _,
            }) => {
                // This statement represents `*dst = *src`, `count` times. It writes
                // through a pointer; ignoring it presumably relies on places behind
                // pointers never being tracked — NOTE(review): confirm against how
                // `Map` collects places.
            }
        }
    }
212
    /// Resolves an operand either to a lattice value (for constants) or to a tracked
    /// place index (for copies/moves of tracked places); ⊤ otherwise.
    fn handle_operand(
        &self,
        operand: &Operand<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> ValueOrPlace<FlatSet<Scalar>> {
        match operand {
            Operand::RuntimeChecks(_) => ValueOrPlace::TOP,
            Operand::Constant(box constant) => {
                ValueOrPlace::Value(self.handle_constant(constant, state))
            }
            Operand::Copy(place) | Operand::Move(place) => {
                // Note that a move is treated exactly like a copy here: the
                // moved-from place is not invalidated.
                self.map.find(place.as_ref()).map(ValueOrPlace::Place).unwrap_or(ValueOrPlace::TOP)
            }
        }
    }
230
    /// Transfer function for terminators. Returns the edges along which the resulting
    /// state should be propagated.
    fn handle_terminator<'mir>(
        &self,
        terminator: &'mir Terminator<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> TerminatorEdges<'mir, 'tcx> {
        match &terminator.kind {
            TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
                // The effect on the output places is applied separately through
                // `apply_call_return_effect` / `handle_call_return`.
            }
            TerminatorKind::Drop { place, .. } => {
                // The drop deinitializes the place: treat it as uninitialized (⊥).
                state.flood_with(place.as_ref(), &self.map, FlatSet::<Scalar>::BOTTOM);
            }
            TerminatorKind::Yield { .. } => {
                // `run_pass` bails out on coroutine bodies, so `Yield` cannot occur.
                bug!("encountered disallowed terminator");
            }
            TerminatorKind::SwitchInt { discr, targets } => {
                return self.handle_switch_int(discr, targets, state);
            }
            TerminatorKind::TailCall { .. } => {
                // A tail call never returns to this body, so there is no return
                // place whose state would need updating here.
            }
            TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::Assert { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. } => {
                // These terminators have no effect on the tracked state.
            }
        }
        terminator.edges()
    }
270
271 fn handle_call_return(
272 &self,
273 return_places: CallReturnPlaces<'_, 'tcx>,
274 state: &mut State<FlatSet<Scalar>>,
275 ) {
276 return_places.for_each(|place| {
277 state.flood(place.as_ref(), &self.map);
278 })
279 }
280
    /// Transfer function for `SetDiscriminant`: invalidates the old discriminant and,
    /// if it is tracked and computable, records the new discriminant value.
    fn handle_set_discriminant(
        &self,
        place: Place<'tcx>,
        variant_index: VariantIdx,
        state: &mut State<FlatSet<Scalar>>,
    ) {
        state.flood_discr(place.as_ref(), &self.map);
        if self.map.find_discr(place.as_ref()).is_some() {
            let enum_ty = place.ty(self.local_decls, self.tcx).ty;
            if let Some(discr) = self.eval_discriminant(enum_ty, variant_index) {
                state.assign_discr(
                    place.as_ref(),
                    ValueOrPlace::Value(FlatSet::Elem(discr)),
                    &self.map,
                );
            }
        }
    }
299
    /// Transfer function for assignments. Rvalues with structure that the map can
    /// track (aggregates, overflowing binops, unsizing casts) are handled
    /// field-by-field; everything else goes through `handle_rvalue`.
    fn handle_assign(
        &self,
        target: Place<'tcx>,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) {
        match rvalue {
            Rvalue::Use(operand) => {
                // Invalidate the whole target first, then assign what we know.
                state.flood(target.as_ref(), &self.map);
                if let Some(target) = self.map.find(target.as_ref()) {
                    self.assign_operand(state, target, operand);
                }
            }
            Rvalue::CopyForDeref(_) => bug!("`CopyForDeref` in runtime MIR"),
            Rvalue::Aggregate(kind, operands) => {
                // If we assign `target = Enum::Variant#0(operand)`, we must make sure
                // that all `target as Variant#i` are invalidated as well.
                state.flood(target.as_ref(), &self.map);

                let Some(target_idx) = self.map.find(target.as_ref()) else { return };

                // Determine where the fields live (directly under the target, or
                // under a variant projection) and which variant is built, if any.
                let (variant_target, variant_index) = match **kind {
                    AggregateKind::Tuple | AggregateKind::Closure(..) => (Some(target_idx), None),
                    AggregateKind::Adt(def_id, variant_index, ..) => {
                        match self.tcx.def_kind(def_id) {
                            DefKind::Struct => (Some(target_idx), None),
                            DefKind::Enum => (
                                self.map.apply(target_idx, TrackElem::Variant(variant_index)),
                                Some(variant_index),
                            ),
                            // Unions (and anything else) are not tracked.
                            _ => return,
                        }
                    }
                    _ => return,
                };
                // Assign each field that is tracked.
                if let Some(variant_target_idx) = variant_target {
                    for (field_index, operand) in operands.iter_enumerated() {
                        if let Some(field) =
                            self.map.apply(variant_target_idx, TrackElem::Field(field_index))
                        {
                            self.assign_operand(state, field, operand);
                        }
                    }
                }
                // Record the discriminant implied by building this enum variant.
                if let Some(variant_index) = variant_index
                    && let Some(discr_idx) = self.map.apply(target_idx, TrackElem::Discriminant)
                {
                    let enum_ty = target.ty(self.local_decls, self.tcx).ty;
                    if let Some(discr_val) = self.eval_discriminant(enum_ty, variant_index) {
                        state.insert_value_idx(discr_idx, FlatSet::Elem(discr_val), &self.map);
                    }
                }
            }
            Rvalue::BinaryOp(op, box (left, right)) if op.is_overflowing() => {
                // Flood everything now, so we can use `insert_value_idx` directly later.
                state.flood(target.as_ref(), &self.map);

                let Some(target) = self.map.find(target.as_ref()) else { return };

                // The result is a `(value, overflowed)` pair: look up both fields.
                let value_target = self.map.apply(target, TrackElem::Field(0_u32.into()));
                let overflow_target = self.map.apply(target, TrackElem::Field(1_u32.into()));

                if value_target.is_some() || overflow_target.is_some() {
                    let (val, overflow) = self.binary_op(state, *op, left, right);

                    if let Some(value_target) = value_target {
                        // We have flooded `target` earlier.
                        state.insert_value_idx(value_target, val, &self.map);
                    }
                    if let Some(overflow_target) = overflow_target {
                        // We have flooded `target` earlier.
                        state.insert_value_idx(overflow_target, overflow, &self.map);
                    }
                }
            }
            Rvalue::Cast(
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                operand,
                _,
            ) => {
                // An unsizing coercion: the pointer value carries over, and if the
                // target's slice length is tracked and the source dereferences to an
                // array, that length is the (const-evaluated) array length.
                let pointer = self.handle_operand(operand, state);
                state.assign(target.as_ref(), pointer, &self.map);

                if let Some(target_len) = self.map.find_len(target.as_ref())
                    && let operand_ty = operand.ty(self.local_decls, self.tcx)
                    && let Some(operand_ty) = operand_ty.builtin_deref(true)
                    && let ty::Array(_, len) = operand_ty.kind()
                    && let Some(len) = Const::Ty(self.tcx.types.usize, *len)
                        .try_eval_scalar_int(self.tcx, self.typing_env)
                {
                    state.insert_value_idx(target_len, FlatSet::Elem(len.into()), &self.map);
                }
            }
            _ => {
                // Generic fallback: evaluate the rvalue to a single value/place.
                let result = self.handle_rvalue(rvalue, state);
                state.assign(target.as_ref(), result, &self.map);
            }
        }
    }
404
    /// Transfer function for rvalues that produce a single value.
    fn handle_rvalue(
        &self,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> ValueOrPlace<FlatSet<Scalar>> {
        let val = match rvalue {
            Rvalue::Cast(CastKind::IntToInt | CastKind::IntToFloat, operand, ty) => {
                let Ok(layout) = self.tcx.layout_of(self.typing_env.as_query_input(*ty)) else {
                    return ValueOrPlace::Value(FlatSet::Top);
                };
                // Evaluate the cast through the interpreter when the operand is known.
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(op) => self
                        .ecx
                        .borrow()
                        .int_to_int_or_float(&op, layout)
                        .discard_err()
                        .map_or(FlatSet::Top, |result| self.wrap_immediate(*result)),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            Rvalue::Cast(CastKind::FloatToInt | CastKind::FloatToFloat, operand, ty) => {
                let Ok(layout) = self.tcx.layout_of(self.typing_env.as_query_input(*ty)) else {
                    return ValueOrPlace::Value(FlatSet::Top);
                };
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(op) => self
                        .ecx
                        .borrow()
                        .float_to_float_or_int(&op, layout)
                        .discard_err()
                        .map_or(FlatSet::Top, |result| self.wrap_immediate(*result)),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            Rvalue::Cast(CastKind::Transmute | CastKind::Subtype, operand, _) => {
                // The bits are unchanged, so the known immediate carries over as-is.
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(op) => self.wrap_immediate(*op),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            Rvalue::BinaryOp(op, box (left, right)) if !op.is_overflowing() => {
                // Overflowing binops are handled in `handle_assign`; here there is no
                // overflow component, so the second tuple element is discarded.
                let (val, _overflow) = self.binary_op(state, *op, left, right);
                val
            }
            Rvalue::UnaryOp(op, operand) => {
                // `PtrMetadata` of a place with a tracked length *is* that length.
                if let UnOp::PtrMetadata = op
                    && let Some(place) = operand.place()
                    && let Some(len) = self.map.find_len(place.as_ref())
                {
                    return ValueOrPlace::Place(len);
                }
                match self.eval_operand(operand, state) {
                    FlatSet::Elem(value) => self
                        .ecx
                        .borrow()
                        .unary_op(*op, &value)
                        .discard_err()
                        .map_or(FlatSet::Top, |val| self.wrap_immediate(*val)),
                    FlatSet::Bottom => FlatSet::Bottom,
                    FlatSet::Top => FlatSet::Top,
                }
            }
            Rvalue::Discriminant(place) => state.get_discr(place.as_ref(), &self.map),
            Rvalue::Use(operand) => return self.handle_operand(operand, state),
            Rvalue::CopyForDeref(_) => bug!("`CopyForDeref` in runtime MIR"),
            Rvalue::Ref(..) | Rvalue::RawPtr(..) => {
                // We don't track references/pointers themselves.
                return ValueOrPlace::TOP;
            }
            Rvalue::Repeat(..)
            | Rvalue::ThreadLocalRef(..)
            | Rvalue::Cast(..)
            | Rvalue::BinaryOp(..)
            | Rvalue::Aggregate(..)
            | Rvalue::WrapUnsafeBinder(..) => {
                // Not modelled: the result is unknown.
                return ValueOrPlace::TOP;
            }
        };
        ValueOrPlace::Value(val)
    }
491
492 fn handle_constant(
493 &self,
494 constant: &ConstOperand<'tcx>,
495 _state: &mut State<FlatSet<Scalar>>,
496 ) -> FlatSet<Scalar> {
497 constant
498 .const_
499 .try_eval_scalar(self.tcx, self.typing_env)
500 .map_or(FlatSet::Top, FlatSet::Elem)
501 }
502
    /// Transfer function for `SwitchInt`: if the discriminant is a known integer,
    /// only the matching edge is taken.
    fn handle_switch_int<'mir>(
        &self,
        discr: &'mir Operand<'tcx>,
        targets: &'mir SwitchTargets,
        state: &mut State<FlatSet<Scalar>>,
    ) -> TerminatorEdges<'mir, 'tcx> {
        let value = match self.handle_operand(discr, state) {
            ValueOrPlace::Value(value) => value,
            ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
        };
        match value {
            // Branching on uninitialized data: no successor is reachable from here.
            FlatSet::Bottom => TerminatorEdges::None,
            FlatSet::Elem(scalar) => {
                if let Ok(scalar_int) = scalar.try_to_scalar_int() {
                    TerminatorEdges::Single(
                        targets.target_for_value(scalar_int.to_bits_unchecked()),
                    )
                } else {
                    // The scalar carries provenance, so its bits are not usable.
                    TerminatorEdges::SwitchInt { discr, targets }
                }
            }
            FlatSet::Top => TerminatorEdges::SwitchInt { discr, targets },
        }
    }
529
    /// Assigns `operand` into the tracked place `place`.
    ///
    /// The caller must have flooded `place` beforehand.
    fn assign_operand(
        &self,
        state: &mut State<FlatSet<Scalar>>,
        place: PlaceIndex,
        operand: &Operand<'tcx>,
    ) {
        match operand {
            Operand::RuntimeChecks(_) => {}
            Operand::Copy(rhs) | Operand::Move(rhs) => {
                if let Some(rhs) = self.map.find(rhs.as_ref()) {
                    // The rhs is tracked itself: copy over its whole value tree.
                    state.insert_place_idx(place, rhs, &self.map);
                } else if rhs.projection.first() == Some(&PlaceElem::Deref)
                    && let FlatSet::Elem(pointer) = state.get(rhs.local.into(), &self.map)
                    && let rhs_ty = self.local_decls[rhs.local].ty
                    && let Ok(rhs_layout) =
                        self.tcx.layout_of(self.typing_env.as_query_input(rhs_ty))
                {
                    // The rhs dereferences a pointer whose value is known: project
                    // through the constant via the interpreter.
                    let op = ImmTy::from_scalar(pointer, rhs_layout).into();
                    self.assign_constant(state, place, op, rhs.projection);
                }
            }
            Operand::Constant(box constant) => {
                if let Some(constant) = self
                    .ecx
                    .borrow()
                    .eval_mir_constant(&constant.const_, constant.span, None)
                    .discard_err()
                {
                    self.assign_constant(state, place, constant, &[]);
                }
            }
        }
    }
564
    /// Assigns the constant `operand`, after applying `projection` to it, into the
    /// tracked place `place`, recording one scalar per tracked projection leaf.
    ///
    /// The caller must have flooded `place` beforehand.
    #[instrument(level = "trace", skip(self, state))]
    fn assign_constant(
        &self,
        state: &mut State<FlatSet<Scalar>>,
        place: PlaceIndex,
        mut operand: OpTy<'tcx>,
        projection: &[PlaceElem<'tcx>],
    ) {
        // First apply the MIR projection to the constant operand.
        for &(mut proj_elem) in projection {
            if let PlaceElem::Index(index) = proj_elem {
                // A dynamic index can only be applied when its value is known; turn
                // it into a constant index in that case, otherwise give up.
                if let FlatSet::Elem(index) = state.get(index.into(), &self.map)
                    && let Some(offset) = index.to_target_usize(&self.tcx).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    proj_elem = PlaceElem::ConstantIndex { offset, min_length, from_end: false };
                } else {
                    return;
                }
            }
            operand = if let Some(operand) =
                self.ecx.borrow().project(&operand, proj_elem).discard_err()
            {
                operand
            } else {
                return;
            }
        }

        // Then walk the tracked projection tree of `place` in lockstep with the
        // constant, storing every readable scalar leaf into the state.
        self.map.for_each_projection_value(
            place,
            operand,
            &mut |elem, op| match elem {
                TrackElem::Field(idx) => self.ecx.borrow().project_field(op, idx).discard_err(),
                TrackElem::Variant(idx) => {
                    self.ecx.borrow().project_downcast(op, idx).discard_err()
                }
                TrackElem::Discriminant => {
                    // The discriminant is not a field; read it and re-wrap it as an
                    // immediate operand.
                    let variant = self.ecx.borrow().read_discriminant(op).discard_err()?;
                    let discr_value = self
                        .ecx
                        .borrow()
                        .discriminant_for_variant(op.layout.ty, variant)
                        .discard_err()?;
                    Some(discr_value.into())
                }
                TrackElem::DerefLen => {
                    // The length of the pointed-to slice, as a `usize` immediate.
                    let op: OpTy<'_> = self.ecx.borrow().deref_pointer(op).discard_err()?.into();
                    let len_usize = op.len(&self.ecx.borrow()).discard_err()?;
                    let layout = self
                        .tcx
                        .layout_of(self.typing_env.as_query_input(self.tcx.types.usize))
                        .unwrap();
                    Some(ImmTy::from_uint(len_usize, layout).into())
                }
            },
            &mut |place, op| {
                // Only values readable as an immediate (the `Right` case of
                // `read_immediate_raw`) are recorded.
                if let Some(imm) = self.ecx.borrow().read_immediate_raw(op).discard_err()
                    && let Some(imm) = imm.right()
                {
                    let elem = self.wrap_immediate(*imm);
                    state.insert_value_idx(place, elem, &self.map);
                }
            },
        );
    }
633
    /// Evaluates `left op right`, returning a `(value, overflow)` pair of lattice
    /// elements. For operators without an overflow component the second element is ⊥.
    fn binary_op(
        &self,
        state: &mut State<FlatSet<Scalar>>,
        op: BinOp,
        left: &Operand<'tcx>,
        right: &Operand<'tcx>,
    ) -> (FlatSet<Scalar>, FlatSet<Scalar>) {
        let left = self.eval_operand(left, state);
        let right = self.eval_operand(right, state);

        match (left, right) {
            (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
            // Both sides are known: do the actual computation.
            (FlatSet::Elem(left), FlatSet::Elem(right)) => {
                match self.ecx.borrow().binary_op(op, &left, &right).discard_err() {
                    Some(val) => {
                        // We always return a pair, and just make the second component
                        // ⊥ when the result is not actually a scalar pair.
                        if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
                            let (val, overflow) = val.to_scalar_pair();
                            (FlatSet::Elem(val), FlatSet::Elem(overflow))
                        } else {
                            (FlatSet::Elem(val.to_scalar()), FlatSet::Bottom)
                        }
                    }
                    _ => (FlatSet::Top, FlatSet::Top),
                }
            }
            // Exactly one side is known: attempt some algebraic simplifications.
            (FlatSet::Elem(const_arg), _) | (_, FlatSet::Elem(const_arg)) => {
                let layout = const_arg.layout;
                if !matches!(layout.backend_repr, rustc_abi::BackendRepr::Scalar(..)) {
                    return (FlatSet::Top, FlatSet::Top);
                }

                let arg_scalar = const_arg.to_scalar();
                let Some(arg_value) = arg_scalar.to_bits(layout.size).discard_err() else {
                    return (FlatSet::Top, FlatSet::Top);
                };

                match op {
                    // `x & 0` is always `0`.
                    BinOp::BitAnd if arg_value == 0 => (FlatSet::Elem(arg_scalar), FlatSet::Bottom),
                    // `x | !0` (or `x | true` for bools) is always the constant side.
                    BinOp::BitOr
                        if arg_value == layout.size.truncate(u128::MAX)
                            || (layout.ty.is_bool() && arg_value == 1) =>
                    {
                        (FlatSet::Elem(arg_scalar), FlatSet::Bottom)
                    }
                    // `x * 0` is `0` and can never overflow.
                    BinOp::Mul if layout.ty.is_integral() && arg_value == 0 => {
                        (FlatSet::Elem(arg_scalar), FlatSet::Elem(Scalar::from_bool(false)))
                    }
                    _ => (FlatSet::Top, FlatSet::Top),
                }
            }
            (FlatSet::Top, FlatSet::Top) => (FlatSet::Top, FlatSet::Top),
        }
    }
692
    /// Evaluates an operand to an interpreter immediate, attaching the layout needed
    /// for further interpreter operations; ⊤ if the layout cannot be computed.
    fn eval_operand(
        &self,
        op: &Operand<'tcx>,
        state: &mut State<FlatSet<Scalar>>,
    ) -> FlatSet<ImmTy<'tcx>> {
        let value = match self.handle_operand(op, state) {
            ValueOrPlace::Value(value) => value,
            ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
        };
        match value {
            FlatSet::Top => FlatSet::Top,
            FlatSet::Elem(scalar) => {
                let ty = op.ty(self.local_decls, self.tcx);
                self.tcx
                    .layout_of(self.typing_env.as_query_input(ty))
                    .map_or(FlatSet::Top, |layout| {
                        FlatSet::Elem(ImmTy::from_scalar(scalar, layout))
                    })
            }
            FlatSet::Bottom => FlatSet::Bottom,
        }
    }
715
    /// Computes the discriminant value of `variant_index` for `enum_ty`, provided the
    /// type is an enum and its layout can be computed.
    fn eval_discriminant(&self, enum_ty: Ty<'tcx>, variant_index: VariantIdx) -> Option<Scalar> {
        if !enum_ty.is_enum() {
            return None;
        }
        let enum_ty_layout = self.tcx.layout_of(self.typing_env.as_query_input(enum_ty)).ok()?;
        let discr_value = self
            .ecx
            .borrow()
            .discriminant_for_variant(enum_ty_layout.ty, variant_index)
            .discard_err()?;
        Some(discr_value.to_scalar())
    }
728
729 fn wrap_immediate(&self, imm: Immediate) -> FlatSet<Scalar> {
730 match imm {
731 Immediate::Scalar(scalar) => FlatSet::Elem(scalar),
732 Immediate::Uninit => FlatSet::Bottom,
733 _ => FlatSet::Top,
734 }
735 }
736}
737
impl<'tcx> DebugWithContext<ConstAnalysis<'_, 'tcx>> for State<FlatSet<Scalar>> {
    fn fmt_with(&self, ctxt: &ConstAnalysis<'_, 'tcx>, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            // Print the tracked values, using the map to name the places.
            State::Reachable(values) => debug_with_context(values, None, &ctxt.map, f),
            State::Unreachable => write!(f, "unreachable"),
        }
    }

    fn fmt_diff_with(
        &self,
        old: &Self,
        ctxt: &ConstAnalysis<'_, 'tcx>,
        f: &mut Formatter<'_>,
    ) -> std::fmt::Result {
        match (self, old) {
            (State::Reachable(this), State::Reachable(old)) => {
                debug_with_context(this, Some(old), &ctxt.map, f)
            }
            // Nothing useful to print when either side is unreachable.
            _ => Ok(()), }
    }
}
761
/// The rewrites discovered by the analysis, applied to the MIR in a second pass.
struct Patch<'tcx> {
    tcx: TyCtxt<'tcx>,

    /// For a given MIR location, stores the known constant values of the operands
    /// used by that location. These are values *before* the statement's effect, so
    /// the operands of `_1 = _1 + _2` are the values before the assignment.
    before_effect: FxHashMap<(Location, Place<'tcx>), Const<'tcx>>,

    /// Stores the constant value assigned by an assignment statement, keyed by its
    /// location.
    assignments: FxHashMap<Location, Const<'tcx>>,
}
773
774impl<'tcx> Patch<'tcx> {
775 pub(crate) fn new(tcx: TyCtxt<'tcx>) -> Self {
776 Self { tcx, before_effect: FxHashMap::default(), assignments: FxHashMap::default() }
777 }
778
779 fn make_operand(&self, const_: Const<'tcx>) -> Operand<'tcx> {
780 Operand::Constant(Box::new(ConstOperand { span: DUMMY_SP, user_ty: None, const_ }))
781 }
782}
783
/// Results visitor that walks the analysis fixpoint and records every value that can
/// be materialized as a constant into a [`Patch`].
struct Collector<'a, 'tcx> {
    patch: Patch<'tcx>,
    local_decls: &'a LocalDecls<'tcx>,
}
788
impl<'a, 'tcx> Collector<'a, 'tcx> {
    pub(crate) fn new(tcx: TyCtxt<'tcx>, local_decls: &'a LocalDecls<'tcx>) -> Self {
        Self { patch: Patch::new(tcx), local_decls }
    }

    /// Tries to turn the abstract value of `place` in `state` into a MIR constant.
    ///
    /// Returns `None` when the place is not tracked, its type is unsized, or the
    /// tracked information is insufficient to materialize a full value.
    #[instrument(level = "trace", skip(self, ecx, map), ret)]
    fn try_make_constant(
        &self,
        ecx: &mut InterpCx<'tcx, DummyMachine>,
        place: Place<'tcx>,
        state: &State<FlatSet<Scalar>>,
        map: &Map<'tcx>,
    ) -> Option<Const<'tcx>> {
        let ty = place.ty(self.local_decls, self.patch.tcx).ty;
        let layout = ecx.layout_of(ty).ok()?;

        // ZSTs have a unique value; no state lookup needed.
        if layout.is_zst() {
            return Some(Const::zero_sized(ty));
        }

        if layout.is_unsized() {
            return None;
        }

        let place = map.find(place.as_ref())?;
        // Simple case: a single tracked scalar without provenance.
        if layout.backend_repr.is_scalar()
            && let Some(value) = propagatable_scalar(place, state, map)
        {
            return Some(Const::Val(ConstValue::Scalar(value), ty));
        }

        // Otherwise, try to rebuild the whole value into a temporary allocation from
        // its tracked parts (see `try_write_constant`).
        if matches!(layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
            let alloc_id = ecx
                .intern_with_temp_alloc(layout, |ecx, dest| {
                    try_write_constant(ecx, dest, place, ty, state, map)
                })
                .discard_err()?;
            return Some(Const::Val(ConstValue::Indirect { alloc_id, offset: Size::ZERO }, ty));
        }

        None
    }
}
832
833#[instrument(level = "trace", skip(map), ret)]
834fn propagatable_scalar(
835 place: PlaceIndex,
836 state: &State<FlatSet<Scalar>>,
837 map: &Map<'_>,
838) -> Option<Scalar> {
839 if let FlatSet::Elem(value) = state.get_idx(place, map)
840 && value.try_to_scalar_int().is_ok()
841 {
842 Some(value)
844 } else {
845 None
846 }
847}
848
/// Writes the tracked value of `place` (of type `ty`) into the interpreter
/// destination `dest`, recursing into tuples and ADTs.
///
/// Stops the machine (failing the surrounding interning) as soon as any needed part
/// of the value is unknown or of an unsupported shape.
#[instrument(level = "trace", skip(ecx, state, map), ret)]
fn try_write_constant<'tcx>(
    ecx: &mut InterpCx<'tcx, DummyMachine>,
    dest: &PlaceTy<'tcx>,
    place: PlaceIndex,
    ty: Ty<'tcx>,
    state: &State<FlatSet<Scalar>>,
    map: &Map<'tcx>,
) -> InterpResult<'tcx> {
    let layout = ecx.layout_of(ty)?;

    // Fast path for ZSTs: nothing to write.
    if layout.is_zst() {
        return interp_ok(());
    }

    // Fast path for directly-tracked scalars.
    if layout.backend_repr.is_scalar()
        && let Some(value) = propagatable_scalar(place, state, map)
    {
        return ecx.write_immediate(Immediate::Scalar(value), dest);
    }

    match ty.kind() {
        // Function items are zero-sized; nothing to write.
        ty::FnDef(..) => {}

        // These are scalars and must have been handled by the fast path above; if we
        // get here, the tracked scalar carries provenance.
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char =>
            throw_machine_stop_str!("primitive type with provenance"),

        ty::Tuple(elem_tys) => {
            // Write every tuple field; all of them must be tracked.
            for (i, elem) in elem_tys.iter().enumerate() {
                let i = FieldIdx::from_usize(i);
                let Some(field) = map.apply(place, TrackElem::Field(i)) else {
                    throw_machine_stop_str!("missing field in tuple")
                };
                let field_dest = ecx.project_field(dest, i)?;
                try_write_constant(ecx, &field_dest, field, elem, state, map)?;
            }
        }

        ty::Adt(def, args) => {
            if def.is_union() {
                throw_machine_stop_str!("cannot propagate unions")
            }

            // For enums, resolve the active variant from the tracked discriminant
            // before writing that variant's fields.
            let (variant_idx, variant_def, variant_place, variant_dest) = if def.is_enum() {
                let Some(discr) = map.apply(place, TrackElem::Discriminant) else {
                    throw_machine_stop_str!("missing discriminant for enum")
                };
                let FlatSet::Elem(Scalar::Int(discr)) = state.get_idx(discr, map) else {
                    throw_machine_stop_str!("discriminant with provenance")
                };
                let discr_bits = discr.to_bits(discr.size());
                let Some((variant, _)) = def.discriminants(*ecx.tcx).find(|(_, var)| discr_bits == var.val) else {
                    throw_machine_stop_str!("illegal discriminant for enum")
                };
                let Some(variant_place) = map.apply(place, TrackElem::Variant(variant)) else {
                    throw_machine_stop_str!("missing variant for enum")
                };
                let variant_dest = ecx.project_downcast(dest, variant)?;
                (variant, def.variant(variant), variant_place, variant_dest)
            } else {
                (FIRST_VARIANT, def.non_enum_variant(), place, dest.clone())
            };

            for (i, field) in variant_def.fields.iter_enumerated() {
                let ty = field.ty(*ecx.tcx, args);
                let Some(field) = map.apply(variant_place, TrackElem::Field(i)) else {
                    throw_machine_stop_str!("missing field in ADT")
                };
                let field_dest = ecx.project_field(&variant_dest, i)?;
                try_write_constant(ecx, &field_dest, field, ty, state, map)?;
            }
            // Write the discriminant last, after all fields are in place.
            ecx.write_discriminant(variant_idx, dest)?;
        }

        // Unsupported for now.
        ty::Array(_, _)
        | ty::Pat(_, _)

        // Do not attempt to support indirection in constants.
        | ty::Ref(..) | ty::RawPtr(..) | ty::FnPtr(..) | ty::Str | ty::Slice(_)

        | ty::Never
        | ty::Foreign(..)
        | ty::Alias(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::Dynamic(..)
        | ty::UnsafeBinder(_) => throw_machine_stop_str!("unsupported type"),

        ty::Error(_) | ty::Infer(..) | ty::CoroutineWitness(..) => bug!(),
    }

    interp_ok(())
}
951
impl<'tcx> ResultsVisitor<'tcx, ConstAnalysis<'_, 'tcx>> for Collector<'_, 'tcx> {
    #[instrument(level = "trace", skip(self, analysis, statement))]
    fn visit_after_early_statement_effect(
        &mut self,
        analysis: &ConstAnalysis<'_, 'tcx>,
        state: &State<FlatSet<Scalar>>,
        statement: &Statement<'tcx>,
        location: Location,
    ) {
        match &statement.kind {
            StatementKind::Assign(box (_, rvalue)) => {
                // Record the constant values of the operands *read* by this
                // assignment, using the state from before the statement's effect.
                OperandCollector {
                    state,
                    visitor: self,
                    ecx: &mut analysis.ecx.borrow_mut(),
                    map: &analysis.map,
                }
                .visit_rvalue(rvalue, location);
            }
            _ => (),
        }
    }

    #[instrument(level = "trace", skip(self, analysis, statement))]
    fn visit_after_primary_statement_effect(
        &mut self,
        analysis: &ConstAnalysis<'_, 'tcx>,
        state: &State<FlatSet<Scalar>>,
        statement: &Statement<'tcx>,
        location: Location,
    ) {
        match statement.kind {
            StatementKind::Assign(box (_, Rvalue::Use(Operand::Constant(_)))) => {
                // The assigned value is already a constant; rewriting it would be
                // pointless churn.
            }
            StatementKind::Assign(box (place, _)) => {
                // After the assignment, try to express the assigned place as a
                // constant and remember it for the patching phase.
                if let Some(value) = self.try_make_constant(
                    &mut analysis.ecx.borrow_mut(),
                    place,
                    state,
                    &analysis.map,
                ) {
                    self.patch.assignments.insert(location, value);
                }
            }
            _ => (),
        }
    }

    fn visit_after_early_terminator_effect(
        &mut self,
        analysis: &ConstAnalysis<'_, 'tcx>,
        state: &State<FlatSet<Scalar>>,
        terminator: &Terminator<'tcx>,
        location: Location,
    ) {
        // Record constant operands used by the terminator (e.g. call arguments).
        OperandCollector {
            state,
            visitor: self,
            ecx: &mut analysis.ecx.borrow_mut(),
            map: &analysis.map,
        }
        .visit_terminator(terminator, location);
    }
}
1017
impl<'tcx> MutVisitor<'tcx> for Patch<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
        if let Some(value) = self.assignments.get(&location) {
            match &mut statement.kind {
                StatementKind::Assign(box (_, rvalue)) => {
                    // Replace the whole rvalue by the constant computed for it.
                    *rvalue = Rvalue::Use(self.make_operand(*value));
                }
                _ => bug!("found assignment info for non-assign statement"),
            }
        } else {
            self.super_statement(statement, location);
        }
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        match operand {
            Operand::Copy(place) | Operand::Move(place) => {
                if let Some(value) = self.before_effect.get(&(location, *place)) {
                    // The operand's place has a known constant value here.
                    *operand = self.make_operand(*value);
                } else if !place.projection.is_empty() {
                    // Recurse so that known index locals inside the projection can
                    // still be replaced (see `process_projection_elem`).
                    self.super_operand(operand, location)
                }
            }
            Operand::Constant(_) | Operand::RuntimeChecks(_) => {}
        }
    }

    fn process_projection_elem(
        &mut self,
        elem: PlaceElem<'tcx>,
        location: Location,
    ) -> Option<PlaceElem<'tcx>> {
        // Replace a dynamic `Index(local)` projection by a constant index when the
        // index local's value is known at this location.
        if let PlaceElem::Index(local) = elem {
            let offset = self.before_effect.get(&(location, local.into()))?;
            let offset = offset.try_to_scalar()?;
            let offset = offset.to_target_usize(&self.tcx).discard_err()?;
            let min_length = offset.checked_add(1)?;
            Some(PlaceElem::ConstantIndex { offset, min_length, from_end: false })
        } else {
            None
        }
    }
}
1065
/// MIR visitor that records, for every visited operand (and index local), the
/// constant value it has in the given `state`, into the collector's patch.
struct OperandCollector<'a, 'b, 'tcx> {
    state: &'a State<FlatSet<Scalar>>,
    visitor: &'a mut Collector<'b, 'tcx>,
    ecx: &'a mut InterpCx<'tcx, DummyMachine>,
    map: &'a Map<'tcx>,
}
1072
impl<'tcx> Visitor<'tcx> for OperandCollector<'_, '_, 'tcx> {
    fn visit_projection_elem(
        &mut self,
        _: PlaceRef<'tcx>,
        elem: PlaceElem<'tcx>,
        _: PlaceContext,
        location: Location,
    ) {
        // Record known values for locals used as dynamic indices, so that
        // `Patch::process_projection_elem` can turn them into constant indices.
        if let PlaceElem::Index(local) = elem
            && let Some(value) =
                self.visitor.try_make_constant(self.ecx, local.into(), self.state, self.map)
        {
            self.visitor.patch.before_effect.insert((location, local.into()), value);
        }
    }

    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        if let Some(place) = operand.place() {
            if let Some(value) =
                self.visitor.try_make_constant(self.ecx, place, self.state, self.map)
            {
                self.visitor.patch.before_effect.insert((location, place), value);
            } else if !place.projection.is_empty() {
                // The whole place is not a constant, but an index local inside its
                // projection might still be; keep walking.
                self.super_operand(operand, location)
            }
        }
    }
}