// compiler/rustc_type_ir/src/search_graph/stack.rs

use std::ops::Index;

use derive_where::derive_where;
use rustc_index::IndexVec;

use crate::search_graph::{
    AvailableDepth, CandidateHeadUsages, Cx, CycleHeads, HeadUsages, NestedGoals, PathKind,
};
10impl ::std::fmt::Debug for StackDepth {
    fn fmt(&self, fmt: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
        fmt.write_fmt(format_args!("{0}", self.as_u32()))
    }
}rustc_index::newtype_index! {
11    #[orderable]
12    #[gate_rustc_only]
13    pub(super) struct StackDepth {}
14}
15
16/// Stack entries of the evaluation stack. Its fields tend to be lazily updated
17/// when popping a child goal or completely immutable.
18#[automatically_derived]
impl<X: Cx> ::core::fmt::Debug for StackEntry<X> where X: Cx {
    fn fmt(&self, __f: &mut ::core::fmt::Formatter<'_>)
        -> ::core::fmt::Result {
        match self {
            StackEntry {
                input: ref __field_input,
                step_kind_from_parent: ref __field_step_kind_from_parent,
                available_depth: ref __field_available_depth,
                required_depth: ref __field_required_depth,
                provisional_result: ref __field_provisional_result,
                heads: ref __field_heads,
                encountered_overflow: ref __field_encountered_overflow,
                usages: ref __field_usages,
                candidate_usages: ref __field_candidate_usages,
                nested_goals: ref __field_nested_goals } => {
                let mut __builder =
                    ::core::fmt::Formatter::debug_struct(__f, "StackEntry");
                ::core::fmt::DebugStruct::field(&mut __builder, "input",
                    __field_input);
                ::core::fmt::DebugStruct::field(&mut __builder,
                    "step_kind_from_parent", __field_step_kind_from_parent);
                ::core::fmt::DebugStruct::field(&mut __builder,
                    "available_depth", __field_available_depth);
                ::core::fmt::DebugStruct::field(&mut __builder,
                    "required_depth", __field_required_depth);
                ::core::fmt::DebugStruct::field(&mut __builder,
                    "provisional_result", __field_provisional_result);
                ::core::fmt::DebugStruct::field(&mut __builder, "heads",
                    __field_heads);
                ::core::fmt::DebugStruct::field(&mut __builder,
                    "encountered_overflow", __field_encountered_overflow);
                ::core::fmt::DebugStruct::field(&mut __builder, "usages",
                    __field_usages);
                ::core::fmt::DebugStruct::field(&mut __builder,
                    "candidate_usages", __field_candidate_usages);
                ::core::fmt::DebugStruct::field(&mut __builder,
                    "nested_goals", __field_nested_goals);
                ::core::fmt::DebugStruct::finish(&mut __builder)
            }
        }
    }
}#[derive_where(Debug; X: Cx)]
19pub(super) struct StackEntry<X: Cx> {
20    pub input: X::Input,
21
22    /// Whether proving this goal is a coinductive step.
23    ///
24    /// This is used when encountering a trait solver cycle to
25    /// decide whether the initial provisional result of the cycle.
26    pub step_kind_from_parent: PathKind,
27
28    /// The available depth of a given goal, immutable.
29    pub available_depth: AvailableDepth,
30
31    /// The maximum depth required while evaluating this goal.
32    pub required_depth: usize,
33
34    /// Starts out as `None` and gets set when rerunning this
35    /// goal in case we encounter a cycle.
36    pub provisional_result: Option<X::Result>,
37
38    /// All cycle heads this goal depends on. Lazily updated and only
39    /// up-to date for the top of the stack.
40    pub heads: CycleHeads,
41
42    /// Whether evaluating this goal encountered overflow. Lazily updated.
43    pub encountered_overflow: bool,
44
45    /// Whether and how this goal has been used as a cycle head. Lazily updated.
46    pub usages: Option<HeadUsages>,
47
48    /// We want to be able to ignore head usages if they happen inside of candidates
49    /// which don't impact the result of a goal. This enables us to avoid rerunning goals
50    /// and is also used when rebasing provisional cache entries.
51    ///
52    /// To implement this, we track all usages while evaluating a candidate. If this candidate
53    /// then ends up ignored, we manually remove its usages from `usages` and `heads`.
54    pub candidate_usages: Option<CandidateHeadUsages>,
55
56    /// The nested goals of this goal, see the doc comment of the type.
57    pub nested_goals: NestedGoals<X>,
58}
59
60/// The stack of goals currently being computed.
61///
62/// An element is *deeper* in the stack if its index is *lower*.
63///
64/// Only the last entry of the stack is mutable. All other entries get
65/// lazily updated in `update_parent_goal`.
66#[automatically_derived]
impl<X: Cx> ::core::default::Default for Stack<X> where X: Cx {
    fn default() -> Self {
        Stack { entries: ::core::default::Default::default() }
    }
}#[derive_where(Default; X: Cx)]
67pub(super) struct Stack<X: Cx> {
68    entries: IndexVec<StackDepth, StackEntry<X>>,
69}
70
71impl<X: Cx> Stack<X> {
72    pub(super) fn is_empty(&self) -> bool {
73        self.entries.is_empty()
74    }
75
76    pub(super) fn len(&self) -> usize {
77        self.entries.len()
78    }
79
80    pub(super) fn last(&self) -> Option<&StackEntry<X>> {
81        self.entries.raw.last()
82    }
83
84    pub(super) fn last_mut(&mut self) -> Option<&mut StackEntry<X>> {
85        self.entries.raw.last_mut()
86    }
87
88    pub(super) fn last_mut_with_index(&mut self) -> Option<(StackDepth, &mut StackEntry<X>)> {
89        self.entries.last_index().map(|idx| (idx, &mut self.entries[idx]))
90    }
91
92    pub(super) fn next_index(&self) -> StackDepth {
93        self.entries.next_index()
94    }
95
96    pub(super) fn push(&mut self, entry: StackEntry<X>) -> StackDepth {
97        if truecfg!(debug_assertions) && self.entries.iter().any(|e| e.input == entry.input) {
98            {
    ::core::panicking::panic_fmt(format_args!("pushing duplicate entry on stack: {1:?} {0:?}",
            self.entries, entry));
};panic!("pushing duplicate entry on stack: {entry:?} {:?}", self.entries);
99        }
100        self.entries.push(entry)
101    }
102
103    pub(super) fn pop(&mut self) -> StackEntry<X> {
104        self.entries.pop().unwrap()
105    }
106
107    pub(super) fn cycle_step_kinds(&self, head: StackDepth) -> impl Iterator<Item = PathKind> {
108        self.entries.raw[head.index() + 1..].iter().map(|entry| entry.step_kind_from_parent)
109    }
110
111    pub(super) fn iter(&self) -> impl Iterator<Item = &StackEntry<X>> {
112        self.entries.iter()
113    }
114
115    pub(super) fn find(&self, input: X::Input) -> Option<StackDepth> {
116        self.entries.iter_enumerated().find(|(_, e)| e.input == input).map(|(idx, _)| idx)
117    }
118}
119
120impl<X: Cx> Index<StackDepth> for Stack<X> {
121    type Output = StackEntry<X>;
122    fn index(&self, index: StackDepth) -> &StackEntry<X> {
123        &self.entries[index]
124    }
125}